aGrUM  0.14.2
inferenceEngine_tpl.h
Go to the documentation of this file.
1 /***************************************************************************
2  * Copyright (C) 2005 by Christophe GONZALES and Pierre-Henri WUILLEMIN *
3  * {prenom.nom}_at_lip6.fr *
4  * *
5  * This program is free software; you can redistribute it and/or modify *
6  * it under the terms of the GNU General Public License as published by *
7  * the Free Software Foundation; either version 2 of the License, or *
8  * (at your option) any later version. *
9  * *
10  * This program is distributed in the hope that it will be useful, *
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
13  * GNU General Public License for more details. *
14  * *
15  * You should have received a copy of the GNU General Public License *
16  * along with this program; if not, write to the *
17  * Free Software Foundation, Inc., *
18  * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
19  ***************************************************************************/
26 #include <agrum/agrum.h>
27 
28 namespace gum {
29  namespace credal {
30 
31  /*template< typename GUM_SCALAR >
32  InferenceEngine< GUM_SCALAR >::InferenceEngine () : ApproximationScheme() {
33  std::cout << "InferenceEngine construct ()" << std::endl;
34  GUM_CONSTRUCTOR ( InferenceEngine );
35  }*/
36 
  template < typename GUM_SCALAR >
  // Constructor: binds this engine to a credal network.
  // NOTE(review): the signature and member-initializer lines (orig. 38,
  // 40-41, 45) were lost in extraction; per the member index this is
  // InferenceEngine(const CredalNet< GUM_SCALAR >& credalNet) — confirm
  // against the repository source before rebuilding.
     const CredalNet< GUM_SCALAR >& credalNet) :

     // give the optimal-BN bookkeeping object access to the credal net
     _dbnOpt.setCNet(credalNet);

     GUM_CONSTRUCTOR(InferenceEngine);
  }
49 
  template < typename GUM_SCALAR >
  // Destructor. NOTE(review): the signature line (orig. 51,
  // InferenceEngine< GUM_SCALAR >::~InferenceEngine()) was lost in extraction.
     GUM_DESTRUCTOR(InferenceEngine);
  }
54 
  template < typename GUM_SCALAR >
  // Returns the credal network this engine works on.
  // NOTE(review): the signature line (orig. 56) was lost in extraction; per
  // the member index it is const CredalNet< GUM_SCALAR >& credalNet().
     return *_credalNet;
  }
59 
  template < typename GUM_SCALAR >
  // Erases evidence, queries and dynamic expectations so another inference
  // can be performed. Marginals/expectations/credal sets are deliberately
  // left untouched (the clears are commented out below).
  // NOTE(review): the signature line (orig. 61, virtual void
  // eraseAllEvidence()) was lost in extraction.
     _evidence.clear();
     _query.clear();
     /*
     _marginalMin.clear();
     _marginalMax.clear();
     _oldMarginalMin.clear();
     _oldMarginalMax.clear();
     */
     /*
     _expectationMin.clear();
     _expectationMax.clear();
     */

     // _marginalSets.clear();

     _dynamicExpMin.clear();
     _dynamicExpMax.clear();

     //_modal.clear();

     //_t0.clear();
     //_t1.clear();
  }
88 
89  /*
90  template< typename GUM_SCALAR >
91  void InferenceEngine< GUM_SCALAR >::setIterStop ( const int &iter_stop ) {
92  _iterStop = iter_stop;
93  }*/
94 
  template < typename GUM_SCALAR >
  // Enables/disables storing of the optimal IBayesNet found during inference.
  // NOTE(review): the signature line (orig. 96) was lost in extraction.
     _storeBNOpt = value;
  }
99 
  template < typename GUM_SCALAR >
  // Enables/disables storing of credal-set vertices during inference.
  // NOTE(review): the signature line (orig. 101) was lost in extraction.
     _storeVertices = value;

     // allocate (empty) per-node vertex sets as soon as storage is enabled
     if (value) _initMarginalSets();
  }
106 
  template < typename GUM_SCALAR >
  // Sets the repetitive-independence flag (dynamic networks only).
  // NOTE(review): the signature line (orig. 108, void setRepetitiveInd(const
  // bool repetitive)) was lost in extraction.
     bool oldValue = _repetitiveInd;
     _repetitiveInd = repetitive;

     // do not compute clusters more than once
     if (_repetitiveInd && !oldValue) _repetitiveInit();
  }
115 
  template < typename GUM_SCALAR >
  // Returns the current repetitive-independence status.
  // NOTE(review): the signature line (orig. 117, bool repetitiveInd() const)
  // was lost in extraction.
     return _repetitiveInd;
  }
120  /*
121  template< typename GUM_SCALAR >
122  int InferenceEngine< GUM_SCALAR >::iterStop () const {
123  return _iterStop;
124  }*/
125 
  template < typename GUM_SCALAR >
  // Returns True if credal-set vertices are being stored.
  // NOTE(review): the signature line (orig. 127, bool storeVertices() const)
  // was lost in extraction.
     return _storeVertices;
  }
130 
  template < typename GUM_SCALAR >
  // Returns True if the optimal IBayesNet is being stored.
  // NOTE(review): the signature line (orig. 132, bool storeBNOpt() const)
  // was lost in extraction.
     return _storeBNOpt;
  }
135 
  template < typename GUM_SCALAR >
  // Returns a pointer to the optimum-net storage object.
  // NOTE(review): the signature lines (orig. 137-138,
  // VarMod2BNsMap< GUM_SCALAR >* getVarMod2BNsMap()) were lost in extraction.
     return &_dbnOpt;
  }
141 
142  template < typename GUM_SCALAR >
143  void InferenceEngine< GUM_SCALAR >::insertModalsFile(const std::string& path) {
144  std::ifstream mod_stream(path.c_str(), std::ios::in);
145 
146  if (!mod_stream.good()) {
148  "void InferenceEngine< GUM_SCALAR "
149  ">::insertModals(const std::string & path) : "
150  "could not open input file : "
151  << path);
152  }
153 
154  if (!_modal.empty()) _modal.clear();
155 
156  std::string line, tmp;
157  char * cstr, *p;
158 
159  while (mod_stream.good()) {
160  getline(mod_stream, line);
161 
162  if (line.size() == 0) continue;
163 
164  cstr = new char[line.size() + 1];
165  strcpy(cstr, line.c_str());
166 
167  p = strtok(cstr, " ");
168  tmp = p;
169 
170  std::vector< GUM_SCALAR > values;
171  p = strtok(nullptr, " ");
172 
173  while (p != nullptr) {
174  values.push_back(GUM_SCALAR(atof(p)));
175  p = strtok(nullptr, " ");
176  } // end of : line
177 
178  _modal.insert(tmp, values); //[tmp] = values;
179 
180  delete[] p;
181  delete[] cstr;
182  } // end of : file
183 
184  mod_stream.close();
185 
187  }
188 
  template < typename GUM_SCALAR >
  // Inserts variables' modalities from a map (name -> values) to compute
  // expectations. Unknown names are reported and skipped; entries whose size
  // does not match the variable cardinality are silently skipped.
  // NOTE(review): the signature line (orig. 190, void insertModals(...))
  // was lost in extraction.
     const std::map< std::string, std::vector< GUM_SCALAR > >& modals) {
     if (!_modal.empty()) _modal.clear();

     for (auto it = modals.cbegin(), theEnd = modals.cend(); it != theEnd; ++it) {
        NodeId id;

        try {
           id = _credalNet->current_bn().idFromName(it->first);
        } catch (NotFound& err) {
           // unknown variable name: warn and skip this entry
           GUM_SHOWERROR(err);
           continue;
        }

        // check that modals are net compatible
        auto dSize = _credalNet->current_bn().variable(id).domainSize();

        if (dSize != it->second.size()) continue;

        // GUM_ERROR(OperationNotAllowed, "void InferenceEngine< GUM_SCALAR
        // >::insertModals( const std::map< std::string, std::vector< GUM_SCALAR
        // > >
        // &modals) : modalities does not respect variable cardinality : " <<
        // _credalNet->current_bn().variable( id ).name() << " : " << dSize << "
        // != "
        // << it->second.size());

        _modal.insert(it->first, it->second);   //[ it->first ] = it->second;
     }

     //_modal = modals;

     // NOTE(review): a trailing statement (orig. 222, presumably
     // _initExpectations()) was lost in extraction — confirm upstream.
  }
224 
  template < typename GUM_SCALAR >
  // Inserts evidence from a map (variable name -> likelihood vector).
  // Unknown variable names are reported and skipped.
  // NOTE(review): the signature line (orig. 226, void insertEvidence(const
  // std::map<...>& eviMap)) was lost in extraction.
     const std::map< std::string, std::vector< GUM_SCALAR > >& eviMap) {
     if (!_evidence.empty()) _evidence.clear();

     for (auto it = eviMap.cbegin(), theEnd = eviMap.cend(); it != theEnd; ++it) {
        NodeId id;

        try {
           id = _credalNet->current_bn().idFromName(it->first);
        } catch (NotFound& err) {
           GUM_SHOWERROR(err);
           continue;
        }

        _evidence.insert(id, it->second);
     }
  }
243 
  // check that observed variables DO exists in the network (otherwise Lazy
  // report
  // an error and app crash)
  template < typename GUM_SCALAR >
  // Inserts evidence from a NodeProperty (node id -> likelihood vector),
  // keeping only ids that exist in the current network.
  // NOTE(review): the signature line (orig. 248, void insertEvidence(const
  // NodeProperty<...>& evidence)) was lost in extraction.
     const NodeProperty< std::vector< GUM_SCALAR > >& evidence) {
     if (!_evidence.empty()) _evidence.clear();

     // use cbegin() to get const_iterator when available in aGrUM hashtables
     for (const auto& elt : evidence) {
        try {
           // existence check only: throws NotFound for unknown ids
           _credalNet->current_bn().variable(elt.first);
        } catch (NotFound& err) {
           GUM_SHOWERROR(err);
           continue;
        }

        _evidence.insert(elt.first, elt.second);
     }
  }
264 
  template < typename GUM_SCALAR >
  void
  // Reads evidence from a text file: skips to the "[EVIDENCE]" section, then
  // parses one "name v0 v1 ..." line per observed variable until "[QUERY]"
  // or end of file. Unknown names are reported and skipped.
  // NOTE(review): the signature line (orig. 267,
  // InferenceEngine< GUM_SCALAR >::insertEvidenceFile(const std::string&
  // path) {) was lost in extraction.
     std::ifstream evi_stream(path.c_str(), std::ios::in);

     if (!evi_stream.good()) {
        // NOTE(review): the GUM_ERROR(...) opening line (orig. 271) was lost
        // in extraction.
           "void InferenceEngine< GUM_SCALAR "
           ">::insertEvidence(const std::string & path) : could not "
           "open input file : "
              << path);
     }

     if (!_evidence.empty()) _evidence.clear();

     std::string line, tmp;
     char *      cstr, *p;

     // skip everything before the [EVIDENCE] section marker
     while (evi_stream.good() && std::strcmp(line.c_str(), "[EVIDENCE]") != 0) {
        getline(evi_stream, line);
     }

     while (evi_stream.good()) {
        getline(evi_stream, line);

        if (std::strcmp(line.c_str(), "[QUERY]") == 0) break;

        if (line.size() == 0) continue;

        // strtok needs a mutable buffer
        cstr = new char[line.size() + 1];
        strcpy(cstr, line.c_str());

        p = strtok(cstr, " ");
        tmp = p;

        // if user input is wrong
        NodeId node = -1;

        try {
           node = _credalNet->current_bn().idFromName(tmp);
        } catch (NotFound& err) {
           GUM_SHOWERROR(err);
           continue;
        }

        std::vector< GUM_SCALAR > values;
        p = strtok(nullptr, " ");

        while (p != nullptr) {
           values.push_back(GUM_SCALAR(atof(p)));
           p = strtok(nullptr, " ");
        }   // end of : line

        _evidence.insert(node, values);

        // p is nullptr here, so this delete[] is a harmless no-op
        delete[] p;
        delete[] cstr;
     }   // end of : file

     evi_stream.close();
  }
326 
  template < typename GUM_SCALAR >
  // Inserts query variables/states from a NodeProperty (node id -> one bool
  // per modality), keeping only ids that exist in the current network.
  // NOTE(review): the signature line (orig. 328, void insertQuery(const
  // NodeProperty< std::vector< bool > >& query)) was lost in extraction.
     const NodeProperty< std::vector< bool > >& query) {
     if (!_query.empty()) _query.clear();

     for (const auto& elt : query) {
        try {
           // existence check only: throws NotFound for unknown ids
           _credalNet->current_bn().variable(elt.first);
        } catch (NotFound& err) {
           GUM_SHOWERROR(err);
           continue;
        }

        _query.insert(elt.first, elt.second);
     }
  }
343 
  template < typename GUM_SCALAR >
  // Reads query variables/states from a text file: skips to the "[QUERY]"
  // section, then parses one "name [mod...]" line per variable until
  // "[EVIDENCE]" or end of file. A name alone queries all modalities.
  void InferenceEngine< GUM_SCALAR >::insertQueryFile(const std::string& path) {
     std::ifstream evi_stream(path.c_str(), std::ios::in);

     if (!evi_stream.good()) {
        // NOTE(review): the GUM_ERROR(...) opening line (orig. 349) was lost
        // in extraction.
           "void InferenceEngine< GUM_SCALAR >::insertQuery(const "
           "std::string & path) : could not open input file : "
              << path);
     }

     if (!_query.empty()) _query.clear();

     std::string line, tmp;
     char *      cstr, *p;

     // skip everything before the [QUERY] section marker
     while (evi_stream.good() && std::strcmp(line.c_str(), "[QUERY]") != 0) {
        getline(evi_stream, line);
     }

     while (evi_stream.good()) {
        getline(evi_stream, line);

        if (std::strcmp(line.c_str(), "[EVIDENCE]") == 0) break;

        if (line.size() == 0) continue;

        // strtok needs a mutable buffer
        cstr = new char[line.size() + 1];
        strcpy(cstr, line.c_str());

        p = strtok(cstr, " ");
        tmp = p;

        // if user input is wrong
        NodeId node = -1;

        try {
           node = _credalNet->current_bn().idFromName(tmp);
        } catch (NotFound& err) {
           GUM_SHOWERROR(err);
           continue;
        }

        auto dSize = _credalNet->current_bn().variable(node).domainSize();

        p = strtok(nullptr, " ");

        if (p == nullptr) {
           // no modality listed: query every modality of this variable
           _query.insert(node, std::vector< bool >(dSize, true));
        } else {
           std::vector< bool > values(dSize, false);

           while (p != nullptr) {
              if ((Size)atoi(p) >= dSize)
                 // NOTE(review): the GUM_ERROR(...) opening line (orig. 398)
                 // was lost in extraction.
                 "void InferenceEngine< GUM_SCALAR "
                 ">::insertQuery(const std::string & path) : "
                 "query modality is higher or equal to "
                 "cardinality");

              values[atoi(p)] = true;
              p = strtok(nullptr, " ");
           }   // end of : line

           _query.insert(node, values);
        }

        // p is nullptr on every path reaching here, so this delete[] is a
        // harmless no-op
        delete[] p;
        delete[] cstr;
     }   // end of : file

     evi_stream.close();
  }
417 
418  template < typename GUM_SCALAR >
419  const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::marginalMin(
420  const std::string& varName) const {
421  try {
422  return _marginalMin[_credalNet->current_bn().idFromName(varName)];
423  } catch (NotFound& err) { throw(err); }
424  }
425 
426  template < typename GUM_SCALAR >
427  const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::marginalMax(
428  const std::string& varName) const {
429  try {
430  return _marginalMax[_credalNet->current_bn().idFromName(varName)];
431  } catch (NotFound& err) { throw(err); }
432  }
433 
  template < typename GUM_SCALAR >
  const std::vector< GUM_SCALAR >&
  // Get the lower marginals of a node, by id.
  // NOTE(review): the signature continuation (orig. 436,
  // InferenceEngine< GUM_SCALAR >::marginalMin(const NodeId id) const {)
  // was lost in extraction. The catch-and-rethrow below copies the
  // exception; a bare `throw;` (or no try/catch) would be preferable.
     try {
        return _marginalMin[id];
     } catch (NotFound& err) { throw(err); }
  }
441 
  template < typename GUM_SCALAR >
  const std::vector< GUM_SCALAR >&
  // Get the upper marginals of a node, by id.
  // NOTE(review): the signature continuation (orig. 444,
  // InferenceEngine< GUM_SCALAR >::marginalMax(const NodeId id) const {)
  // was lost in extraction. The catch-and-rethrow below copies the
  // exception; a bare `throw;` (or no try/catch) would be preferable.
     try {
        return _marginalMax[id];
     } catch (NotFound& err) { throw(err); }
  }
449 
  template < typename GUM_SCALAR >
  // Get the lower expectation of a variable, by name.
  // NOTE(review): the signature start (orig. 451, const GUM_SCALAR&
  // InferenceEngine< GUM_SCALAR >::expectationMin() was lost in extraction.
     const std::string& varName) const {
     try {
        return _expectationMin[_credalNet->current_bn().idFromName(varName)];
     } catch (NotFound& err) { throw(err); }
  }
457 
  template < typename GUM_SCALAR >
  // Get the upper expectation of a variable, by name.
  // NOTE(review): the signature start (orig. 459, const GUM_SCALAR&
  // InferenceEngine< GUM_SCALAR >::expectationMax() was lost in extraction.
     const std::string& varName) const {
     try {
        return _expectationMax[_credalNet->current_bn().idFromName(varName)];
     } catch (NotFound& err) { throw(err); }
  }
465 
  template < typename GUM_SCALAR >
  const GUM_SCALAR&
  // Get the lower expectation of a node, by id.
  // NOTE(review): the signature continuation (orig. 468,
  // InferenceEngine< GUM_SCALAR >::expectationMin(const NodeId id) const {)
  // was lost in extraction.
     try {
        return _expectationMin[id];
     } catch (NotFound& err) { throw(err); }
  }
473 
  template < typename GUM_SCALAR >
  const GUM_SCALAR&
  // Get the upper expectation of a node, by id.
  // NOTE(review): the signature continuation (orig. 476,
  // InferenceEngine< GUM_SCALAR >::expectationMax(const NodeId id) const {)
  // was lost in extraction.
     try {
        return _expectationMax[id];
     } catch (NotFound& err) { throw(err); }
  }
481 
  template < typename GUM_SCALAR >
  // Get the lower dynamic expectation for a variable name prefix (without
  // the time-step suffix). Requires _dynamicExpectations() to have run.
  const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::dynamicExpMin(
     const std::string& varName) const {
     std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
                          "GUM_SCALAR >::dynamicExpMin ( const std::string & "
                          "varName ) const : ";

     if (_dynamicExpMin.empty())
        // NOTE(review): the GUM_ERROR(...) opening line (orig. 490) was lost
        // in extraction.
           errTxt + "_dynamicExpectations() needs to be called before");

     if (!_dynamicExpMin.exists(
            varName) /*_dynamicExpMin.find(varName) == _dynamicExpMin.end()*/)
        GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName);

     return _dynamicExpMin[varName];
  }
499 
  template < typename GUM_SCALAR >
  // Get the upper dynamic expectation for a variable name prefix (without
  // the time-step suffix). Requires _dynamicExpectations() to have run.
  const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::dynamicExpMax(
     const std::string& varName) const {
     std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
                          "GUM_SCALAR >::dynamicExpMax ( const std::string & "
                          "varName ) const : ";

     if (_dynamicExpMax.empty())
        // NOTE(review): the GUM_ERROR(...) opening line (orig. 508) was lost
        // in extraction.
           errTxt + "_dynamicExpectations() needs to be called before");

     if (!_dynamicExpMax.exists(
            // the comment below is a stale copy-paste from dynamicExpMin
            varName) /*_dynamicExpMin.find(varName) == _dynamicExpMin.end()*/)
        GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName);

     return _dynamicExpMax[varName];
  }
517 
  template < typename GUM_SCALAR >
  const std::vector< std::vector< GUM_SCALAR > >&
  // Get the credal-set vertices of a node (only filled when _storeVertices
  // is enabled). NOTE(review): the signature continuation (orig. 520,
  // InferenceEngine< GUM_SCALAR >::vertices(const NodeId id) const {) was
  // lost in extraction.
     return _marginalSets[id];
  }
523 
  template < typename GUM_SCALAR >
  // Saves marginals to a file, one "name modality min max" line per modality.
  // NOTE(review): the signature start (orig. 525, void
  // InferenceEngine< GUM_SCALAR >::saveMarginals() was lost in extraction.
     const std::string& path) const {
     std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);

     if (!m_stream.good()) {
        // NOTE(review): the GUM_ERROR(...) opening line (orig. 530) was lost
        // in extraction.
           "void InferenceEngine< GUM_SCALAR >::saveMarginals(const "
           "std::string & path) const : could not open output file "
           ": "
              << path);
     }

     for (const auto& elt : _marginalMin) {
        Size esize = Size(elt.second.size());

        for (Size mod = 0; mod < esize; mod++) {
           m_stream << _credalNet->current_bn().variable(elt.first).name() << " "
                    << mod << " " << (elt.second)[mod] << " "
                    << _marginalMax[elt.first][mod] << std::endl;
        }
     }

     m_stream.close();
  }
549 
  template < typename GUM_SCALAR >
  // Saves dynamic expectations to a file: one line per variable for the
  // minima, then one line per variable for the maxima. A silent no-op when
  // _dynamicExpectations() has not produced anything.
  // NOTE(review): the signature start (orig. 551, void
  // InferenceEngine< GUM_SCALAR >::saveExpectations() was lost in extraction.
     const std::string& path) const {
     if (_dynamicExpMin.empty())   //_modal.empty())
        return;

     // else not here, to keep the const (natural with a saving process)
     // else if(_dynamicExpMin.empty() || _dynamicExpMax.empty())
     //_dynamicExpectations(); // works with or without a dynamic network

     std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);

     if (!m_stream.good()) {
        // NOTE(review): the GUM_ERROR(...) opening line (orig. 563) was lost
        // in extraction.
           "void InferenceEngine< GUM_SCALAR "
           ">::saveExpectations(const std::string & path) : could "
           "not open output file : "
              << path);
     }

     for (const auto& elt : _dynamicExpMin) {
        m_stream << elt.first;   // it->first;

        // iterates over a vector
        for (const auto& elt2 : elt.second) {
           m_stream << " " << elt2;
        }

        m_stream << std::endl;
     }

     for (const auto& elt : _dynamicExpMax) {
        m_stream << elt.first;

        // iterates over a vector
        for (const auto& elt2 : elt.second) {
           m_stream << " " << elt2;
        }

        m_stream << std::endl;
     }

     m_stream.close();
  }
594 
  template < typename GUM_SCALAR >
  // Formats every node's [min, max] marginal interval per modality, tagging
  // queried modalities with "QUERY".
  // NOTE(review): the signature line (orig. 596, std::string
  // InferenceEngine< GUM_SCALAR >::toString() const {) was lost in
  // extraction.
     std::stringstream output;
     output << std::endl;

     // use cbegin() when available
     for (const auto& elt : _marginalMin) {
        Size esize = Size(elt.second.size());

        for (Size mod = 0; mod < esize; mod++) {
           output << "P(" << _credalNet->current_bn().variable(elt.first).name()
                  << "=" << mod << "|e) = [ ";
           output << _marginalMin[elt.first][mod] << ", "
                  << _marginalMax[elt.first][mod] << " ]";

           if (!_query.empty())
              if (_query.exists(elt.first) && _query[elt.first][mod])
                 output << " QUERY";

           output << std::endl;
        }

        output << std::endl;
     }

     return output.str();
  }
622 
  template < typename GUM_SCALAR >
  void
  // Saves each node's credal-set vertices to a file, one "[v0,v1,...]" line
  // per vertex under the variable's name.
  InferenceEngine< GUM_SCALAR >::saveVertices(const std::string& path) const {
     std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);

     if (!m_stream.good()) {
        // NOTE(review): the GUM_ERROR(...) opening line (orig. 629) was lost
        // in extraction. Also note the typo "outpul" in the runtime message.
           "void InferenceEngine< GUM_SCALAR >::saveVertices(const "
           "std::string & path) : could not open outpul file : "
              << path);
     }

     for (const auto& elt : _marginalSets) {
        m_stream << _credalNet->current_bn().variable(elt.first).name()
                 << std::endl;

        for (const auto& elt2 : elt.second) {
           m_stream << "[";
           bool first = true;

           for (const auto& elt3 : elt2) {
              // NOTE(review): bug — `first = false` sits inside `if (!first)`,
              // so `first` can never become false and the "," separator is
              // never written; the assignment should be outside the if.
              if (!first) {
                 m_stream << ",";
                 first = false;
              }

              m_stream << elt3;
           }

           m_stream << "]\n";
        }
     }

     m_stream.close();
  }
658 
  template < typename GUM_SCALAR >
  // Initializes lower marginals to 1 and upper marginals to 0 (inverted
  // bounds, tightened during inference) for every node.
  // NOTE(review): the signature and any leading clear() calls (orig.
  // 660-664, void _initMarginals()) were lost in extraction.
     for (auto node : _credalNet->current_bn().nodes()) {
        auto dSize = _credalNet->current_bn().variable(node).domainSize();
        _marginalMin.insert(node, std::vector< GUM_SCALAR >(dSize, 1));
        _oldMarginalMin.insert(node, std::vector< GUM_SCALAR >(dSize, 1));

        _marginalMax.insert(node, std::vector< GUM_SCALAR >(dSize, 0));
        _oldMarginalMax.insert(node, std::vector< GUM_SCALAR >(dSize, 0));
     }
  }
675 
  template < typename GUM_SCALAR >
  // Initializes an empty credal-set vertex list per node, when vertex
  // storage is enabled. NOTE(review): the signature and any leading clear()
  // (orig. 677-678, void _initMarginalSets()) were lost in extraction.
     if (!_storeVertices) return;

     for (auto node : _credalNet->current_bn().nodes())
        _marginalSets.insert(node, std::vector< std::vector< GUM_SCALAR > >());
  }
685 
  // since only monitored variables in _modal will be alble to compute
  // expectations, it is useless to initialize those for all variables
  // _modal variables will always be checked further, so it is not necessary
  // to
  // check it here, but doing so will use less memory
  template < typename GUM_SCALAR >
  // Initializes expectations for monitored variables only: the lower bound
  // starts at the largest modality value and the upper bound at the
  // smallest, so inference can only tighten them.
  // NOTE(review): the signature and any leading clear() calls (orig.
  // 692-694, void _initExpectations()) were lost in extraction. Note also
  // that back()/front() encode "max/min modality value" only if _modal
  // vectors are sorted — TODO confirm against the modality file format.
     if (_modal.empty()) return;

     for (auto node : _credalNet->current_bn().nodes()) {
        std::string var_name, time_step;

        // strip the "_t" time-step suffix to get the dynamic-variable prefix
        var_name = _credalNet->current_bn().variable(node).name();
        auto delim = var_name.find_first_of("_");
        var_name = var_name.substr(0, delim);

        if (!_modal.exists(var_name)) continue;

        _expectationMin.insert(node, _modal[var_name].back());
        _expectationMax.insert(node, _modal[var_name].front());
     }
  }
711 
  template < typename GUM_SCALAR >
  // Public wrapper that computes dynamic expectations.
  // NOTE(review): the signature and body (orig. 713-714, void
  // dynamicExpectations() { _dynamicExpectations(); per the member index)
  // were lost in extraction.
  }
716 
  template < typename GUM_SCALAR >
  // Rearranges per-node expectations into per-variable-prefix time series:
  // groups _expectationMin/Max by "name_t" prefix, indexed by time step.
  // NOTE(review): the signature line (orig. 718, void
  // _dynamicExpectations()) was lost in extraction.
     // no modals, no expectations computed during inference
     if (_expectationMin.empty() || _modal.empty()) return;

     // already called by the algorithm or the user
     if (_dynamicExpMax.size() > 0 && _dynamicExpMin.size() > 0) return;

     // typedef typename std::map< int, GUM_SCALAR > innerMap;
     using innerMap = typename gum::HashTable< int, GUM_SCALAR >;

     // typedef typename std::map< std::string, innerMap > outerMap;
     using outerMap = typename gum::HashTable< std::string, innerMap >;

     // typedef typename std::map< std::string, std::vector< GUM_SCALAR > >
     // mod;

     // si non dynamique, sauver directement _expectationMin et Max (revient au
     // meme
     // mais plus rapide)
     // [translation: if not dynamic, save _expectationMin/Max directly — same
     // result but faster]
     outerMap expectationsMin, expectationsMax;

     for (const auto& elt : _expectationMin) {
        std::string var_name, time_step;

        // split "name_t" into prefix and time step
        var_name = _credalNet->current_bn().variable(elt.first).name();
        auto delim = var_name.find_first_of("_");
        time_step = var_name.substr(delim + 1, var_name.size());
        var_name = var_name.substr(0, delim);

        // to be sure (don't store not monitored variables' expectations)
        // although it
        // should be taken care of before this point
        if (!_modal.exists(var_name)) continue;

        // NOTE(review): relies on getWithDefault returning a mutable
        // reference to the (possibly just-inserted) entry — confirm this
        // holds for gum::HashTable, otherwise these assignments are lost.
        expectationsMin.getWithDefault(var_name, innerMap())
              .getWithDefault(atoi(time_step.c_str()), 0) =
           elt.second;   // we iterate with min iterators
        expectationsMax.getWithDefault(var_name, innerMap())
              .getWithDefault(atoi(time_step.c_str()), 0) =
           _expectationMax[elt.first];
     }

     // flatten each per-prefix map (time step -> value) into a vector
     for (const auto& elt : expectationsMin) {
        typename std::vector< GUM_SCALAR > dynExp(elt.second.size());

        for (const auto& elt2 : elt.second)
           dynExp[elt2.first] = elt2.second;

        _dynamicExpMin.insert(elt.first, dynExp);
     }

     for (const auto& elt : expectationsMax) {
        typename std::vector< GUM_SCALAR > dynExp(elt.second.size());

        for (const auto& elt2 : elt.second) {
           dynExp[elt2.first] = elt2.second;
        }

        _dynamicExpMax.insert(elt.first, dynExp);
     }
  }
779 
  template < typename GUM_SCALAR >
  // Builds the _t0/_t1 clusters for dynamic networks: t=0 variables become
  // _t0 keys, t=1 variables join a matching t=0 cluster (same prefix and
  // CPT domain size) or become _t1 keys, and t>1 variables are appended to
  // whichever cluster matches. Also records the max time step in _timeSteps.
  // NOTE(review): the signature line (orig. 781, void _repetitiveInit())
  // was lost in extraction.
     _timeSteps = 0;
     _t0.clear();
     _t1.clear();

     // t = 0 vars belongs to _t0 as keys
     for (auto node : _credalNet->current_bn().dag().nodes()) {
        std::string var_name = _credalNet->current_bn().variable(node).name();
        auto delim = var_name.find_first_of("_");

        if (delim > var_name.size()) {
           // NOTE(review): the GUM_ERROR(...) opening line (orig. 792) was
           // lost in extraction.
           "void InferenceEngine< GUM_SCALAR "
           ">::_repetitiveInit() : the network does not "
           "appear to be dynamic");
        }

        std::string time_step = var_name.substr(delim + 1, 1);

        if (time_step.compare("0") == 0) _t0.insert(node, std::vector< NodeId >());
     }

     // t = 1 vars belongs to either _t0 as member value or _t1 as keys
     for (const auto& node : _credalNet->current_bn().dag().nodes()) {
        std::string var_name = _credalNet->current_bn().variable(node).name();
        auto delim = var_name.find_first_of("_");
        std::string time_step = var_name.substr(delim + 1, var_name.size());
        var_name = var_name.substr(0, delim);
        delim = time_step.find_first_of("_");
        time_step = time_step.substr(0, delim);

        if (time_step.compare("1") == 0) {
           bool found = false;

           for (const auto& elt : _t0) {
              std::string var_0_name =
                 _credalNet->current_bn().variable(elt.first).name();
              delim = var_0_name.find_first_of("_");
              var_0_name = var_0_name.substr(0, delim);

              if (var_name.compare(var_0_name) == 0) {
                 const Potential< GUM_SCALAR >* potential(
                    &_credalNet->current_bn().cpt(node));
                 const Potential< GUM_SCALAR >* potential2(
                    &_credalNet->current_bn().cpt(elt.first));

                 // same CPT shape => same cluster; otherwise its own cluster
                 if (potential->domainSize() == potential2->domainSize())
                    _t0[elt.first].push_back(node);
                 else
                    _t1.insert(node, std::vector< NodeId >());

                 found = true;
                 break;
              }
           }

           if (!found) { _t1.insert(node, std::vector< NodeId >()); }
        }
     }

     // t > 1 vars belongs to either _t0 or _t1 as member value
     // remember _timeSteps
     for (auto node : _credalNet->current_bn().dag().nodes()) {
        std::string var_name = _credalNet->current_bn().variable(node).name();
        auto delim = var_name.find_first_of("_");
        std::string time_step = var_name.substr(delim + 1, var_name.size());
        var_name = var_name.substr(0, delim);
        delim = time_step.find_first_of("_");
        time_step = time_step.substr(0, delim);

        if (time_step.compare("0") != 0 && time_step.compare("1") != 0) {
           // keep max time_step
           if (atoi(time_step.c_str()) > _timeSteps)
              _timeSteps = atoi(time_step.c_str());

           // NOTE(review): this outer var_0_name is never used — both loops
           // below declare their own shadowing copies.
           std::string var_0_name;
           bool found = false;

           for (const auto& elt : _t0) {
              std::string var_0_name =
                 _credalNet->current_bn().variable(elt.first).name();
              delim = var_0_name.find_first_of("_");
              var_0_name = var_0_name.substr(0, delim);

              if (var_name.compare(var_0_name) == 0) {
                 const Potential< GUM_SCALAR >* potential(
                    &_credalNet->current_bn().cpt(node));
                 const Potential< GUM_SCALAR >* potential2(
                    &_credalNet->current_bn().cpt(elt.first));

                 if (potential->domainSize() == potential2->domainSize()) {
                    _t0[elt.first].push_back(node);
                    found = true;
                    break;
                 }
              }
           }

           if (!found) {
              for (const auto& elt : _t1) {
                 std::string var_0_name =
                    _credalNet->current_bn().variable(elt.first).name();
                 auto delim = var_0_name.find_first_of("_");
                 var_0_name = var_0_name.substr(0, delim);

                 if (var_name.compare(var_0_name) == 0) {
                    const Potential< GUM_SCALAR >* potential(
                       &_credalNet->current_bn().cpt(node));
                    const Potential< GUM_SCALAR >* potential2(
                       &_credalNet->current_bn().cpt(elt.first));

                    if (potential->domainSize() == potential2->domainSize()) {
                       _t1[elt.first].push_back(node);
                       break;
                    }
                 }
              }
           }
        }
     }
  }
902 
  template < typename GUM_SCALAR >
  // Updates a node's lower/upper expectations from one credal-set vertex:
  // exp = sum_m vertex[m] * modality_value[m], then widen the [min, max]
  // interval if exp falls outside it.
  // NOTE(review): the signature start (orig. 904, void
  // InferenceEngine< GUM_SCALAR >::_updateExpectations() was lost in
  // extraction.
     const NodeId& id, const std::vector< GUM_SCALAR >& vertex) {
     std::string var_name = _credalNet->current_bn().variable(id).name();
     auto delim = var_name.find_first_of("_");

     // strip the time-step suffix: modalities are keyed by variable prefix
     var_name = var_name.substr(0, delim);

     if (_modal.exists(var_name) /*_modal.find(var_name) != _modal.end()*/) {
        GUM_SCALAR exp = 0;
        auto vsize = vertex.size();

        for (Size mod = 0; mod < vsize; mod++)
           exp += vertex[mod] * _modal[var_name][mod];

        if (exp > _expectationMax[id]) _expectationMax[id] = exp;

        if (exp < _expectationMin[id]) _expectationMin[id] = exp;
     }
  }
923 
  template < typename GUM_SCALAR >
  // Adds a vertex to a node's credal set unless an (almost) equal vertex is
  // already present; optionally meant to eliminate redundant vertices via lrs.
  // NOTE(review): the signature start (orig. 925, void
  // InferenceEngine< GUM_SCALAR >::_updateCredalSets() was lost in
  // extraction.
     const NodeId&                    id,
     const std::vector< GUM_SCALAR >& vertex,
     const bool&                      elimRedund) {
     auto& nodeCredalSet = _marginalSets[id];
     auto dsize = vertex.size();

     bool eq = true;

     // look for an existing vertex equal to `vertex` within 1e-6 per coordinate
     for (auto it = nodeCredalSet.cbegin(), itEnd = nodeCredalSet.cend();
          it != itEnd;
          ++it) {
        eq = true;

        for (Size i = 0; i < dsize; i++) {
           if (std::fabs(vertex[i] - (*it)[i]) > 1e-6) {
              eq = false;
              break;
           }
        }

        if (eq) break;
     }

     if (!eq || nodeCredalSet.size() == 0) {
        nodeCredalSet.push_back(vertex);
        return;
     } else
        return;

     // NOTE(review): BUG — both branches above return unconditionally, so
     // everything below (polytope pruning and lrs redundancy elimination,
     // including the elimRedund parameter) is unreachable dead code.
     // Confirm against upstream whether the first `return` should be removed.

     // because of next lambda return condition
     if (nodeCredalSet.size() == 1) return;

     // check that the point and all previously added ones are not inside the
     // actual
     // polytope
     auto itEnd = std::remove_if(
        nodeCredalSet.begin(),
        nodeCredalSet.end(),
        [&](const std::vector< GUM_SCALAR >& v) -> bool {
           for (auto jt = v.cbegin(),
                     jtEnd = v.cend(),
                     minIt = _marginalMin[id].cbegin(),
                     minItEnd = _marginalMin[id].cend(),
                     maxIt = _marginalMax[id].cbegin(),
                     maxItEnd = _marginalMax[id].cend();
                jt != jtEnd && minIt != minItEnd && maxIt != maxItEnd;
                ++jt, ++minIt, ++maxIt) {
              if ((std::fabs(*jt - *minIt) < 1e-6 || std::fabs(*jt - *maxIt) < 1e-6)
                  && std::fabs(*minIt - *maxIt) > 1e-6)
                 return false;
           }
           return true;
        });

     nodeCredalSet.erase(itEnd, nodeCredalSet.end());

     // we need at least 2 points to make a convex combination
     if (!elimRedund || nodeCredalSet.size() <= 2) return;

     // there may be points not inside the polytope but on one of it's facet,
     // meaning it's still a convex combination of vertices of this facet. Here
     // we
     // need lrs.
     LRSWrapper< GUM_SCALAR > lrsWrapper;
     lrsWrapper.setUpV((unsigned int)dsize, (unsigned int)(nodeCredalSet.size()));

     for (const auto& vtx : nodeCredalSet)
        lrsWrapper.fillV(vtx);

     lrsWrapper.elimRedundVrep();

     _marginalSets[id] = lrsWrapper.getOutput();
  }
999 
  template < typename GUM_SCALAR >
  // Returns the _t0 cluster map. NOTE(review): the signature lines (orig.
  // 1001-1002, const NodeProperty< std::vector< NodeId > >& getT0Cluster()
  // const) were lost in extraction.
     return _t0;
  }
1005 
  template < typename GUM_SCALAR >
  // Returns the _t1 cluster map. NOTE(review): the signature lines (orig.
  // 1007-1008, const NodeProperty< std::vector< NodeId > >& getT1Cluster()
  // const) were lost in extraction.
     return _t1;
  }
1011 
  template < typename GUM_SCALAR >
  // Computes the approximation-scheme epsilon: the largest absolute change
  // between old and new lower/upper marginals, over all nodes and
  // modalities (parallelized with OpenMP). Also rolls the current marginals
  // into the "old" buffers. NOTE(review): the signature line (orig. 1013,
  // const GUM_SCALAR _computeEpsilon()) and one line inside the parallel
  // region (orig. 1020) were lost in extraction.
     GUM_SCALAR eps = 0;
#pragma omp parallel
     {
        GUM_SCALAR tEps = 0;   // per-thread maximum delta
        GUM_SCALAR delta;

        int nsize = int(_marginalMin.size());

#pragma omp for

        for (int i = 0; i < nsize; i++) {
           auto dSize = _marginalMin[i].size();

           for (Size j = 0; j < dSize; j++) {
              // on min
              delta = _marginalMin[i][j] - _oldMarginalMin[i][j];
              delta = (delta < 0) ? (-delta) : delta;
              tEps = (tEps < delta) ? delta : tEps;

              // on max
              delta = _marginalMax[i][j] - _oldMarginalMax[i][j];
              delta = (delta < 0) ? (-delta) : delta;
              tEps = (tEps < delta) ? delta : tEps;

              _oldMarginalMin[i][j] = _marginalMin[i][j];
              _oldMarginalMax[i][j] = _marginalMax[i][j];
           }
        }   // end of : all variables

        // reduce the per-thread maxima into the shared eps
#pragma omp critical(epsilon_max)
        {
#pragma omp flush(eps)
           eps = (eps < tEps) ? tEps : eps;
        }
     }

     return eps;
  }
1053  } // namespace credal
1054 } // namespace gum
virtual ~InferenceEngine()
Destructor.
const NodeProperty< std::vector< NodeId > > & getT1Cluster() const
Get the _t1 cluster.
aGrUM's Potential is a multi-dimensional array with tensor operators.
Definition: potential.h:57
const std::vector< GUM_SCALAR > & marginalMax(const NodeId id) const
Get the upper marginals of a given node id.
const GUM_SCALAR & expectationMax(const NodeId id) const
Get the upper expectation of a given node id.
margi _oldMarginalMin
Old lower marginals used to compute epsilon.
void insertModals(const std::map< std::string, std::vector< GUM_SCALAR > > &modals)
Insert variables modalities from map to compute expectations.
Approximation Scheme.
Size size() const noexcept
Returns the number of elements stored into the hashtable.
void insertModalsFile(const std::string &path)
Insert variables modalities from file to compute expectations.
bool _storeBNOpt
Iterations limit stopping rule used by some algorithms such as CNMonteCarloSampling.
#define GUM_SHOWERROR(e)
Definition: exceptions.h:58
void _dynamicExpectations()
Rearrange lower and upper expectations to suit dynamic networks.
dynExpe _dynamicExpMin
Lower dynamic expectations.
bool repetitiveInd() const
Get the current independence status.
credalSet _marginalSets
Credal sets vertices, if enabled.
virtual void insertEvidenceFile(const std::string &path)
Insert evidence from file.
const std::vector< GUM_SCALAR > & marginalMin(const NodeId id) const
Get the lower marginals of a given node id.
void insertQueryFile(const std::string &path)
Insert query variables states from file.
margi _marginalMin
Lower marginals.
bool storeVertices() const
Get the number of iterations without changes used to stop some algorithms.
int _timeSteps
The number of time steps of this network (only usefull for dynamic networks).
bool exists(const Key &key) const
Checks whether there exists an element with a given key in the hashtable.
const std::vector< std::vector< GUM_SCALAR > > & vertices(const NodeId id) const
Get the vertice of a given node id.
margi _oldMarginalMax
Old upper marginals used to compute epsilon.
VarMod2BNsMap< GUM_SCALAR > * getVarMod2BNsMap()
Get optimum IBayesNet.
const std::vector< GUM_SCALAR > & dynamicExpMax(const std::string &varName) const
Get the upper dynamic expectation of a given variable prefix (without the time step included...
void dynamicExpectations()
Compute dynamic expectations.
const GUM_SCALAR _computeEpsilon()
Compute approximation scheme epsilon using the old marginals and the new ones.
gum is the global namespace for all aGrUM entities
Definition: agrum.h:25
void saveVertices(const std::string &path) const
Saves vertices to file.
void saveMarginals(const std::string &path) const
Saves marginals to file.
Abstract class representing CredalNet inference engines.
void _repetitiveInit()
Initialize _t0 and _t1 clusters.
expe _expectationMax
Upper expectations, if some variables modalities were inserted.
InferenceEngine(const CredalNet< GUM_SCALAR > &credalNet)
Constructor.
The class for generic Hash Tables.
Definition: hashTable.h:676
cluster _t0
Clusters of nodes used with dynamic networks.
std::string toString() const
Print all nodes' marginals to standard output.
void fillV(const std::vector< GUM_SCALAR > &vertex)
Creates the V-representation of a polytope by adding a vertex to the problem input __input...
Class template representing a Credal Network.
Definition: credalNet.h:87
Class template acting as a wrapper for Lexicographic Reverse Search by David Avis.
Definition: LrsWrapper.h:105
VarMod2BNsMap< GUM_SCALAR > _dbnOpt
Object used to efficiently store optimal bayes net during inference, for some algorithms.
const CredalNet< GUM_SCALAR > * _credalNet
A pointer to the Credal Net used.
const const_iterator & cend() const noexcept
Returns the unsafe const_iterator pointing to the end of the hashtable.
dynExpe _dynamicExpMax
Upper dynamic expectations.
void _initMarginals()
Initialize lower and upper old marginals and marginals before inference, with the lower marginal bein...
void insertQuery(const NodeProperty< std::vector< bool > > &query)
Insert query variables and states from Property.
query _query
Holds the query nodes states.
bool _repetitiveInd
True if using repetitive independence ( dynamic network only ), False otherwise.
void setUpV(const Size &card, const Size &vertices)
Sets up a V-representation.
const_iterator cbegin() const
Returns an unsafe const_iterator pointing to the beginning of the hashtable.
dynExpe _modal
Variables modalities used to compute expectations.
void _updateCredalSets(const NodeId &id, const std::vector< GUM_SCALAR > &vertex, const bool &elimRedund=false)
Given a node id and one of its possible vertices, update its credal set.
Abstract class template representing a CredalNet inference engine.
void _initMarginalSets()
Initialize credal set vertices with empty sets.
const CredalNet< GUM_SCALAR > & credalNet()
Get this credal network.
margi _evidence
Holds observed variables states.
cluster _t1
Clusters of nodes used with dynamic networks.
void clear()
Removes all the elements in the hash table.
expe _expectationMin
Lower expectations, if some variables modalities were inserted.
void elimRedundVrep()
V-Redundancy elimination.
const NodeProperty< std::vector< NodeId > > & getT0Cluster() const
Get the _t0 cluster.
const matrix & getOutput() const
Get the output matrix solution of the problem.
bool _storeVertices
True if credal sets vertices are stored, False otherwise.
void setRepetitiveInd(const bool repetitive)
std::size_t Size
In aGrUM, hashed values are unsigned long int.
Definition: types.h:45
const GUM_SCALAR & expectationMin(const NodeId id) const
Get the lower expectation of a given node id.
value_type & insert(const Key &key, const Val &val)
Adds a new element (actually a copy of this element) into the hash table.
Class used to store optimum IBayesNet during some inference algorithms.
Definition: varMod2BNsMap.h:54
void _initExpectations()
Initialize lower and upper expectations before inference, with the lower expectation being initialize...
void saveExpectations(const std::string &path) const
Saves expectations to file.
bool empty() const noexcept
Indicates whether the hash table is empty.
void insertEvidence(const std::map< std::string, std::vector< GUM_SCALAR > > &eviMap)
Insert evidence from map.
Size NodeId
Type for node ids.
Definition: graphElements.h:97
void _updateExpectations(const NodeId &id, const std::vector< GUM_SCALAR > &vertex)
Given a node id and one of it&#39;s possible vertex obtained during inference, update this node lower and...
margi _marginalMax
Upper marginals.
#define GUM_ERROR(type, msg)
Definition: exceptions.h:52
virtual void eraseAllEvidence()
Erase all inference related data to perform another one.
const std::vector< GUM_SCALAR > & dynamicExpMin(const std::string &varName) const
Get the lower dynamic expectation of a given variable prefix (without the time step included...