aGrUM  0.20.2
a C++ library for (probabilistic) graphical models
inferenceEngine_tpl.h
Go to the documentation of this file.
1 /**
2  *
3  * Copyright 2005-2020 Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
4  * info_at_agrum_dot_org
5  *
6  * This library is free software: you can redistribute it and/or modify
7  * it under the terms of the GNU Lesser General Public License as published by
8  * the Free Software Foundation, either version 3 of the License, or
9  * (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14  * GNU Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public License
17  * along with this library. If not, see <http://www.gnu.org/licenses/>.
18  *
19  */
20 
21 
22 /** @file
23  * @brief implementation of the generic inference engine for credal networks
24  *
25  * @author Christophe GONZALES(@AMU) and Pierre-Henri WUILLEMIN(@LIP6)
26  */
27 #include <agrum/CN/inference/inferenceEngine.h>
28 #include <agrum/agrum.h>
29 
30 namespace gum {
31  namespace credal {
32 
33  /*template< typename GUM_SCALAR >
34  InferenceEngine< GUM_SCALAR >::InferenceEngine () : ApproximationScheme() {
35  std::cout << "InferenceEngine construct ()" << std::endl;
36  GUM_CONSTRUCTOR ( InferenceEngine );
37  }*/
38 
39  template < typename GUM_SCALAR >
40  InferenceEngine< GUM_SCALAR >::InferenceEngine(
41  const CredalNet< GUM_SCALAR >& credalNet) :
42  ApproximationScheme() {
43  credalNet_ = &credalNet;
44 
45  dbnOpt_.setCNet(credalNet);
46 
47  initMarginals_();
48 
49  GUM_CONSTRUCTOR(InferenceEngine);
50  }
51 
52  template < typename GUM_SCALAR >
55  }
56 
57  template < typename GUM_SCALAR >
58  const CredalNet< GUM_SCALAR >&
60  return *credalNet_;
61  }
62 
63  template < typename GUM_SCALAR >
65  evidence_.clear();
66  query_.clear();
67  /*
68  marginalMin_.clear();
69  marginalMax_.clear();
70  oldMarginalMin_.clear();
71  oldMarginalMax_.clear();
72  */
74  /*
75  expectationMin_.clear();
76  expectationMax_.clear();
77  */
79 
80  // marginalSets_.clear();
82 
85 
86  //_modal.clear();
87 
88  //_t0.clear();
89  //_t1.clear();
90  }
91 
92  /*
93  template< typename GUM_SCALAR >
94  void InferenceEngine< GUM_SCALAR >::setIterStop ( const int &iter_stop ) {
95  iterStop_ = iter_stop;
96  }*/
97 
98  template < typename GUM_SCALAR >
99  void InferenceEngine< GUM_SCALAR >::storeBNOpt(const bool value) {
100  storeBNOpt_ = value;
101  }
102 
103  template < typename GUM_SCALAR >
106 
107  if (value) initMarginalSets_();
108  }
109 
110  template < typename GUM_SCALAR >
112  bool oldValue = repetitiveInd_;
114 
115  // do not compute clusters more than once
117  }
118 
119  template < typename GUM_SCALAR >
121  return repetitiveInd_;
122  }
123  /*
124  template< typename GUM_SCALAR >
125  int InferenceEngine< GUM_SCALAR >::iterStop () const {
126  return iterStop_;
127  }*/
128 
129  template < typename GUM_SCALAR >
131  return storeVertices_;
132  }
133 
134  template < typename GUM_SCALAR >
136  return storeBNOpt_;
137  }
138 
139  template < typename GUM_SCALAR >
142  return &dbnOpt_;
143  }
144 
145  template < typename GUM_SCALAR >
148 
149  if (!mod_stream.good()) {
151  "void InferenceEngine< GUM_SCALAR "
152  ">::insertModals(const std::string & path) : "
153  "could not open input file : "
154  << path);
155  }
156 
157  if (!modal_.empty()) modal_.clear();
158 
159  std::string line, tmp;
160  char * cstr, *p;
161 
162  while (mod_stream.good()) {
164 
165  if (line.size() == 0) continue;
166 
167  cstr = new char[line.size() + 1];
168  strcpy(cstr, line.c_str());
169 
170  p = strtok(cstr, " ");
171  tmp = p;
172 
174  p = strtok(nullptr, " ");
175 
176  while (p != nullptr) {
178  p = strtok(nullptr, " ");
179  } // end of : line
180 
181  modal_.insert(tmp, values); //[tmp] = values;
182 
183  delete[] p;
184  delete[] cstr;
185  } // end of : file
186 
187  mod_stream.close();
188 
190  }
191 
192  template < typename GUM_SCALAR >
194  const std::map< std::string, std::vector< GUM_SCALAR > >& modals) {
195  if (!modal_.empty()) modal_.clear();
196 
197  for (auto it = modals.cbegin(), theEnd = modals.cend(); it != theEnd; ++it) {
198  NodeId id;
199 
200  try {
202  } catch (NotFound& err) {
204  continue;
205  }
206 
207  // check that modals are net compatible
209 
210  if (dSize != it->second.size()) continue;
211 
212  // GUM_ERROR(OperationNotAllowed, "void InferenceEngine< GUM_SCALAR
213  // >::insertModals( const std::map< std::string, std::vector< GUM_SCALAR
214  // > >
215  // &modals) : modalities does not respect variable cardinality : " <<
216  // credalNet_->current_bn().variable( id ).name() << " : " << dSize << "
217  // != "
218  // << it->second.size());
219 
220  modal_.insert(it->first, it->second); //[ it->first ] = it->second;
221  }
222 
223  //_modal = modals;
224 
226  }
227 
228  template < typename GUM_SCALAR >
230  const std::map< std::string, std::vector< GUM_SCALAR > >& eviMap) {
231  if (!evidence_.empty()) evidence_.clear();
232 
233  for (auto it = eviMap.cbegin(), theEnd = eviMap.cend(); it != theEnd; ++it) {
234  NodeId id;
235 
236  try {
238  } catch (NotFound& err) {
240  continue;
241  }
242 
244  }
245  }
246 
247  // check that observed variables DO exists in the network (otherwise Lazy
248  // report
249  // an error and app crash)
250  template < typename GUM_SCALAR >
252  const NodeProperty< std::vector< GUM_SCALAR > >& evidence) {
253  if (!evidence_.empty()) evidence_.clear();
254 
255  // use cbegin() to get const_iterator when available in aGrUM hashtables
256  for (const auto& elt: evidence) {
257  try {
259  } catch (NotFound& err) {
261  continue;
262  }
263 
265  }
266  }
267 
268  template < typename GUM_SCALAR >
269  void
272 
273  if (!evi_stream.good()) {
275  "void InferenceEngine< GUM_SCALAR "
276  ">::insertEvidence(const std::string & path) : could not "
277  "open input file : "
278  << path);
279  }
280 
281  if (!evidence_.empty()) evidence_.clear();
282 
283  std::string line, tmp;
284  char * cstr, *p;
285 
286  while (evi_stream.good() && std::strcmp(line.c_str(), "[EVIDENCE]") != 0) {
288  }
289 
290  while (evi_stream.good()) {
292 
293  if (std::strcmp(line.c_str(), "[QUERY]") == 0) break;
294 
295  if (line.size() == 0) continue;
296 
297  cstr = new char[line.size() + 1];
298  strcpy(cstr, line.c_str());
299 
300  p = strtok(cstr, " ");
301  tmp = p;
302 
303  // if user input is wrong
304  NodeId node = -1;
305 
306  try {
308  } catch (NotFound& err) {
310  continue;
311  }
312 
314  p = strtok(nullptr, " ");
315 
316  while (p != nullptr) {
318  p = strtok(nullptr, " ");
319  } // end of : line
320 
322 
323  delete[] p;
324  delete[] cstr;
325  } // end of : file
326 
327  evi_stream.close();
328  }
329 
330  template < typename GUM_SCALAR >
332  const NodeProperty< std::vector< bool > >& query) {
333  if (!query_.empty()) query_.clear();
334 
335  for (const auto& elt: query) {
336  try {
338  } catch (NotFound& err) {
340  continue;
341  }
342 
344  }
345  }
346 
347  template < typename GUM_SCALAR >
350 
351  if (!evi_stream.good()) {
353  "void InferenceEngine< GUM_SCALAR >::insertQuery(const "
354  "std::string & path) : could not open input file : "
355  << path);
356  }
357 
358  if (!query_.empty()) query_.clear();
359 
360  std::string line, tmp;
361  char * cstr, *p;
362 
363  while (evi_stream.good() && std::strcmp(line.c_str(), "[QUERY]") != 0) {
365  }
366 
367  while (evi_stream.good()) {
369 
370  if (std::strcmp(line.c_str(), "[EVIDENCE]") == 0) break;
371 
372  if (line.size() == 0) continue;
373 
374  cstr = new char[line.size() + 1];
375  strcpy(cstr, line.c_str());
376 
377  p = strtok(cstr, " ");
378  tmp = p;
379 
380  // if user input is wrong
381  NodeId node = -1;
382 
383  try {
385  } catch (NotFound& err) {
387  continue;
388  }
389 
391 
392  p = strtok(nullptr, " ");
393 
394  if (p == nullptr) {
395  query_.insert(node, std::vector< bool >(dSize, true));
396  } else {
397  std::vector< bool > values(dSize, false);
398 
399  while (p != nullptr) {
400  if ((Size)atoi(p) >= dSize)
402  "void InferenceEngine< GUM_SCALAR "
403  ">::insertQuery(const std::string & path) : "
404  "query modality is higher or equal to "
405  "cardinality");
406 
407  values[atoi(p)] = true;
408  p = strtok(nullptr, " ");
409  } // end of : line
410 
412  }
413 
414  delete[] p;
415  delete[] cstr;
416  } // end of : file
417 
418  evi_stream.close();
419  }
420 
421  template < typename GUM_SCALAR >
423  const std::string& varName) const {
425  }
426 
427  template < typename GUM_SCALAR >
429  const std::string& varName) const {
431  }
432 
433  template < typename GUM_SCALAR >
436  try {
440  return res;
441  } catch (NotFound& err) { throw(err); }
442  }
443 
444  template < typename GUM_SCALAR >
447  try {
451  return res;
452  } catch (NotFound& err) { throw(err); }
453  }
454 
455  template < typename GUM_SCALAR >
457  const std::string& varName) const {
458  try {
460  } catch (NotFound& err) { throw(err); }
461  }
462 
463  template < typename GUM_SCALAR >
465  const std::string& varName) const {
466  try {
468  } catch (NotFound& err) { throw(err); }
469  }
470 
471  template < typename GUM_SCALAR >
472  const GUM_SCALAR&
474  try {
475  return expectationMin_[id];
476  } catch (NotFound& err) { throw(err); }
477  }
478 
479  template < typename GUM_SCALAR >
480  const GUM_SCALAR&
482  try {
483  return expectationMax_[id];
484  } catch (NotFound& err) { throw(err); }
485  }
486 
487  template < typename GUM_SCALAR >
489  const std::string& varName) const {
490  std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
491  "GUM_SCALAR >::dynamicExpMin ( const std::string & "
492  "varName ) const : ";
493 
494  if (dynamicExpMin_.empty())
496  errTxt + "_dynamicExpectations() needs to be called before");
497 
498  if (!dynamicExpMin_.exists(
499  varName) /*dynamicExpMin_.find(varName) == dynamicExpMin_.end()*/)
500  GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName);
501 
502  return dynamicExpMin_[varName];
503  }
504 
505  template < typename GUM_SCALAR >
507  const std::string& varName) const {
508  std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
509  "GUM_SCALAR >::dynamicExpMax ( const std::string & "
510  "varName ) const : ";
511 
512  if (dynamicExpMax_.empty())
514  errTxt + "_dynamicExpectations() needs to be called before");
515 
516  if (!dynamicExpMax_.exists(
517  varName) /*dynamicExpMin_.find(varName) == dynamicExpMin_.end()*/)
518  GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName);
519 
520  return dynamicExpMax_[varName];
521  }
522 
523  template < typename GUM_SCALAR >
524  const std::vector< std::vector< GUM_SCALAR > >&
526  return marginalSets_[id];
527  }
528 
529  template < typename GUM_SCALAR >
531  const std::string& path) const {
533 
534  if (!m_stream.good()) {
536  "void InferenceEngine< GUM_SCALAR >::saveMarginals(const "
537  "std::string & path) const : could not open output file "
538  ": "
539  << path);
540  }
541 
542  for (const auto& elt: marginalMin_) {
543  Size esize = Size(elt.second.size());
544 
545  for (Size mod = 0; mod < esize; mod++) {
547  << mod << " " << (elt.second)[mod] << " "
548  << marginalMax_[elt.first][mod] << std::endl;
549  }
550  }
551 
552  m_stream.close();
553  }
554 
555  template < typename GUM_SCALAR >
557  const std::string& path) const {
558  if (dynamicExpMin_.empty()) //_modal.empty())
559  return;
560 
561  // else not here, to keep the const (natural with a saving process)
562  // else if(dynamicExpMin_.empty() || dynamicExpMax_.empty())
563  //_dynamicExpectations(); // works with or without a dynamic network
564 
566 
567  if (!m_stream.good()) {
569  "void InferenceEngine< GUM_SCALAR "
570  ">::saveExpectations(const std::string & path) : could "
571  "not open output file : "
572  << path);
573  }
574 
575  for (const auto& elt: dynamicExpMin_) {
576  m_stream << elt.first; // it->first;
577 
578  // iterates over a vector
579  for (const auto& elt2: elt.second) {
580  m_stream << " " << elt2;
581  }
582 
583  m_stream << std::endl;
584  }
585 
586  for (const auto& elt: dynamicExpMax_) {
587  m_stream << elt.first;
588 
589  // iterates over a vector
590  for (const auto& elt2: elt.second) {
591  m_stream << " " << elt2;
592  }
593 
594  m_stream << std::endl;
595  }
596 
597  m_stream.close();
598  }
599 
600  template < typename GUM_SCALAR >
603  output << std::endl;
604 
605  // use cbegin() when available
606  for (const auto& elt: marginalMin_) {
607  Size esize = Size(elt.second.size());
608 
609  for (Size mod = 0; mod < esize; mod++) {
611  << "=" << mod << "|e) = [ ";
612  output << marginalMin_[elt.first][mod] << ", "
613  << marginalMax_[elt.first][mod] << " ]";
614 
615  if (!query_.empty())
616  if (query_.exists(elt.first) && query_[elt.first][mod])
617  output << " QUERY";
618 
619  output << std::endl;
620  }
621 
622  output << std::endl;
623  }
624 
625  return output.str();
626  }
627 
628  template < typename GUM_SCALAR >
629  void
632 
633  if (!m_stream.good()) {
635  "void InferenceEngine< GUM_SCALAR >::saveVertices(const "
636  "std::string & path) : could not open outpul file : "
637  << path);
638  }
639 
640  for (const auto& elt: marginalSets_) {
642  << std::endl;
643 
644  for (const auto& elt2: elt.second) {
645  m_stream << "[";
646  bool first = true;
647 
648  for (const auto& elt3: elt2) {
649  if (!first) {
650  m_stream << ",";
651  first = false;
652  }
653 
654  m_stream << elt3;
655  }
656 
657  m_stream << "]\n";
658  }
659  }
660 
661  m_stream.close();
662  }
663 
664  template < typename GUM_SCALAR >
670 
671  for (auto node: credalNet_->current_bn().nodes()) {
675 
678  }
679  }
680 
681  template < typename GUM_SCALAR >
684 
685  if (!storeVertices_) return;
686 
687  for (auto node: credalNet_->current_bn().nodes())
689  }
690 
691  // since only monitored variables in modal_ will be able to compute
692  // expectations, it is useless to initialize those for all variables
693  // modal_ variables will always be checked further, so it is not necessary
694  // to
695  // check it here, but doing so will use less memory
696  template < typename GUM_SCALAR >
700 
701  if (modal_.empty()) return;
702 
703  for (auto node: credalNet_->current_bn().nodes()) {
705 
707  auto delim = var_name.find_first_of("_");
709 
710  if (!modal_.exists(var_name)) continue;
711 
714  }
715  }
716 
717  template < typename GUM_SCALAR >
720  }
721 
722  template < typename GUM_SCALAR >
724  // no modals, no expectations computed during inference
725  if (expectationMin_.empty() || modal_.empty()) return;
726 
727  // already called by the algorithm or the user
728  if (dynamicExpMax_.size() > 0 && dynamicExpMin_.size() > 0) return;
729 
730  // typedef typename std::map< int, GUM_SCALAR > innerMap;
731  using innerMap = typename gum::HashTable< int, GUM_SCALAR >;
732 
733  // typedef typename std::map< std::string, innerMap > outerMap;
734  using outerMap = typename gum::HashTable< std::string, innerMap >;
735 
736  // typedef typename std::map< std::string, std::vector< GUM_SCALAR > >
737  // mod;
738 
739  // si non dynamique, sauver directement expectationMin_ et Max (revient au
740  // meme
741  // mais plus rapide)
743 
744  for (const auto& elt: expectationMin_) {
746 
748  auto delim = var_name.find_first_of("_");
751 
752  // to be sure (don't store not monitored variables' expectations)
753  // although it
754  // should be taken care of before this point
755  if (!modal_.exists(var_name)) continue;
756 
759  = elt.second; // we iterate with min iterators
763  }
764 
765  for (const auto& elt: expectationsMin) {
766  typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
767 
768  for (const auto& elt2: elt.second)
770 
772  }
773 
774  for (const auto& elt: expectationsMax) {
775  typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
776 
777  for (const auto& elt2: elt.second) {
779  }
780 
782  }
783  }
784 
785  template < typename GUM_SCALAR >
787  timeSteps_ = 0;
788  t0_.clear();
789  t1_.clear();
790 
791  // t = 0 vars belongs to t0_ as keys
792  for (auto node: credalNet_->current_bn().dag().nodes()) {
794  auto delim = var_name.find_first_of("_");
795 
796  if (delim > var_name.size()) {
798  "void InferenceEngine< GUM_SCALAR "
799  ">::repetitiveInit_() : the network does not "
800  "appear to be dynamic");
801  }
802 
804 
805  if (time_step.compare("0") == 0) t0_.insert(node, std::vector< NodeId >());
806  }
807 
808  // t = 1 vars belongs to either t0_ as member value or t1_ as keys
809  for (const auto& node: credalNet_->current_bn().dag().nodes()) {
811  auto delim = var_name.find_first_of("_");
816 
817  if (time_step.compare("1") == 0) {
818  bool found = false;
819 
820  for (const auto& elt: t0_) {
825 
826  if (var_name.compare(var_0_name) == 0) {
827  const Potential< GUM_SCALAR >* potential(
829  const Potential< GUM_SCALAR >* potential2(
831 
834  else
835  t1_.insert(node, std::vector< NodeId >());
836 
837  found = true;
838  break;
839  }
840  }
841 
842  if (!found) { t1_.insert(node, std::vector< NodeId >()); }
843  }
844  }
845 
846  // t > 1 vars belongs to either t0_ or t1_ as member value
847  // remember timeSteps_
848  for (auto node: credalNet_->current_bn().dag().nodes()) {
850  auto delim = var_name.find_first_of("_");
855 
856  if (time_step.compare("0") != 0 && time_step.compare("1") != 0) {
857  // keep max time_step
858  if (atoi(time_step.c_str()) > timeSteps_)
860 
862  bool found = false;
863 
864  for (const auto& elt: t0_) {
869 
870  if (var_name.compare(var_0_name) == 0) {
871  const Potential< GUM_SCALAR >* potential(
873  const Potential< GUM_SCALAR >* potential2(
875 
876  if (potential->domainSize() == potential2->domainSize()) {
878  found = true;
879  break;
880  }
881  }
882  }
883 
884  if (!found) {
885  for (const auto& elt: t1_) {
888  auto delim = var_0_name.find_first_of("_");
890 
891  if (var_name.compare(var_0_name) == 0) {
892  const Potential< GUM_SCALAR >* potential(
894  const Potential< GUM_SCALAR >* potential2(
896 
897  if (potential->domainSize() == potential2->domainSize()) {
899  break;
900  }
901  }
902  }
903  }
904  }
905  }
906  }
907 
908  template < typename GUM_SCALAR >
910  const NodeId& id,
911  const std::vector< GUM_SCALAR >& vertex) {
913  auto delim = var_name.find_first_of("_");
914 
916 
917  if (modal_.exists(var_name) /*modal_.find(var_name) != modal_.end()*/) {
918  GUM_SCALAR exp = 0;
919  auto vsize = vertex.size();
920 
921  for (Size mod = 0; mod < vsize; mod++)
922  exp += vertex[mod] * modal_[var_name][mod];
923 
925 
927  }
928  }
929 
930  template < typename GUM_SCALAR >
932  const NodeId& id,
933  const std::vector< GUM_SCALAR >& vertex,
934  const bool& elimRedund) {
935  auto& nodeCredalSet = marginalSets_[id];
936  auto dsize = vertex.size();
937 
938  bool eq = true;
939 
940  for (auto it = nodeCredalSet.cbegin(), itEnd = nodeCredalSet.cend();
941  it != itEnd;
942  ++it) {
943  eq = true;
944 
945  for (Size i = 0; i < dsize; i++) {
946  if (std::fabs(vertex[i] - (*it)[i]) > 1e-6) {
947  eq = false;
948  break;
949  }
950  }
951 
952  if (eq) break;
953  }
954 
955  if (!eq || nodeCredalSet.size() == 0) {
957  return;
958  } else
959  return;
960 
961  // because of next lambda return condition
962  if (nodeCredalSet.size() == 1) return;
963 
964  // check that the point and all previously added ones are not inside the
965  // actual
966  // polytope
967  auto itEnd = std::remove_if(
969  nodeCredalSet.end(),
970  [&](const std::vector< GUM_SCALAR >& v) -> bool {
971  for (auto jt = v.cbegin(),
972  jtEnd = v.cend(),
977  jt != jtEnd && minIt != minItEnd && maxIt != maxItEnd;
978  ++jt, ++minIt, ++maxIt) {
979  if ((std::fabs(*jt - *minIt) < 1e-6 || std::fabs(*jt - *maxIt) < 1e-6)
980  && std::fabs(*minIt - *maxIt) > 1e-6)
981  return false;
982  }
983  return true;
984  });
985 
987 
988  // we need at least 2 points to make a convex combination
989  if (!elimRedund || nodeCredalSet.size() <= 2) return;
990 
991  // there may be points not inside the polytope but on one of it's facet,
992  // meaning it's still a convex combination of vertices of this facet. Here
993  // we
994  // need lrs.
996  lrsWrapper.setUpV((unsigned int)dsize, (unsigned int)(nodeCredalSet.size()));
997 
998  for (const auto& vtx: nodeCredalSet)
1000 
1002 
1004  }
1005 
1006  template < typename GUM_SCALAR >
1007  const NodeProperty< std::vector< NodeId > >&
1009  return t0_;
1010  }
1011 
1012  template < typename GUM_SCALAR >
1013  const NodeProperty< std::vector< NodeId > >&
1015  return t1_;
1016  }
1017 
1018  template < typename GUM_SCALAR >
1020  GUM_SCALAR eps = 0;
1021 #pragma omp parallel
1022  {
1023  GUM_SCALAR tEps = 0;
1024  GUM_SCALAR delta;
1025 
1026  /// int tId = getThreadNumber();
1027  int nsize = int(marginalMin_.size());
1028 
1029 #pragma omp for
1030 
1031  for (int i = 0; i < nsize; i++) {
1032  auto dSize = marginalMin_[i].size();
1033 
1034  for (Size j = 0; j < dSize; j++) {
1035  // on min
1037  delta = (delta < 0) ? (-delta) : delta;
1038  tEps = (tEps < delta) ? delta : tEps;
1039 
1040  // on max
1042  delta = (delta < 0) ? (-delta) : delta;
1043  tEps = (tEps < delta) ? delta : tEps;
1044 
1047  }
1048  } // end of : all variables
1049 
1050 #pragma omp critical(epsilon_max)
1051  {
1052 #pragma omp flush(eps)
1053  eps = (eps < tEps) ? tEps : eps;
1054  }
1055  }
1056 
1057  return eps;
1058  }
1059  } // namespace credal
1060 } // namespace gum
INLINE void emplace(Args &&... args)
Definition: set_tpl.h:669
namespace for all credal networks entities
Definition: LpInterface.cpp:37