37 template <
typename GUM_SCALAR >
50 template <
typename GUM_SCALAR >
55 template <
typename GUM_SCALAR >
60 template <
typename GUM_SCALAR >
95 template <
typename GUM_SCALAR >
100 template <
typename GUM_SCALAR >
107 template <
typename GUM_SCALAR >
116 template <
typename GUM_SCALAR >
126 template <
typename GUM_SCALAR >
131 template <
typename GUM_SCALAR >
136 template <
typename GUM_SCALAR >
142 template <
typename GUM_SCALAR >
144 std::ifstream mod_stream(path.c_str(), std::ios::in);
146 if (!mod_stream.good()) {
148 "void InferenceEngine< GUM_SCALAR " 149 ">::insertModals(const std::string & path) : " 150 "could not open input file : " 156 std::string line, tmp;
159 while (mod_stream.good()) {
160 getline(mod_stream, line);
162 if (line.size() == 0)
continue;
164 cstr =
new char[line.size() + 1];
165 strcpy(cstr, line.c_str());
167 p = strtok(cstr,
" ");
170 std::vector< GUM_SCALAR > values;
171 p = strtok(
nullptr,
" ");
173 while (p !=
nullptr) {
174 values.push_back(GUM_SCALAR(atof(p)));
175 p = strtok(
nullptr,
" ");
178 _modal.insert(tmp, values);
189 template <
typename GUM_SCALAR >
191 const std::map< std::string, std::vector< GUM_SCALAR > >& modals) {
194 for (
auto it = modals.cbegin(), theEnd = modals.cend(); it != theEnd; ++it) {
198 id =
_credalNet->current_bn().idFromName(it->first);
205 auto dSize =
_credalNet->current_bn().variable(
id).domainSize();
207 if (dSize != it->second.size())
continue;
217 _modal.insert(it->first, it->second);
225 template <
typename GUM_SCALAR >
227 const std::map< std::string, std::vector< GUM_SCALAR > >& eviMap) {
230 for (
auto it = eviMap.cbegin(), theEnd = eviMap.cend(); it != theEnd; ++it) {
234 id =
_credalNet->current_bn().idFromName(it->first);
247 template <
typename GUM_SCALAR >
249 const NodeProperty< std::vector< GUM_SCALAR > >& evidence) {
253 for (
const auto& elt : evidence) {
265 template <
typename GUM_SCALAR >
268 std::ifstream evi_stream(path.c_str(), std::ios::in);
270 if (!evi_stream.good()) {
272 "void InferenceEngine< GUM_SCALAR " 273 ">::insertEvidence(const std::string & path) : could not " 280 std::string line, tmp;
283 while (evi_stream.good() && std::strcmp(line.c_str(),
"[EVIDENCE]") != 0) {
284 getline(evi_stream, line);
287 while (evi_stream.good()) {
288 getline(evi_stream, line);
290 if (std::strcmp(line.c_str(),
"[QUERY]") == 0)
break;
292 if (line.size() == 0)
continue;
294 cstr =
new char[line.size() + 1];
295 strcpy(cstr, line.c_str());
297 p = strtok(cstr,
" ");
304 node =
_credalNet->current_bn().idFromName(tmp);
310 std::vector< GUM_SCALAR > values;
311 p = strtok(
nullptr,
" ");
313 while (p !=
nullptr) {
314 values.push_back(GUM_SCALAR(atof(p)));
315 p = strtok(
nullptr,
" ");
327 template <
typename GUM_SCALAR >
332 for (
const auto& elt :
query) {
344 template <
typename GUM_SCALAR >
346 std::ifstream evi_stream(path.c_str(), std::ios::in);
348 if (!evi_stream.good()) {
350 "void InferenceEngine< GUM_SCALAR >::insertQuery(const " 351 "std::string & path) : could not open input file : " 357 std::string line, tmp;
360 while (evi_stream.good() && std::strcmp(line.c_str(),
"[QUERY]") != 0) {
361 getline(evi_stream, line);
364 while (evi_stream.good()) {
365 getline(evi_stream, line);
367 if (std::strcmp(line.c_str(),
"[EVIDENCE]") == 0)
break;
369 if (line.size() == 0)
continue;
371 cstr =
new char[line.size() + 1];
372 strcpy(cstr, line.c_str());
374 p = strtok(cstr,
" ");
381 node =
_credalNet->current_bn().idFromName(tmp);
387 auto dSize =
_credalNet->current_bn().variable(node).domainSize();
389 p = strtok(
nullptr,
" ");
394 std::vector< bool > values(dSize,
false);
396 while (p !=
nullptr) {
397 if ((
Size)atoi(p) >= dSize)
399 "void InferenceEngine< GUM_SCALAR " 400 ">::insertQuery(const std::string & path) : " 401 "query modality is higher or equal to " 404 values[atoi(p)] =
true;
405 p = strtok(
nullptr,
" ");
418 template <
typename GUM_SCALAR >
420 const std::string& varName)
const {
423 }
catch (
NotFound& err) {
throw(err); }
426 template <
typename GUM_SCALAR >
428 const std::string& varName)
const {
431 }
catch (
NotFound& err) {
throw(err); }
434 template <
typename GUM_SCALAR >
435 const std::vector< GUM_SCALAR >&
439 }
catch (
NotFound& err) {
throw(err); }
442 template <
typename GUM_SCALAR >
443 const std::vector< GUM_SCALAR >&
447 }
catch (
NotFound& err) {
throw(err); }
450 template <
typename GUM_SCALAR >
452 const std::string& varName)
const {
455 }
catch (
NotFound& err) {
throw(err); }
458 template <
typename GUM_SCALAR >
460 const std::string& varName)
const {
463 }
catch (
NotFound& err) {
throw(err); }
466 template <
typename GUM_SCALAR >
471 }
catch (
NotFound& err) {
throw(err); }
474 template <
typename GUM_SCALAR >
479 }
catch (
NotFound& err) {
throw(err); }
482 template <
typename GUM_SCALAR >
484 const std::string& varName)
const {
485 std::string errTxt =
"const std::vector< GUM_SCALAR > & InferenceEngine< " 486 "GUM_SCALAR >::dynamicExpMin ( const std::string & " 487 "varName ) const : ";
491 errTxt +
"_dynamicExpectations() needs to be called before");
500 template <
typename GUM_SCALAR >
502 const std::string& varName)
const {
503 std::string errTxt =
"const std::vector< GUM_SCALAR > & InferenceEngine< " 504 "GUM_SCALAR >::dynamicExpMax ( const std::string & " 505 "varName ) const : ";
509 errTxt +
"_dynamicExpectations() needs to be called before");
518 template <
typename GUM_SCALAR >
519 const std::vector< std::vector< GUM_SCALAR > >&
524 template <
typename GUM_SCALAR >
526 const std::string& path)
const {
527 std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);
529 if (!m_stream.good()) {
531 "void InferenceEngine< GUM_SCALAR >::saveMarginals(const " 532 "std::string & path) const : could not open output file " 538 Size esize =
Size(elt.second.size());
540 for (
Size mod = 0; mod < esize; mod++) {
541 m_stream <<
_credalNet->current_bn().variable(elt.first).name() <<
" " 542 << mod <<
" " << (elt.second)[mod] <<
" " 550 template <
typename GUM_SCALAR >
552 const std::string& path)
const {
560 std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);
562 if (!m_stream.good()) {
564 "void InferenceEngine< GUM_SCALAR " 565 ">::saveExpectations(const std::string & path) : could " 566 "not open output file : " 571 m_stream << elt.first;
574 for (
const auto& elt2 : elt.second) {
575 m_stream <<
" " << elt2;
578 m_stream << std::endl;
582 m_stream << elt.first;
585 for (
const auto& elt2 : elt.second) {
586 m_stream <<
" " << elt2;
589 m_stream << std::endl;
595 template <
typename GUM_SCALAR >
597 std::stringstream output;
602 Size esize =
Size(elt.second.size());
604 for (
Size mod = 0; mod < esize; mod++) {
605 output <<
"P(" <<
_credalNet->current_bn().variable(elt.first).name()
606 <<
"=" << mod <<
"|e) = [ ";
607 output << _marginalMin[elt.first][mod] <<
", " 623 template <
typename GUM_SCALAR >
626 std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);
628 if (!m_stream.good()) {
630 "void InferenceEngine< GUM_SCALAR >::saveVertices(const " 631 "std::string & path) : could not open outpul file : " 636 m_stream <<
_credalNet->current_bn().variable(elt.first).name()
639 for (
const auto& elt2 : elt.second) {
643 for (
const auto& elt3 : elt2) {
659 template <
typename GUM_SCALAR >
666 for (
auto node :
_credalNet->current_bn().nodes()) {
667 auto dSize =
_credalNet->current_bn().variable(node).domainSize();
676 template <
typename GUM_SCALAR >
682 for (
auto node :
_credalNet->current_bn().nodes())
691 template <
typename GUM_SCALAR >
696 if (
_modal.empty())
return;
698 for (
auto node :
_credalNet->current_bn().nodes()) {
699 std::string var_name, time_step;
701 var_name =
_credalNet->current_bn().variable(node).name();
702 auto delim = var_name.find_first_of(
"_");
703 var_name = var_name.substr(0, delim);
705 if (!
_modal.exists(var_name))
continue;
712 template <
typename GUM_SCALAR >
717 template <
typename GUM_SCALAR >
737 outerMap expectationsMin, expectationsMax;
740 std::string var_name, time_step;
742 var_name =
_credalNet->current_bn().variable(elt.first).name();
743 auto delim = var_name.find_first_of(
"_");
744 time_step = var_name.substr(delim + 1, var_name.size());
745 var_name = var_name.substr(0, delim);
750 if (!
_modal.exists(var_name))
continue;
752 expectationsMin.getWithDefault(var_name, innerMap())
753 .getWithDefault(atoi(time_step.c_str()), 0) =
755 expectationsMax.getWithDefault(var_name, innerMap())
756 .getWithDefault(atoi(time_step.c_str()), 0) =
760 for (
const auto& elt : expectationsMin) {
761 typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
763 for (
const auto& elt2 : elt.second)
764 dynExp[elt2.first] = elt2.second;
769 for (
const auto& elt : expectationsMax) {
770 typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
772 for (
const auto& elt2 : elt.second) {
773 dynExp[elt2.first] = elt2.second;
780 template <
typename GUM_SCALAR >
787 for (
auto node :
_credalNet->current_bn().dag().nodes()) {
788 std::string var_name =
_credalNet->current_bn().variable(node).name();
789 auto delim = var_name.find_first_of(
"_");
791 if (delim > var_name.size()) {
793 "void InferenceEngine< GUM_SCALAR " 794 ">::_repetitiveInit() : the network does not " 795 "appear to be dynamic");
798 std::string time_step = var_name.substr(delim + 1, 1);
800 if (time_step.compare(
"0") == 0)
_t0.
insert(node, std::vector< NodeId >());
804 for (
const auto& node :
_credalNet->current_bn().dag().nodes()) {
805 std::string var_name =
_credalNet->current_bn().variable(node).name();
806 auto delim = var_name.find_first_of(
"_");
807 std::string time_step = var_name.substr(delim + 1, var_name.size());
808 var_name = var_name.substr(0, delim);
809 delim = time_step.find_first_of(
"_");
810 time_step = time_step.substr(0, delim);
812 if (time_step.compare(
"1") == 0) {
815 for (
const auto& elt :
_t0) {
816 std::string var_0_name =
817 _credalNet->current_bn().variable(elt.first).name();
818 delim = var_0_name.find_first_of(
"_");
819 var_0_name = var_0_name.substr(0, delim);
821 if (var_name.compare(var_0_name) == 0) {
827 if (potential->domainSize() == potential2->domainSize())
828 _t0[elt.first].push_back(node);
830 _t1.
insert(node, std::vector< NodeId >());
837 if (!found) {
_t1.
insert(node, std::vector< NodeId >()); }
843 for (
auto node :
_credalNet->current_bn().dag().nodes()) {
844 std::string var_name =
_credalNet->current_bn().variable(node).name();
845 auto delim = var_name.find_first_of(
"_");
846 std::string time_step = var_name.substr(delim + 1, var_name.size());
847 var_name = var_name.substr(0, delim);
848 delim = time_step.find_first_of(
"_");
849 time_step = time_step.substr(0, delim);
851 if (time_step.compare(
"0") != 0 && time_step.compare(
"1") != 0) {
856 std::string var_0_name;
859 for (
const auto& elt :
_t0) {
860 std::string var_0_name =
861 _credalNet->current_bn().variable(elt.first).name();
862 delim = var_0_name.find_first_of(
"_");
863 var_0_name = var_0_name.substr(0, delim);
865 if (var_name.compare(var_0_name) == 0) {
871 if (potential->domainSize() == potential2->domainSize()) {
872 _t0[elt.first].push_back(node);
880 for (
const auto& elt :
_t1) {
881 std::string var_0_name =
882 _credalNet->current_bn().variable(elt.first).name();
883 auto delim = var_0_name.find_first_of(
"_");
884 var_0_name = var_0_name.substr(0, delim);
886 if (var_name.compare(var_0_name) == 0) {
892 if (potential->domainSize() == potential2->domainSize()) {
893 _t1[elt.first].push_back(node);
903 template <
typename GUM_SCALAR >
905 const NodeId&
id,
const std::vector< GUM_SCALAR >& vertex) {
906 std::string var_name =
_credalNet->current_bn().variable(
id).name();
907 auto delim = var_name.find_first_of(
"_");
909 var_name = var_name.substr(0, delim);
911 if (
_modal.exists(var_name) ) {
913 auto vsize = vertex.size();
915 for (
Size mod = 0; mod < vsize; mod++)
916 exp += vertex[mod] *
_modal[var_name][mod];
924 template <
typename GUM_SCALAR >
927 const std::vector< GUM_SCALAR >& vertex,
928 const bool& elimRedund) {
930 auto dsize = vertex.size();
934 for (
auto it = nodeCredalSet.cbegin(), itEnd = nodeCredalSet.cend();
939 for (
Size i = 0; i < dsize; i++) {
940 if (std::fabs(vertex[i] - (*it)[i]) > 1e-6) {
949 if (!eq || nodeCredalSet.size() == 0) {
950 nodeCredalSet.push_back(vertex);
956 if (nodeCredalSet.size() == 1)
return;
961 auto itEnd = std::remove_if(
962 nodeCredalSet.begin(),
964 [&](
const std::vector< GUM_SCALAR >& v) ->
bool {
965 for (
auto jt = v.cbegin(),
971 jt != jtEnd && minIt != minItEnd && maxIt != maxItEnd;
972 ++jt, ++minIt, ++maxIt) {
973 if ((std::fabs(*jt - *minIt) < 1e-6 || std::fabs(*jt - *maxIt) < 1e-6)
974 && std::fabs(*minIt - *maxIt) > 1e-6)
980 nodeCredalSet.erase(itEnd, nodeCredalSet.end());
983 if (!elimRedund || nodeCredalSet.size() <= 2)
return;
990 lrsWrapper.
setUpV((
unsigned int)dsize, (
unsigned int)(nodeCredalSet.size()));
992 for (
const auto& vtx : nodeCredalSet)
993 lrsWrapper.
fillV(vtx);
1000 template <
typename GUM_SCALAR >
1006 template <
typename GUM_SCALAR >
1012 template <
typename GUM_SCALAR >
1015 #pragma omp parallel 1017 GUM_SCALAR tEps = 0;
1025 for (
int i = 0; i < nsize; i++) {
1028 for (
Size j = 0; j < dSize; j++) {
1031 delta = (delta < 0) ? (-delta) : delta;
1032 tEps = (tEps < delta) ? delta : tEps;
1036 delta = (delta < 0) ? (-delta) : delta;
1037 tEps = (tEps < delta) ? delta : tEps;
1044 #pragma omp critical(epsilon_max) 1046 #pragma omp flush(eps) 1047 eps = (eps < tEps) ? tEps : eps;
virtual ~InferenceEngine()
Destructor.
const NodeProperty< std::vector< NodeId > > & getT1Cluster() const
Get the _t1 cluster.
aGrUM's Potential is a multi-dimensional array with tensor operators.
const std::vector< GUM_SCALAR > & marginalMax(const NodeId id) const
Get the upper marginals of a given node id.
const GUM_SCALAR & expectationMax(const NodeId id) const
Get the upper expectation of a given node id.
margi _oldMarginalMin
Old lower marginals used to compute epsilon.
void insertModals(const std::map< std::string, std::vector< GUM_SCALAR > > &modals)
Insert variables modalities from map to compute expectations.
Size size() const noexcept
Returns the number of elements stored into the hashtable.
void insertModalsFile(const std::string &path)
Insert variables modalities from file to compute expectations.
bool _storeBNOpt
True if the optimal IBayesNet are stored during inference, False otherwise.
void _dynamicExpectations()
Rearrange lower and upper expectations to suit dynamic networks.
dynExpe _dynamicExpMin
Lower dynamic expectations.
bool repetitiveInd() const
Get the current independence status.
credalSet _marginalSets
Credal sets vertices, if enabled.
virtual void insertEvidenceFile(const std::string &path)
Insert evidence from file.
const std::vector< GUM_SCALAR > & marginalMin(const NodeId id) const
Get the lower marginals of a given node id.
void insertQueryFile(const std::string &path)
Insert query variables states from file.
margi _marginalMin
Lower marginals.
bool storeVertices() const
Get whether credal sets vertices are stored during inference.
int _timeSteps
The number of time steps of this network (only useful for dynamic networks).
bool exists(const Key &key) const
Checks whether there exists an element with a given key in the hashtable.
const std::vector< std::vector< GUM_SCALAR > > & vertices(const NodeId id) const
Get the vertices of a given node id.
margi _oldMarginalMax
Old upper marginals used to compute epsilon.
VarMod2BNsMap< GUM_SCALAR > * getVarMod2BNsMap()
Get optimum IBayesNet.
const std::vector< GUM_SCALAR > & dynamicExpMax(const std::string &varName) const
Get the upper dynamic expectation of a given variable prefix (without the time step included...
void dynamicExpectations()
Compute dynamic expectations.
const GUM_SCALAR _computeEpsilon()
Compute approximation scheme epsilon using the old marginals and the new ones.
gum is the global namespace for all aGrUM entities
void saveVertices(const std::string &path) const
Saves vertices to file.
void saveMarginals(const std::string &path) const
Saves marginals to file.
Abstract class representing CredalNet inference engines.
void _repetitiveInit()
Initialize _t0 and _t1 clusters.
expe _expectationMax
Upper expectations, if some variables modalities were inserted.
InferenceEngine(const CredalNet< GUM_SCALAR > &credalNet)
Constructor.
The class for generic Hash Tables.
cluster _t0
Clusters of nodes used with dynamic networks.
std::string toString() const
Print all nodes marginals to standard output.
void fillV(const std::vector< GUM_SCALAR > &vertex)
Creates the V-representation of a polytope by adding a vertex to the problem input __input...
Class template representing a Credal Network.
Class template acting as a wrapper for Lexicographic Reverse Search by David Avis.
VarMod2BNsMap< GUM_SCALAR > _dbnOpt
Object used to efficiently store optimal bayes net during inference, for some algorithms.
const CredalNet< GUM_SCALAR > * _credalNet
A pointer to the Credal Net used.
const const_iterator & cend() const noexcept
Returns the unsafe const_iterator pointing to the end of the hashtable.
dynExpe _dynamicExpMax
Upper dynamic expectations.
void _initMarginals()
Initialize lower and upper old marginals and marginals before inference, with the lower marginal bein...
void insertQuery(const NodeProperty< std::vector< bool > > &query)
Insert query variables and states from Property.
query _query
Holds the query nodes states.
bool _repetitiveInd
True if using repetitive independence ( dynamic network only ), False otherwise.
void setUpV(const Size &card, const Size &vertices)
Sets up a V-representation.
const_iterator cbegin() const
Returns an unsafe const_iterator pointing to the beginning of the hashtable.
dynExpe _modal
Variables modalities used to compute expectations.
void _updateCredalSets(const NodeId &id, const std::vector< GUM_SCALAR > &vertex, const bool &elimRedund=false)
Given a node id and one of it's possible vertex, update it's credal set.
Abstract class template representing a CredalNet inference engine.
void _initMarginalSets()
Initialize credal set vertices with empty sets.
const CredalNet< GUM_SCALAR > & credalNet()
Get this credal network.
margi _evidence
Holds observed variables states.
cluster _t1
Clusters of nodes used with dynamic networks.
void clear()
Removes all the elements in the hash table.
expe _expectationMin
Lower expectations, if some variables modalities were inserted.
void elimRedundVrep()
V-Redundancy elimination.
const NodeProperty< std::vector< NodeId > > & getT0Cluster() const
Get the _t0 cluster.
const matrix & getOutput() const
Get the output matrix solution of the problem.
bool _storeVertices
True if credal sets vertices are stored, False otherwise.
void setRepetitiveInd(const bool repetitive)
std::size_t Size
In aGrUM, hashed values are unsigned long int.
const GUM_SCALAR & expectationMin(const NodeId id) const
Get the lower expectation of a given node id.
value_type & insert(const Key &key, const Val &val)
Adds a new element (actually a copy of this element) into the hash table.
Class used to store optimum IBayesNet during some inference algorithms.
void _initExpectations()
Initialize lower and upper expectations before inference, with the lower expectation being initialize...
void saveExpectations(const std::string &path) const
Saves expectations to file.
bool empty() const noexcept
Indicates whether the hash table is empty.
void insertEvidence(const std::map< std::string, std::vector< GUM_SCALAR > > &eviMap)
Insert evidence from map.
Size NodeId
Type for node ids.
void _updateExpectations(const NodeId &id, const std::vector< GUM_SCALAR > &vertex)
Given a node id and one of it's possible vertex obtained during inference, update this node lower and...
margi _marginalMax
Upper marginals.
#define GUM_ERROR(type, msg)
virtual void eraseAllEvidence()
Erase all inference related data to perform another one.
const std::vector< GUM_SCALAR > & dynamicExpMin(const std::string &varName) const
Get the lower dynamic expectation of a given variable prefix (without the time step included...