40 template <
typename GUM_SCALAR >
53 template <
typename GUM_SCALAR >
58 template <
typename GUM_SCALAR >
63 template <
typename GUM_SCALAR >
98 template <
typename GUM_SCALAR >
103 template <
typename GUM_SCALAR >
110 template <
typename GUM_SCALAR >
119 template <
typename GUM_SCALAR >
129 template <
typename GUM_SCALAR >
134 template <
typename GUM_SCALAR >
139 template <
typename GUM_SCALAR >
145 template <
typename GUM_SCALAR >
147 std::ifstream mod_stream(path.c_str(), std::ios::in);
149 if (!mod_stream.good()) {
151 "void InferenceEngine< GUM_SCALAR " 152 ">::insertModals(const std::string & path) : " 153 "could not open input file : " 159 std::string line, tmp;
162 while (mod_stream.good()) {
163 getline(mod_stream, line);
165 if (line.size() == 0)
continue;
167 cstr =
new char[line.size() + 1];
168 strcpy(cstr, line.c_str());
170 p = strtok(cstr,
" ");
173 std::vector< GUM_SCALAR > values;
174 p = strtok(
nullptr,
" ");
176 while (p !=
nullptr) {
177 values.push_back(GUM_SCALAR(atof(p)));
178 p = strtok(
nullptr,
" ");
181 _modal.insert(tmp, values);
192 template <
typename GUM_SCALAR >
194 const std::map< std::string, std::vector< GUM_SCALAR > >& modals) {
197 for (
auto it = modals.cbegin(), theEnd = modals.cend(); it != theEnd; ++it) {
201 id =
_credalNet->current_bn().idFromName(it->first);
208 auto dSize =
_credalNet->current_bn().variable(
id).domainSize();
210 if (dSize != it->second.size())
continue;
220 _modal.insert(it->first, it->second);
228 template <
typename GUM_SCALAR >
230 const std::map< std::string, std::vector< GUM_SCALAR > >& eviMap) {
233 for (
auto it = eviMap.cbegin(), theEnd = eviMap.cend(); it != theEnd; ++it) {
237 id =
_credalNet->current_bn().idFromName(it->first);
250 template <
typename GUM_SCALAR >
252 const NodeProperty< std::vector< GUM_SCALAR > >& evidence) {
256 for (
const auto& elt : evidence) {
268 template <
typename GUM_SCALAR >
271 std::ifstream evi_stream(path.c_str(), std::ios::in);
273 if (!evi_stream.good()) {
275 "void InferenceEngine< GUM_SCALAR " 276 ">::insertEvidence(const std::string & path) : could not " 283 std::string line, tmp;
286 while (evi_stream.good() && std::strcmp(line.c_str(),
"[EVIDENCE]") != 0) {
287 getline(evi_stream, line);
290 while (evi_stream.good()) {
291 getline(evi_stream, line);
293 if (std::strcmp(line.c_str(),
"[QUERY]") == 0)
break;
295 if (line.size() == 0)
continue;
297 cstr =
new char[line.size() + 1];
298 strcpy(cstr, line.c_str());
300 p = strtok(cstr,
" ");
307 node =
_credalNet->current_bn().idFromName(tmp);
313 std::vector< GUM_SCALAR > values;
314 p = strtok(
nullptr,
" ");
316 while (p !=
nullptr) {
317 values.push_back(GUM_SCALAR(atof(p)));
318 p = strtok(
nullptr,
" ");
330 template <
typename GUM_SCALAR >
335 for (
const auto& elt :
query) {
347 template <
typename GUM_SCALAR >
349 std::ifstream evi_stream(path.c_str(), std::ios::in);
351 if (!evi_stream.good()) {
353 "void InferenceEngine< GUM_SCALAR >::insertQuery(const " 354 "std::string & path) : could not open input file : " 360 std::string line, tmp;
363 while (evi_stream.good() && std::strcmp(line.c_str(),
"[QUERY]") != 0) {
364 getline(evi_stream, line);
367 while (evi_stream.good()) {
368 getline(evi_stream, line);
370 if (std::strcmp(line.c_str(),
"[EVIDENCE]") == 0)
break;
372 if (line.size() == 0)
continue;
374 cstr =
new char[line.size() + 1];
375 strcpy(cstr, line.c_str());
377 p = strtok(cstr,
" ");
384 node =
_credalNet->current_bn().idFromName(tmp);
390 auto dSize =
_credalNet->current_bn().variable(node).domainSize();
392 p = strtok(
nullptr,
" ");
397 std::vector< bool > values(dSize,
false);
399 while (p !=
nullptr) {
400 if ((
Size)atoi(p) >= dSize)
402 "void InferenceEngine< GUM_SCALAR " 403 ">::insertQuery(const std::string & path) : " 404 "query modality is higher or equal to " 407 values[atoi(p)] =
true;
408 p = strtok(
nullptr,
" ");
421 template <
typename GUM_SCALAR >
423 const std::string& varName)
const {
426 }
catch (
NotFound& err) {
throw(err); }
429 template <
typename GUM_SCALAR >
431 const std::string& varName)
const {
434 }
catch (
NotFound& err) {
throw(err); }
437 template <
typename GUM_SCALAR >
438 const std::vector< GUM_SCALAR >&
442 }
catch (
NotFound& err) {
throw(err); }
445 template <
typename GUM_SCALAR >
446 const std::vector< GUM_SCALAR >&
450 }
catch (
NotFound& err) {
throw(err); }
453 template <
typename GUM_SCALAR >
455 const std::string& varName)
const {
458 }
catch (
NotFound& err) {
throw(err); }
461 template <
typename GUM_SCALAR >
463 const std::string& varName)
const {
466 }
catch (
NotFound& err) {
throw(err); }
469 template <
typename GUM_SCALAR >
474 }
catch (
NotFound& err) {
throw(err); }
477 template <
typename GUM_SCALAR >
482 }
catch (
NotFound& err) {
throw(err); }
485 template <
typename GUM_SCALAR >
487 const std::string& varName)
const {
488 std::string errTxt =
"const std::vector< GUM_SCALAR > & InferenceEngine< " 489 "GUM_SCALAR >::dynamicExpMin ( const std::string & " 490 "varName ) const : ";
494 errTxt +
"_dynamicExpectations() needs to be called before");
503 template <
typename GUM_SCALAR >
505 const std::string& varName)
const {
506 std::string errTxt =
"const std::vector< GUM_SCALAR > & InferenceEngine< " 507 "GUM_SCALAR >::dynamicExpMax ( const std::string & " 508 "varName ) const : ";
512 errTxt +
"_dynamicExpectations() needs to be called before");
521 template <
typename GUM_SCALAR >
522 const std::vector< std::vector< GUM_SCALAR > >&
527 template <
typename GUM_SCALAR >
529 const std::string& path)
const {
530 std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);
532 if (!m_stream.good()) {
534 "void InferenceEngine< GUM_SCALAR >::saveMarginals(const " 535 "std::string & path) const : could not open output file " 541 Size esize =
Size(elt.second.size());
543 for (
Size mod = 0; mod < esize; mod++) {
544 m_stream <<
_credalNet->current_bn().variable(elt.first).name() <<
" " 545 << mod <<
" " << (elt.second)[mod] <<
" " 553 template <
typename GUM_SCALAR >
555 const std::string& path)
const {
563 std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);
565 if (!m_stream.good()) {
567 "void InferenceEngine< GUM_SCALAR " 568 ">::saveExpectations(const std::string & path) : could " 569 "not open output file : " 574 m_stream << elt.first;
577 for (
const auto& elt2 : elt.second) {
578 m_stream <<
" " << elt2;
581 m_stream << std::endl;
585 m_stream << elt.first;
588 for (
const auto& elt2 : elt.second) {
589 m_stream <<
" " << elt2;
592 m_stream << std::endl;
598 template <
typename GUM_SCALAR >
600 std::stringstream output;
605 Size esize =
Size(elt.second.size());
607 for (
Size mod = 0; mod < esize; mod++) {
608 output <<
"P(" <<
_credalNet->current_bn().variable(elt.first).name()
609 <<
"=" << mod <<
"|e) = [ ";
610 output << _marginalMin[elt.first][mod] <<
", " 626 template <
typename GUM_SCALAR >
629 std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);
631 if (!m_stream.good()) {
633 "void InferenceEngine< GUM_SCALAR >::saveVertices(const " 634 "std::string & path) : could not open outpul file : " 639 m_stream <<
_credalNet->current_bn().variable(elt.first).name()
642 for (
const auto& elt2 : elt.second) {
646 for (
const auto& elt3 : elt2) {
662 template <
typename GUM_SCALAR >
669 for (
auto node :
_credalNet->current_bn().nodes()) {
670 auto dSize =
_credalNet->current_bn().variable(node).domainSize();
679 template <
typename GUM_SCALAR >
685 for (
auto node :
_credalNet->current_bn().nodes())
694 template <
typename GUM_SCALAR >
699 if (
_modal.empty())
return;
701 for (
auto node :
_credalNet->current_bn().nodes()) {
702 std::string var_name, time_step;
704 var_name =
_credalNet->current_bn().variable(node).name();
705 auto delim = var_name.find_first_of(
"_");
706 var_name = var_name.substr(0, delim);
708 if (!
_modal.exists(var_name))
continue;
715 template <
typename GUM_SCALAR >
720 template <
typename GUM_SCALAR >
740 outerMap expectationsMin, expectationsMax;
743 std::string var_name, time_step;
745 var_name =
_credalNet->current_bn().variable(elt.first).name();
746 auto delim = var_name.find_first_of(
"_");
747 time_step = var_name.substr(delim + 1, var_name.size());
748 var_name = var_name.substr(0, delim);
753 if (!
_modal.exists(var_name))
continue;
755 expectationsMin.getWithDefault(var_name, innerMap())
756 .getWithDefault(atoi(time_step.c_str()), 0) =
758 expectationsMax.getWithDefault(var_name, innerMap())
759 .getWithDefault(atoi(time_step.c_str()), 0) =
763 for (
const auto& elt : expectationsMin) {
764 typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
766 for (
const auto& elt2 : elt.second)
767 dynExp[elt2.first] = elt2.second;
772 for (
const auto& elt : expectationsMax) {
773 typename std::vector< GUM_SCALAR > dynExp(elt.second.size());
775 for (
const auto& elt2 : elt.second) {
776 dynExp[elt2.first] = elt2.second;
783 template <
typename GUM_SCALAR >
790 for (
auto node :
_credalNet->current_bn().dag().nodes()) {
791 std::string var_name =
_credalNet->current_bn().variable(node).name();
792 auto delim = var_name.find_first_of(
"_");
794 if (delim > var_name.size()) {
796 "void InferenceEngine< GUM_SCALAR " 797 ">::_repetitiveInit() : the network does not " 798 "appear to be dynamic");
801 std::string time_step = var_name.substr(delim + 1, 1);
803 if (time_step.compare(
"0") == 0)
_t0.
insert(node, std::vector< NodeId >());
807 for (
const auto& node :
_credalNet->current_bn().dag().nodes()) {
808 std::string var_name =
_credalNet->current_bn().variable(node).name();
809 auto delim = var_name.find_first_of(
"_");
810 std::string time_step = var_name.substr(delim + 1, var_name.size());
811 var_name = var_name.substr(0, delim);
812 delim = time_step.find_first_of(
"_");
813 time_step = time_step.substr(0, delim);
815 if (time_step.compare(
"1") == 0) {
818 for (
const auto& elt :
_t0) {
819 std::string var_0_name =
820 _credalNet->current_bn().variable(elt.first).name();
821 delim = var_0_name.find_first_of(
"_");
822 var_0_name = var_0_name.substr(0, delim);
824 if (var_name.compare(var_0_name) == 0) {
830 if (potential->domainSize() == potential2->domainSize())
831 _t0[elt.first].push_back(node);
833 _t1.
insert(node, std::vector< NodeId >());
840 if (!found) {
_t1.
insert(node, std::vector< NodeId >()); }
846 for (
auto node :
_credalNet->current_bn().dag().nodes()) {
847 std::string var_name =
_credalNet->current_bn().variable(node).name();
848 auto delim = var_name.find_first_of(
"_");
849 std::string time_step = var_name.substr(delim + 1, var_name.size());
850 var_name = var_name.substr(0, delim);
851 delim = time_step.find_first_of(
"_");
852 time_step = time_step.substr(0, delim);
854 if (time_step.compare(
"0") != 0 && time_step.compare(
"1") != 0) {
859 std::string var_0_name;
862 for (
const auto& elt :
_t0) {
863 std::string var_0_name =
864 _credalNet->current_bn().variable(elt.first).name();
865 delim = var_0_name.find_first_of(
"_");
866 var_0_name = var_0_name.substr(0, delim);
868 if (var_name.compare(var_0_name) == 0) {
874 if (potential->domainSize() == potential2->domainSize()) {
875 _t0[elt.first].push_back(node);
883 for (
const auto& elt :
_t1) {
884 std::string var_0_name =
885 _credalNet->current_bn().variable(elt.first).name();
886 auto delim = var_0_name.find_first_of(
"_");
887 var_0_name = var_0_name.substr(0, delim);
889 if (var_name.compare(var_0_name) == 0) {
895 if (potential->domainSize() == potential2->domainSize()) {
896 _t1[elt.first].push_back(node);
906 template <
typename GUM_SCALAR >
908 const NodeId&
id,
const std::vector< GUM_SCALAR >& vertex) {
909 std::string var_name =
_credalNet->current_bn().variable(
id).name();
910 auto delim = var_name.find_first_of(
"_");
912 var_name = var_name.substr(0, delim);
914 if (
_modal.exists(var_name) ) {
916 auto vsize = vertex.size();
918 for (
Size mod = 0; mod < vsize; mod++)
919 exp += vertex[mod] *
_modal[var_name][mod];
927 template <
typename GUM_SCALAR >
930 const std::vector< GUM_SCALAR >& vertex,
931 const bool& elimRedund) {
933 auto dsize = vertex.size();
937 for (
auto it = nodeCredalSet.cbegin(), itEnd = nodeCredalSet.cend();
942 for (
Size i = 0; i < dsize; i++) {
943 if (std::fabs(vertex[i] - (*it)[i]) > 1e-6) {
952 if (!eq || nodeCredalSet.size() == 0) {
953 nodeCredalSet.push_back(vertex);
959 if (nodeCredalSet.size() == 1)
return;
964 auto itEnd = std::remove_if(
965 nodeCredalSet.begin(),
967 [&](
const std::vector< GUM_SCALAR >& v) ->
bool {
968 for (
auto jt = v.cbegin(),
974 jt != jtEnd && minIt != minItEnd && maxIt != maxItEnd;
975 ++jt, ++minIt, ++maxIt) {
976 if ((std::fabs(*jt - *minIt) < 1e-6 || std::fabs(*jt - *maxIt) < 1e-6)
977 && std::fabs(*minIt - *maxIt) > 1e-6)
983 nodeCredalSet.erase(itEnd, nodeCredalSet.end());
986 if (!elimRedund || nodeCredalSet.size() <= 2)
return;
993 lrsWrapper.
setUpV((
unsigned int)dsize, (
unsigned int)(nodeCredalSet.size()));
995 for (
const auto& vtx : nodeCredalSet)
996 lrsWrapper.
fillV(vtx);
1003 template <
typename GUM_SCALAR >
1009 template <
typename GUM_SCALAR >
1015 template <
typename GUM_SCALAR >
1018 #pragma omp parallel 1020 GUM_SCALAR tEps = 0;
1028 for (
int i = 0; i < nsize; i++) {
1031 for (
Size j = 0; j < dSize; j++) {
1034 delta = (delta < 0) ? (-delta) : delta;
1035 tEps = (tEps < delta) ? delta : tEps;
1039 delta = (delta < 0) ? (-delta) : delta;
1040 tEps = (tEps < delta) ? delta : tEps;
1047 #pragma omp critical(epsilon_max) 1049 #pragma omp flush(eps) 1050 eps = (eps < tEps) ? tEps : eps;
virtual ~InferenceEngine()
Destructor.
const NodeProperty< std::vector< NodeId > > & getT1Cluster() const
Get the _t1 cluster.
aGrUM's Potential is a multi-dimensional array with tensor operators.
const std::vector< GUM_SCALAR > & marginalMax(const NodeId id) const
Get the upper marginals of a given node id.
const GUM_SCALAR & expectationMax(const NodeId id) const
Get the upper expectation of a given node id.
margi _oldMarginalMin
Old lower marginals used to compute epsilon.
void insertModals(const std::map< std::string, std::vector< GUM_SCALAR > > &modals)
Insert variables modalities from map to compute expectations.
Size size() const noexcept
Returns the number of elements stored into the hashtable.
void insertModalsFile(const std::string &path)
Insert variables modalities from file to compute expectations.
bool _storeBNOpt
True if optimal IBayesNets are stored during inference, False otherwise.
void _dynamicExpectations()
Rearrange lower and upper expectations to suit dynamic networks.
dynExpe _dynamicExpMin
Lower dynamic expectations.
bool repetitiveInd() const
Get the current independence status.
credalSet _marginalSets
Credal sets vertices, if enabled.
virtual void insertEvidenceFile(const std::string &path)
Insert evidence from file.
const std::vector< GUM_SCALAR > & marginalMin(const NodeId id) const
Get the lower marginals of a given node id.
void insertQueryFile(const std::string &path)
Insert query variables states from file.
margi _marginalMin
Lower marginals.
bool storeVertices() const
Get the credal set vertices storing status (True if vertices are stored during inference, False otherwise).
int _timeSteps
The number of time steps of this network (only useful for dynamic networks).
bool exists(const Key &key) const
Checks whether there exists an element with a given key in the hashtable.
const std::vector< std::vector< GUM_SCALAR > > & vertices(const NodeId id) const
Get the vertices of a given node id.
margi _oldMarginalMax
Old upper marginals used to compute epsilon.
VarMod2BNsMap< GUM_SCALAR > * getVarMod2BNsMap()
Get optimum IBayesNet.
const std::vector< GUM_SCALAR > & dynamicExpMax(const std::string &varName) const
Get the upper dynamic expectation of a given variable prefix (without the time step included...
void dynamicExpectations()
Compute dynamic expectations.
const GUM_SCALAR _computeEpsilon()
Compute approximation scheme epsilon using the old marginals and the new ones.
Copyright 2005-2019 Pierre-Henri WUILLEMIN et Christophe GONZALES (LIP6) {prenom.nom}_at_lip6.fr.
void saveVertices(const std::string &path) const
Saves vertices to file.
void saveMarginals(const std::string &path) const
Saves marginals to file.
Copyright 2005-2019 Pierre-Henri WUILLEMIN et Christophe GONZALES (LIP6) {prenom.nom}_at_lip6.fr.
void _repetitiveInit()
Initialize _t0 and _t1 clusters.
expe _expectationMax
Upper expectations, if some variables modalities were inserted.
InferenceEngine(const CredalNet< GUM_SCALAR > &credalNet)
Constructor.
The class for generic Hash Tables.
cluster _t0
Clusters of nodes used with dynamic networks.
std::string toString() const
Print all nodes marginals to standard output.
void fillV(const std::vector< GUM_SCALAR > &vertex)
Creates the V-representation of a polytope by adding a vertex to the problem input __input...
Class template representing a Credal Network.
Class template acting as a wrapper for Lexicographic Reverse Search by David Avis.
VarMod2BNsMap< GUM_SCALAR > _dbnOpt
Object used to efficiently store optimal bayes net during inference, for some algorithms.
const CredalNet< GUM_SCALAR > * _credalNet
A pointer to the Credal Net used.
const const_iterator & cend() const noexcept
Returns the unsafe const_iterator pointing to the end of the hashtable.
dynExpe _dynamicExpMax
Upper dynamic expectations.
void _initMarginals()
Initialize lower and upper old marginals and marginals before inference, with the lower marginal bein...
void insertQuery(const NodeProperty< std::vector< bool > > &query)
Insert query variables and states from Property.
query _query
Holds the query nodes states.
bool _repetitiveInd
True if using repetitive independence ( dynamic network only ), False otherwise.
void setUpV(const Size &card, const Size &vertices)
Sets up a V-representation.
const_iterator cbegin() const
Returns an unsafe const_iterator pointing to the beginning of the hashtable.
dynExpe _modal
Variables modalities used to compute expectations.
void _updateCredalSets(const NodeId &id, const std::vector< GUM_SCALAR > &vertex, const bool &elimRedund=false)
Given a node id and one of its possible vertices, update its credal set.
Abstract class template representing a CredalNet inference engine.
void _initMarginalSets()
Initialize credal set vertices with empty sets.
const CredalNet< GUM_SCALAR > & credalNet()
Get this credal network.
margi _evidence
Holds observed variables states.
cluster _t1
Clusters of nodes used with dynamic networks.
void clear()
Removes all the elements in the hash table.
expe _expectationMin
Lower expectations, if some variables modalities were inserted.
void elimRedundVrep()
V-Redundancy elimination.
const NodeProperty< std::vector< NodeId > > & getT0Cluster() const
Get the _t0 cluster.
const matrix & getOutput() const
Get the output matrix solution of the problem.
bool _storeVertices
True if credal sets vertices are stored, False otherwise.
void setRepetitiveInd(const bool repetitive)
std::size_t Size
In aGrUM, hashed values are unsigned long int.
const GUM_SCALAR & expectationMin(const NodeId id) const
Get the lower expectation of a given node id.
value_type & insert(const Key &key, const Val &val)
Adds a new element (actually a copy of this element) into the hash table.
Class used to store optimum IBayesNet during some inference algorithms.
void _initExpectations()
Initialize lower and upper expectations before inference, with the lower expectation being initialize...
void saveExpectations(const std::string &path) const
Saves expectations to file.
bool empty() const noexcept
Indicates whether the hash table is empty.
void insertEvidence(const std::map< std::string, std::vector< GUM_SCALAR > > &eviMap)
Insert evidence from map.
Size NodeId
Type for node ids.
void _updateExpectations(const NodeId &id, const std::vector< GUM_SCALAR > &vertex)
Given a node id and one of its possible vertices obtained during inference, update this node's lower and...
margi _marginalMax
Upper marginals.
#define GUM_ERROR(type, msg)
virtual void eraseAllEvidence()
Erase all inference related data to perform another one.
const std::vector< GUM_SCALAR > & dynamicExpMin(const std::string &varName) const
Get the lower dynamic expectation of a given variable prefix (without the time step included...