template < typename GUM_SCALAR >
VarMod2BNsMap< GUM_SCALAR >::VarMod2BNsMap() {
  cnet = nullptr;
  GUM_CONSTRUCTOR(VarMod2BNsMap);
}

template < typename GUM_SCALAR >
VarMod2BNsMap< GUM_SCALAR >::VarMod2BNsMap(const CredalNet< GUM_SCALAR >& cn) {
  setCNet(cn);
  GUM_CONSTRUCTOR(VarMod2BNsMap);
}

template < typename GUM_SCALAR >
VarMod2BNsMap< GUM_SCALAR >::~VarMod2BNsMap() {
  GUM_DESTRUCTOR(VarMod2BNsMap);
}

template < typename GUM_SCALAR >
void VarMod2BNsMap< GUM_SCALAR >::setCNet(const CredalNet< GUM_SCALAR >& cn) {
  auto* cpt    = &cn.credalNet_currentCpt();
  auto  nNodes = cpt->size();
  _sampleDef.resize(nNodes);

  for (NodeId node = 0; node < nNodes; node++) {
    auto pConfs = (*cpt)[node].size();
    _sampleDef[node].resize(pConfs);

    for (Size pconf = 0; pconf < pConfs; pconf++) {
      Size nVertices = Size((*cpt)[node][pconf].size());
      // b: number of bits, c: closest superior power of two of nVertices
      unsigned long b, c;
      superiorPow(static_cast< unsigned long >(nVertices), b, c);
      Size nBits = Size(b);
      _sampleDef[node][pconf].resize(nBits);
    }
  }

  cnet = &cn;
}
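superiorPow (see the reference entries at the end of this listing) computes the number of bits and the closest superior power of two of an integer, so each entry of _sampleDef reserves just enough bits to encode the index of a vertex of the corresponding credal set. A minimal standalone sketch of that computation, with hypothetical names and no dependency on aGrUM:

#include <iostream>

// Illustrative stand-in for the superiorPow call above: given card, compute
// num_bits and new_card such that new_card = 2^num_bits >= card.
// The function name is hypothetical; it is not aGrUM's API.
void superiorPowSketch(unsigned long card,
                       unsigned long& num_bits,
                       unsigned long& new_card) {
  num_bits = 0;
  new_card = 1;
  while (new_card < card) {
    new_card <<= 1;
    ++num_bits;
  }
}

int main() {
  unsigned long bits = 0, pow2 = 0;
  superiorPowSketch(6, bits, pow2);   // a credal set with 6 vertices
  std::cout << bits << " bits, padded to " << pow2 << " vertex indices\n";
  // prints: 3 bits, padded to 8 vertex indices
  return 0;
}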
template < typename GUM_SCALAR >
bool VarMod2BNsMap< GUM_SCALAR >::insert(const std::vector< bool >& bn,
                                         const std::vector< Size >& key) {
  _currentHash = Size(_vectHash(bn));
  std::list< Size >& nets = _myVarHashs.getWithDefault(key, std::list< Size >());

  // reject the net if this key already stores it
  for (std::list< Size >::iterator it = nets.begin(); it != nets.end(); ++it) {
    if (*it == _currentHash) return false;
  }

  // store the net under its hash
  _myHashNet.set(_currentHash, bn);
  // remember the net hash for this key ...
  nets.push_back(_currentHash);
  // ... and the key for this net hash
  _myHashVars.getWithDefault(_currentHash, std::list< varKey >()).push_back(key);

  return true;
}
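Both insert overloads rely on _vectHash to detect whether a sampled net is already stored under a key. A self-contained sketch of that deduplication test, assuming _vectHash behaves like std::hash< std::vector< bool > > (an assumption, not something this listing states):

#include <cstddef>
#include <functional>
#include <iostream>
#include <list>
#include <vector>

// Two identical bit vectors hash to the same value, so the second insert
// attempt for the same key is rejected. Names are illustrative only.
int main() {
  std::hash< std::vector< bool > > vectHash;

  std::vector< bool > netA = {true, false, true, true};
  std::vector< bool > netB = {true, false, true, true};   // same optimum again

  std::list< std::size_t > netsOfKey;   // hashes already stored for a key
  netsOfKey.push_back(vectHash(netA));

  bool duplicate = false;
  for (std::size_t h : netsOfKey)
    if (h == vectHash(netB)) duplicate = true;

  std::cout << std::boolalpha << duplicate << "\n";   // true: netB is rejected
  return 0;
}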
template < typename GUM_SCALAR >
bool VarMod2BNsMap< GUM_SCALAR >::insert(const std::vector< Size >& key,
                                         const bool isBetter) {
  if (isBetter) {
    // the new net replaces every net previously stored for this key
    std::list< Size >& old_nets = _myVarHashs.getWithDefault(key, std::list< Size >());

    for (std::list< Size >::iterator it = old_nets.begin();
         it != old_nets.end();
         ++it) {
      // keys that reference this old net
      std::list< varKey >& netKeys = _myHashVars.getWithDefault(*it, std::list< varKey >());

      if (netKeys.size() == 1) {
        // we were the sole user of this net: drop it
        _myHashVars.erase(*it);
      } else {
        // other keys still use it: only remove our key from its list
        for (std::list< varKey >::iterator it2 = netKeys.begin();
             it2 != netKeys.end();
             ++it2) {
          if (*it2 == key) {
            netKeys.erase(it2);
            break;
          }
        }
      }
    }

    old_nets.clear();
    _myHashNet.set(_currentHash, _currentSample);
    old_nets.push_back(_currentHash);
    _myHashVars.getWithDefault(_currentHash, std::list< varKey >()).push_back(key);
    return true;
  } else {
    // same quality: keep the net unless this key already stores it
    std::list< Size >& nets = _myVarHashs.getWithDefault(key, std::list< Size >());

    for (std::list< Size >::iterator it = nets.begin(); it != nets.end(); ++it) {
      if (*it == _currentHash) return false;
    }

    _myHashNet.set(_currentHash, _currentSample);
    nets.push_back(_currentHash);
    _myHashVars.getWithDefault(_currentHash, std::list< varKey >()).push_back(key);
    return true;
  }
}
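In the overload above, the isBetter branch evicts every net previously stored for the key before recording the current sample, while the else branch only appends the sample when it is not already stored. A condensed, standard-library-only sketch of that bookkeeping (illustrative containers and names, not aGrUM's):

#include <cstddef>
#include <iostream>
#include <list>
#include <map>
#include <vector>

int main() {
  using Key = std::vector< std::size_t >;
  std::map< Key, std::list< std::size_t > > varHashes;   // key -> net hashes

  Key key = {0, 1, 0};                // e.g. variable, modality, min or max
  varHashes[key] = {111, 222};        // two equally good optima stored so far

  bool        isBetter    = true;
  std::size_t currentHash = 333;      // hash of the current thread sample

  if (isBetter)
    varHashes[key] = {currentHash};          // a better net evicts the old optima
  else
    varHashes[key].push_back(currentHash);   // an equal one is appended

  std::cout << varHashes[key].size() << "\n";   // 1
  return 0;
}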
template < typename GUM_SCALAR >
void VarMod2BNsMap< GUM_SCALAR >::setCurrentSample(
   const std::vector< std::vector< std::vector< bool > > >& sample) {
  _currentSample.clear();

  // flatten the node / parent-configuration / bit structure into one vector
  for (Size i = 0; i < sample.size(); i++)
    for (Size j = 0; j < sample[i].size(); j++)
      for (Size k = 0; k < sample[i][j].size(); k++)
        _currentSample.push_back(sample[i][j][k]);

  _currentHash = Size(_vectHash(_currentSample));
}
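setCurrentSample flattens the node / parent-configuration / bit structure into a single bit vector before hashing it. A standalone sketch of the same flattening, again assuming a std::hash< std::vector< bool > >-style hasher:

#include <cstddef>
#include <functional>
#include <iostream>
#include <vector>

int main() {
  // node 0: one parent configuration of 2 bits;
  // node 1: two parent configurations of 1 and 2 bits
  std::vector< std::vector< std::vector< bool > > > sample = {
     {{true, false}}, {{false}, {true, true}}};

  std::vector< bool > flat;
  for (const auto& node : sample)
    for (const auto& pconf : node)
      for (bool bit : pconf) flat.push_back(bit);

  std::size_t hash = std::hash< std::vector< bool > >{}(flat);
  std::cout << flat.size() << " bits, hash " << hash << "\n";   // 5 bits, ...
  return 0;
}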
template < typename GUM_SCALAR >
const typename VarMod2BNsMap< GUM_SCALAR >::dBN&
   VarMod2BNsMap< GUM_SCALAR >::getCurrentSample() {
  return _currentSample;
}

template < typename GUM_SCALAR >
const std::vector< std::vector< std::vector< bool > > >&
   VarMod2BNsMap< GUM_SCALAR >::getSampleDef() {
  return _sampleDef;
}
template < typename GUM_SCALAR >
const std::vector< std::vector< bool >* >
   VarMod2BNsMap< GUM_SCALAR >::getBNOptsFromKey(const std::vector< Size >& key) {
  // return an empty vector if no net is stored for this key
  if (!_myVarHashs.exists(key)) return std::vector< std::vector< bool >* >();

  std::list< Size >& netsHash = _myVarHashs[key];

  std::vector< dBN* > nets;
  nets.resize(netsHash.size());

  std::list< Size >::iterator it = netsHash.begin();

  // the returned pointers refer to the nets stored in _myHashNet
  for (Size i = 0; i < netsHash.size(); i++, ++it) {
    nets[i] = &_myHashNet[*it];
  }

  return nets;
}
template < typename GUM_SCALAR >
std::vector< std::vector< std::vector< std::vector< bool > > > >
   VarMod2BNsMap< GUM_SCALAR >::getFullBNOptsFromKey(
      const std::vector< Size >& key) {
  if (cnet == nullptr)
    GUM_ERROR(OperationNotAllowed,
              "No CredalNet associated to me ! Can't get FullBNOptsFromKey : "
                 << key);

  if (!_myVarHashs.exists(key))
    return std::vector< std::vector< std::vector< std::vector< bool > > > >();

  std::list< Size >& netsHash = _myVarHashs[key];

  std::vector< std::vector< std::vector< std::vector< bool > > > > nets;
  nets.resize(netsHash.size(), _sampleDef);

  std::list< Size >::iterator it = netsHash.begin();

  for (Size i = 0; i < netsHash.size(); i++, ++it) {
    // copy the flat bit vector back into the _sampleDef layout
    dBN::iterator it2 = _myHashNet[*it].begin();

    for (Size j = 0; j < _sampleDef.size(); j++) {
      for (Size k = 0; k < _sampleDef[j].size(); k++) {
        for (Size l = 0; l < _sampleDef[j][k].size(); l++) {
          nets[i][j][k][l] = *it2;
          ++it2;
        }
      }
    }
  }

  return nets;
}

template < typename GUM_SCALAR >
Size VarMod2BNsMap< GUM_SCALAR >::getEntrySize() const {
  return _myHashNet.size();
}
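getFullBNOptsFromKey performs the inverse of the flattening done by setCurrentSample: each stored flat bit vector is copied back into the layout recorded in _sampleDef. A standard-library sketch of that reshaping over a hand-made layout:

#include <cstddef>
#include <iostream>
#include <vector>

int main() {
  // layout: node 0 has one parent configuration of 2 bits,
  //         node 1 has two parent configurations of 1 and 2 bits
  std::vector< std::vector< std::vector< bool > > > layout = {
     {{false, false}}, {{false}, {false, false}}};
  std::vector< bool > flat = {true, false, false, true, true};

  auto net = layout;                   // copy the layout, then fill it
  auto it  = flat.begin();
  for (auto& node : net)
    for (auto& pconf : node)
      for (std::size_t b = 0; b < pconf.size(); ++b, ++it) pconf[b] = *it;

  std::cout << std::boolalpha << net[1][1][0] << "\n";   // true
  return 0;
}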
VarMod2BNsMap< GUM_SCALAR >
Class used to store optimum IBayesNet during some inference algorithms.
CredalNet< GUM_SCALAR >
Class template representing a Credal Network.
VarMod2BNsMap()
Default constructor that should be used only by InferenceEngine since it has a member variable...
~VarMod2BNsMap()
Destructor.
void setCNet(const CredalNet< GUM_SCALAR > &cn)
Initialize __sampleDef from the CredalNet.
bool insert(const std::vector< bool > &bn, const std::vector< Size > &key)
Insert for thread fusion.
void setCurrentSample(const std::vector< std::vector< std::vector< bool > > > &sample)
Set the current thread sample and its hash.
const dBN & getCurrentSample()
Get the current sample as a vector of bits without structure.
const std::vector< std::vector< std::vector< bool > > > & getSampleDef()
Get the sample structure.
const std::vector< dBN *> getBNOptsFromKey(const std::vector< Size > &key)
Get optimum IBayesNet(s) without structure of the given variable, modality for min or max...
std::vector< std::vector< std::vector< std::vector< bool > > > > getFullBNOptsFromKey(const std::vector< Size > &key)
Get optimum IBayesNet(s) with structure of the given variable, modality for min or max...
Size getEntrySize() const
Get the number of IBayesNets stored.
const NodeProperty< std::vector< std::vector< std::vector< GUM_SCALAR > > > > & credalNet_currentCpt() const
void superiorPow(unsigned long card, unsigned long &num_bits, unsigned long &new_card)
Compute the superior and closest power of two of an integer.
std::size_t Size
In aGrUM, hashed values are unsigned long int.
Size NodeId
Type for node ids.
#define GUM_ERROR(type, msg)
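Taken together, the class maintains three tables: net hash to bits (_myHashNet), key to net hashes (_myVarHashs), and net hash to keys (_myHashVars). A condensed view rebuilt with standard containers (aGrUM itself uses its own HashTable; the names below are illustrative stand-ins, not the real members):

#include <cstddef>
#include <list>
#include <map>
#include <vector>

using dBN    = std::vector< bool >;          // a sampled net, as flat bits
using varKey = std::vector< std::size_t >;   // e.g. variable, modality, min/max

struct OptNetStoreSketch {
  std::map< std::size_t, dBN >                 hashToNet;    // ~ _myHashNet
  std::map< varKey, std::list< std::size_t > > keyToHashes;  // ~ _myVarHashs
  std::map< std::size_t, std::list< varKey > > hashToKeys;   // ~ _myHashVars
};

int main() {
  OptNetStoreSketch store;
  store.hashToNet[42] = {true, false, true};
  store.keyToHashes[{0, 1, 0}].push_back(42);
  store.hashToKeys[42].push_back({0, 1, 0});
  return 0;
}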