#include <agrum/tools/core/exceptions.h>

namespace gum {
  namespace credal {

    // Constructor.
    template < typename GUM_SCALAR, class BNInferenceEngine >
    CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::CNMonteCarloSampling(
        const CredalNet< GUM_SCALAR >& credalNet) :
        MultipleInferenceEngine< GUM_SCALAR, BNInferenceEngine >(credalNet) {
      _infEs_::repetitiveInd_ = false;
      _infEs_::storeVertices_ = false;
      _infEs_::storeBNOpt_    = false;

      this->enableMaxTime();
      this->setPeriodSize(1000);

      GUM_CONSTRUCTOR(CNMonteCarloSampling);
    }
    // Destructor.
    template < typename GUM_SCALAR, class BNInferenceEngine >
    CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::~CNMonteCarloSampling() {
      GUM_DESTRUCTOR(CNMonteCarloSampling);
    }
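    // Starts the inference. Each period, every thread samples a BayesNet from the
    // CredalNet, runs an exact inference on it with BNInferenceEngine and updates
    // its local bounds; the thread results are then merged and the stopping
    // criterion of the approximation scheme is checked.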
    template < typename GUM_SCALAR, class BNInferenceEngine >
    void CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::makeInference() {
      if (_infEs_::repetitiveInd_) {
        try {
          this->repetitiveInit_();
        } catch (InvalidArgument& err) {
          // repetitive independence cannot be used on this network
          GUM_SHOWERROR(err);
          _infEs_::repetitiveInd_ = false;
        }
      }

      _mcInitApproximationScheme_();
      _mcThreadDataCopy_();

      GUM_SCALAR eps   = 1.;
      auto       psize = this->periodSize();

      do {
        if (this->continueApproximationScheme(eps)) {
#pragma omp parallel for
          for (int iter = 0; iter < int(psize); iter++) {
            _threadInference_();
            _threadUpdate_();
          }   // end of : parallel period

          this->updateApproximationScheme(int(psize));
          this->updateMarginals_();        // merge the thread marginals
          eps = this->computeEpsilon_();   // also updates the previous marginals
        }
      } while (this->continueApproximationScheme(eps));

      if (!this->modal_.empty()) { this->expFusion_(); }

      if (_infEs_::storeBNOpt_) { this->optFusion_(); }

      if (_infEs_::storeVertices_) { this->verticesFusion_(); }

      if (!this->modal_.empty()) { this->dynamicExpectations_(); }
    }
    // Update thread data after a IBayesNet inference.
    template < typename GUM_SCALAR, class BNInferenceEngine >
    void CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::_threadUpdate_() {
      int tId = getThreadNumber();

      if (this->l_inferenceEngine_[tId]->evidenceProbability() > 0) {
        const DAG& tDag = this->workingSet_[tId]->dag();

        for (auto node: tDag.nodes()) {
          const Potential< GUM_SCALAR >& potential(
              this->l_inferenceEngine_[tId]->posterior(node));
          Instantiation             ins(potential);
          std::vector< GUM_SCALAR > vertex;

          for (ins.setFirst(); !ins.end(); ++ins) {
            vertex.push_back(potential[ins]);
          }

          // update the thread's bounds with this vertex
          this->updateThread_(node, vertex, false);
        }   // end of : for all nodes
      }     // end of : if evidence probability > 0
    }
    // Thread performs an inference using BNInferenceEngine.
    template < typename GUM_SCALAR, class BNInferenceEngine >
    void CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::_threadInference_() {
      int tId = getThreadNumber();
      _verticesSampling_();

      this->l_inferenceEngine_[tId]->eraseAllEvidence();
      _insertEvidence_();
      this->l_inferenceEngine_[tId]->makeInference();
    }
    // Initialize the approximation scheme.
    template < typename GUM_SCALAR, class BNInferenceEngine >
    void CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::_mcInitApproximationScheme_() {
      this->setEpsilon(0.);
      this->enableEpsilon();

      this->disableMinEpsilonRate();
      this->disableMaxIter();

      this->initApproximationScheme();
    }
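    // Each thread gets its own copy of the current BayesNet, of the marginal and
    // expectation bounds, and its own evidence list and BNInferenceEngine, so the
    // parallel sampling loop can update them without synchronisation; the
    // per-thread results are merged by updateMarginals_() and the *Fusion_()
    // calls of makeInference().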
    // Initialize threads data.
    template < typename GUM_SCALAR, class BNInferenceEngine >
    void CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::_mcThreadDataCopy_() {
      int num_threads;

#pragma omp parallel
      {
        int this_thread = getThreadNumber();

        // one thread allocates the shared containers for all threads
#pragma omp single
        {
          num_threads = getNumberOfRunningThreads();

          this->initThreadsData_(num_threads, _infEs_::storeVertices_, _infEs_::storeBNOpt_);
          this->l_inferenceEngine_.resize(num_threads, nullptr);
        }   // end of : single region (implicit barrier)

        // each thread works on its own copy of the current BayesNet
        BayesNet< GUM_SCALAR >* thread_bn = new BayesNet< GUM_SCALAR >();
#pragma omp critical(Init)
        {
          *thread_bn = this->credalNet_->current_bn();
        }
        this->workingSet_[this_thread] = thread_bn;

        this->l_marginalMin_[this_thread]    = this->marginalMin_;
        this->l_marginalMax_[this_thread]    = this->marginalMax_;
        this->l_expectationMin_[this_thread] = this->expectationMin_;
        this->l_expectationMax_[this_thread] = this->expectationMax_;
        this->l_modal_[this_thread]          = this->modal_;

        _infEs_::l_clusters_[this_thread].resize(2);
        _infEs_::l_clusters_[this_thread][0] = _infEs_::t0_;
        _infEs_::l_clusters_[this_thread][1] = _infEs_::t1_;

        if (_infEs_::storeVertices_) { this->l_marginalSets_[this_thread] = this->marginalSets_; }

        List< const Potential< GUM_SCALAR >* >* evi_list
            = new List< const Potential< GUM_SCALAR >* >();
        this->workingSetE_[this_thread] = evi_list;

        BNInferenceEngine* inference_engine
            = new BNInferenceEngine((this->workingSet_[this_thread]),
                                    RelevantPotentialsFinderType::FIND_ALL);

        this->l_inferenceEngine_[this_thread] = inference_engine;

        if (_infEs_::storeBNOpt_) {
          VarMod2BNsMap< GUM_SCALAR >* threadOpt
              = new VarMod2BNsMap< GUM_SCALAR >(*this->credalNet_);
          this->l_optimalNet_[this_thread] = threadOpt;
        }
      }   // end of : parallel region
    }
    // Store the binary representation of the index of the sampled vertex
    // (least significant bit first).
    template < typename GUM_SCALAR, class BNInferenceEngine >
    void CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::_binaryRep_(
        std::vector< bool >& toFill,
        const Idx            value) const {
      Idx n = value;

      for (Size i = 0; i < toFill.size(); i++) {
        toFill[i] = n & 1;
        n /= 2;
      }
    }
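    // Thread samples a IBayesNet from the CredalNet: for every row of every CPT
    // of the working BayesNet, one vertex of the corresponding credal set is
    // drawn at random (rand() % number_of_vertices) and copied into the CPT;
    // when storeBNOpt_ is set, the index of the drawn vertex is also recorded
    // through _binaryRep_ so that the sampled net can be identified later.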
    template < typename GUM_SCALAR, class BNInferenceEngine >
    void CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::_verticesSampling_() {
      int this_thread = getThreadNumber();
      IBayesNet< GUM_SCALAR >* working_bn = this->workingSet_[this_thread];

      const auto cpt = &this->credalNet_->credalNet_currentCpt();

      using dBN = std::vector< std::vector< std::vector< bool > > >;

      dBN sample;

      if (_infEs_::storeBNOpt_) { sample = dBN(this->l_optimalNet_[this_thread]->getSampleDef()); }

      if (_infEs_::repetitiveInd_) {
        const auto& t0 = _infEs_::l_clusters_[this_thread][0];
        const auto& t1 = _infEs_::l_clusters_[this_thread][1];

        for (const auto& elt: t0) {
          auto dSize = working_bn->variable(elt.first).domainSize();
          Potential< GUM_SCALAR >* potential(
              const_cast< Potential< GUM_SCALAR >* >(&working_bn->cpt(elt.first)));
          std::vector< GUM_SCALAR > var_cpt(potential->domainSize());

          Size pconfs = Size((*cpt)[elt.first].size());

          for (Size pconf = 0; pconf < pconfs; pconf++) {
            Size choosen_vertex = rand() % (*cpt)[elt.first][pconf].size();

            if (_infEs_::storeBNOpt_) { _binaryRep_(sample[elt.first][pconf], choosen_vertex); }

            for (Size mod = 0; mod < dSize; mod++) {
              var_cpt[pconf * dSize + mod] = (*cpt)[elt.first][pconf][choosen_vertex][mod];
            }   // end of : for each modality
          }     // end of : for each parent configuration

          potential->fillWith(var_cpt);

          Size t0esize = Size(elt.second.size());

          // the other nodes of the cluster share the same sampled CPT
          for (Size pos = 0; pos < t0esize; pos++) {
            if (_infEs_::storeBNOpt_) { sample[elt.second[pos]] = sample[elt.first]; }

            Potential< GUM_SCALAR >* potential2(
                const_cast< Potential< GUM_SCALAR >* >(&working_bn->cpt(elt.second[pos])));
            potential2->fillWith(var_cpt);
          }
        }   // end of : for each elt of t0

        for (const auto& elt: t1) {
          auto dSize = working_bn->variable(elt.first).domainSize();
          Potential< GUM_SCALAR >* potential(
              const_cast< Potential< GUM_SCALAR >* >(&working_bn->cpt(elt.first)));
          std::vector< GUM_SCALAR > var_cpt(potential->domainSize());

          for (Size pconf = 0; pconf < (*cpt)[elt.first].size(); pconf++) {
            Idx choosen_vertex = Idx(rand() % (*cpt)[elt.first][pconf].size());

            if (_infEs_::storeBNOpt_) { _binaryRep_(sample[elt.first][pconf], choosen_vertex); }

            for (decltype(dSize) mod = 0; mod < dSize; mod++) {
              var_cpt[pconf * dSize + mod] = (*cpt)[elt.first][pconf][choosen_vertex][mod];
            }
          }

          potential->fillWith(var_cpt);

          auto t1esize = elt.second.size();

          for (decltype(t1esize) pos = 0; pos < t1esize; pos++) {
            if (_infEs_::storeBNOpt_) { sample[elt.second[pos]] = sample[elt.first]; }

            Potential< GUM_SCALAR >* potential2(
                const_cast< Potential< GUM_SCALAR >* >(&working_bn->cpt(elt.second[pos])));
            potential2->fillWith(var_cpt);
          }
        }   // end of : for each elt of t1

        if (_infEs_::storeBNOpt_) { this->l_optimalNet_[this_thread]->setCurrentSample(sample); }
      } else {
        // no repetitive independence : sample every node independently
        for (auto node: working_bn->nodes()) {
          auto dSize = working_bn->variable(node).domainSize();
          Potential< GUM_SCALAR >* potential(
              const_cast< Potential< GUM_SCALAR >* >(&working_bn->cpt(node)));
          std::vector< GUM_SCALAR > var_cpt(potential->domainSize());

          auto pConfs = (*cpt)[node].size();

          for (decltype(pConfs) pconf = 0; pconf < pConfs; pconf++) {
            Size nVertices      = Size((*cpt)[node][pconf].size());
            Idx  choosen_vertex = Idx(rand() % nVertices);

            if (_infEs_::storeBNOpt_) { _binaryRep_(sample[node][pconf], choosen_vertex); }

            for (decltype(dSize) mod = 0; mod < dSize; mod++) {
              var_cpt[pconf * dSize + mod] = (*cpt)[node][pconf][choosen_vertex][mod];
            }
          }

          potential->fillWith(var_cpt);
        }

        if (_infEs_::storeBNOpt_) { this->l_optimalNet_[this_thread]->setCurrentSample(sample); }
      }
    }
    // Insert CredalNet evidence into a thread BNInferenceEngine.
    template < typename GUM_SCALAR, class BNInferenceEngine >
    void CNMonteCarloSampling< GUM_SCALAR, BNInferenceEngine >::_insertEvidence_() {
      if (this->evidence_.size() == 0) { return; }

      int this_thread = getThreadNumber();

      BNInferenceEngine* inference_engine = this->l_inferenceEngine_[this_thread];

      IBayesNet< GUM_SCALAR >* working_bn = this->workingSet_[this_thread];

      List< const Potential< GUM_SCALAR >* >* evi_list = this->workingSetE_[this_thread];

      // the evidence potentials have already been built for this thread
      if (evi_list->size() > 0) {
        for (const auto pot: *evi_list)
          inference_engine->addEvidence(*pot);
        return;
      }

      // first call : build the evidence potentials and cache them
      for (const auto& elt: this->evidence_) {
        Potential< GUM_SCALAR >* p = new Potential< GUM_SCALAR >;
        (*p) << working_bn->variable(elt.first);

        try {
          p->fillWith(elt.second);
        } catch (Exception& err) {
          GUM_SHOWERROR(err);
          throw(err);
        }

        evi_list->insert(p);
      }

      if (evi_list->size() > 0) {
        for (const auto pot: *evi_list)
          inference_engine->addEvidence(*pot);
      }
    }

  }   // namespace credal
}   // namespace gum
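/* Usage sketch (illustrative; the BIF file names, the node name and the time limit
 * below are placeholders): CNMonteCarloSampling, declared in
 * <agrum/CN/CNMonteCarloSampling.h>, is typically instantiated with a CredalNet and
 * an exact BN inference engine such as gum::LazyPropagation, roughly as follows:
 *
 *   gum::credal::CredalNet< double > cn("cn_min.bif", "cn_max.bif");
 *   cn.intervalToCredal();
 *
 *   gum::credal::CNMonteCarloSampling< double, gum::LazyPropagation< double > > mc(cn);
 *   mc.setMaxTime(10);   // the time criterion enabled by the constructor
 *   mc.makeInference();
 *
 *   const auto& lower = mc.marginalMin("someNode");   // lower posterior bounds
 *   const auto& upper = mc.marginalMax("someNode");   // upper posterior bounds
 */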