aGrUM  0.14.2
regress_tpl.h
1 /****************************************************************************
2  * Copyright (C) 2005 by Pierre-Henri WUILLEMIN et Christophe GONZALES *
3  * {prenom.nom}_at_lip6.fr *
4  * *
5  * This program is free software; you can redistribute it and/or modify *
6  * it under the terms of the GNU General Public License as published by *
7  * the Free Software Foundation; either version 2 of the License, or *
8  * (at your option) any later version. *
9  * *
10  * This program is distributed in the hope that it will be useful, *
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
13  * GNU General Public License for more details. *
14  * *
15  * You should have received a copy of the GNU General Public License *
16  * along with this program; if not, write to the *
17  * Free Software Foundation, Inc., *
18  * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
19  ****************************************************************************/
30 
31 #define ALLOCATE(x) SmallObjectAllocator::instance().allocate(x)
32 #define DEALLOCATE(x, y) SmallObjectAllocator::instance().deallocate(x, y)
33 
34 namespace gum {
35 
36  template < typename GUM_SCALAR,
37  template < typename >
38  class COMBINEOPERATOR,
39  template < typename >
40  class PROJECTOPERATOR,
41  template < typename >
42  class TerminalNodePolicy >
43  INLINE
44  Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::
45  Regress(const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* DG1,
46  const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* DG2,
47  const Set< const DiscreteVariable* >* primedVars,
48  const DiscreteVariable* targetVar,
49  const GUM_SCALAR neutral) :
50  __DG1(DG1),
51  __DG2(DG2), __neutral(neutral), __combine(), __project(),
52  __DG1InstantiationNeeded(DG1->realSize(), true, false),
53  __DG2InstantiationNeeded(DG2->realSize(), true, false) {
54  GUM_CONSTRUCTOR(Regress);
55  __rd =
56  MultiDimFunctionGraph< GUM_SCALAR,
57  TerminalNodePolicy >::getReducedAndOrderedInstance();
58  __nbVar = 0;
59  __default = nullptr;
60  __primedVars = primedVars;
61  __targetVar = targetVar;
62  }
63 
64  template < typename GUM_SCALAR,
65  template < typename >
66  class COMBINEOPERATOR,
67  template < typename >
68  class PROJECTOPERATOR,
69  template < typename >
70  class TerminalNodePolicy >
71  INLINE
72  Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::
73  ~Regress() {
74  GUM_DESTRUCTOR(Regress);
75 
76  for (auto instIter = __DG1InstantiationNeeded.beginSafe();
77  instIter != __DG1InstantiationNeeded.endSafe();
78  ++instIter)
79  DEALLOCATE(instIter.val(), sizeof(short int) * __nbVar);
80 
81  for (auto instIter = __DG2InstantiationNeeded.beginSafe();
82  instIter != __DG2InstantiationNeeded.endSafe();
83  ++instIter)
84  DEALLOCATE(instIter.val(), sizeof(short int) * __nbVar);
85 
86  if (__nbVar != 0) DEALLOCATE(__default, sizeof(short int) * __nbVar);
87  }
88 
89 
90  // This function is the main function. It is to be called every time an operation
91  // between the two given Function Graphs is required.
92  template < typename GUM_SCALAR,
93  template < typename >
94  class COMBINEOPERATOR,
95  template < typename >
96  class PROJECTOPERATOR,
97  template < typename >
98  class TerminalNodePolicy >
99  MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >*
100  Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::
101  compute() {
102  __establishVarOrder();
103  __findRetrogradeVariables(__DG1, __DG1InstantiationNeeded);
104  __findRetrogradeVariables(__DG2, __DG2InstantiationNeeded);
105 
106  Idx* varInst = nullptr;
107  if (__nbVar != 0) {
108  varInst = static_cast< Idx* >(ALLOCATE(sizeof(Idx) * __nbVar));
109  for (Idx i = 0; i < __nbVar; i++)
110  varInst[i] = (Idx)0;
111  }
112 
113  O4DGContext conti(varInst, __nbVar);
114  conti.setDG1Node(__DG1->root());
115  conti.setDG2Node(__DG2->root());
116 
117  NodeId root = __compute(conti, (Idx)0 - 1);
118  __rd->manager()->setRootNode(root);
119 
120  if (__nbVar != 0) DEALLOCATE(varInst, sizeof(Idx) * __nbVar);
121 
122  __rd->erase(*__targetVar);
123 
124  return __rd;
125  }
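 // ====================================================
 // Usage sketch (editorial illustration, not part of aGrUM). Assuming the
 // terminal-node policy is left at its default and that the standard
 // std::multiplies / std::plus functors are used as COMBINEOPERATOR /
 // PROJECTOPERATOR (so that compute() returns the sum over targetVar of
 // vFunction * probDist), a call could look like the helper below. The helper
 // name regressOnTarget is hypothetical; <functional> must be included for the
 // functors.
 MultiDimFunctionGraph< double >* regressOnTarget(
    const MultiDimFunctionGraph< double >* vFunction,   // plays the role of __DG1
    const MultiDimFunctionGraph< double >* probDist,    // plays the role of __DG2
    const Set< const DiscreteVariable* >*  primedVars,  // primed variables of the problem
    const DiscreteVariable*                targetVar) { // variable summed out
    // 0.0 is the neutral element of the projection (a sum over targetVar)
    Regress< double, std::multiplies, std::plus > reg(
       vFunction, probDist, primedVars, targetVar, 0.0);
    // compute() builds the reduced and ordered result and erases targetVar from it
    return reg.compute();
 }
 // ====================================================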
126 
127  // This function computes an efficient order for the final decision diagram.
128  // Its main criterion to do so is the number of
129  // re-explorations to be done.
130  template < typename GUM_SCALAR,
131  template < typename >
132  class COMBINEOPERATOR,
133  template < typename >
134  class PROJECTOPERATOR,
135  template < typename >
136  class TerminalNodePolicy >
137  INLINE void
138  Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::
139  __establishVarOrder() {
140  SequenceIteratorSafe< const DiscreteVariable* > fite =
141  __DG1->variablesSequence().beginSafe();
142  SequenceIteratorSafe< const DiscreteVariable* > site =
143  __DG2->variablesSequence().beginSafe();
144 
145  while (fite != __DG1->variablesSequence().endSafe()
146  && site != __DG2->variablesSequence().endSafe()) {
147  // Test : if the var from the first order is already in the final order,
148  // we move on to the next one
149  if (__rd->variablesSequence().exists(*fite)) {
150  ++fite;
151  continue;
152  }
153 
154  // Test : if the var from the second order is already in the final order,
155  // we move on to the next one
156  if (__rd->variablesSequence().exists(*site)) {
157  ++site;
158  continue;
159  }
160 
161  // Test : is the current var of the first order present in the second order?
162  // If not (and it is not a primed var), we add it to the final order
163  if (!__DG2->variablesSequence().exists(*fite)
164  && !__primedVars->exists(*fite)) {
165  __rd->add(**fite);
166  ++fite;
167  continue;
168  }
169 
170  // Test : is the current var of the second order present in the first order?
171  // If not (and it is not a primed var), we add it to the final order
172  if (!__DG1->variablesSequence().exists(*site)
173  && !__primedVars->exists(*site)) {
174  __rd->add(**site);
175  ++site;
176  continue;
177  }
178 
179  // Test : if both current vars are the same, we add it to the final order
180  // and advance both iterators
181  if (*fite == *site) {
182  __rd->add(**fite);
183  ++fite;
184  ++site;
185  continue;
186  }
187 
188  // Test : if choosing the first order var costs less in terms of re-exploration,
189  // we choose it
190  __rd->add(**fite);
191  ++fite;
192  }
193 
194  // Whenever an iterator has finished its sequence,
195  // the other may still be in the middle of its own.
196  // Hence, this part ensures that any variables remaining
197  // will be added to the final sequence if needed.
198  if (fite == __DG1->variablesSequence().endSafe()) {
199  for (; site != __DG2->variablesSequence().endSafe(); ++site)
200  if (!__rd->variablesSequence().exists(*site)) __rd->add(**site);
201  } else {
202  for (; fite != __DG1->variablesSequence().endSafe(); ++fite)
203  if (!__rd->variablesSequence().exists(*fite)) __rd->add(**fite);
204  }
205 
206  // Various initializations needed now that we have the bigger picture
207  __nbVar = __rd->variablesSequence().size();
208 
209  if (__nbVar != 0) {
210  __default = static_cast< short int* >(ALLOCATE(sizeof(short int) * __nbVar));
211  for (Idx i = 0; i < __nbVar; i++)
212  __default[i] = (short int)0;
213  }
214  }
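 // ====================================================
 // Editorial illustration of the merge policy above (standalone sketch, not
 // aGrUM code): variables are plain strings and the primed variables a std::set.
 // Requires <algorithm>, <set>, <string>, <vector>. Shared unprimed variables
 // keep their relative position, variables private to one diagram are inserted
 // as soon as they are reached, and ties default to the first sequence.
 inline std::vector< std::string >
    mergeVarOrders(const std::vector< std::string >& seq1,
                   const std::vector< std::string >& seq2,
                   const std::set< std::string >&    primed) {
    std::vector< std::string > out;
    auto placed = [&out](const std::string& v) {
       return std::find(out.begin(), out.end(), v) != out.end();
    };
    auto contains = [](const std::vector< std::string >& s, const std::string& v) {
       return std::find(s.begin(), s.end(), v) != s.end();
    };
    auto f = seq1.begin();
    auto s = seq2.begin();
    while (f != seq1.end() && s != seq2.end()) {
       if (placed(*f)) { ++f; continue; }   // already in the final order
       if (placed(*s)) { ++s; continue; }
       if (!contains(seq2, *f) && !primed.count(*f)) { out.push_back(*f); ++f; continue; }
       if (!contains(seq1, *s) && !primed.count(*s)) { out.push_back(*s); ++s; continue; }
       if (*f == *s) { out.push_back(*f); ++f; ++s; continue; }   // same variable in both
       out.push_back(*f); ++f;              // tie-break : favour the first sequence
    }
    // flush whichever sequence is unfinished
    for (; f != seq1.end(); ++f)
       if (!placed(*f)) out.push_back(*f);
    for (; s != seq2.end(); ++s)
       if (!placed(*s)) out.push_back(*s);
    return out;
 }
 // ====================================================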
215 
216  // This function computes, for every node, whether any retrograde variable is
217  // present below it
218  template < typename GUM_SCALAR,
219  template < typename >
220  class COMBINEOPERATOR,
221  template < typename >
222  class PROJECTOPERATOR,
223  template < typename >
224  class TerminalNodePolicy >
225  INLINE void
226  Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::
227  __findRetrogradeVariables(
228  const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* dg,
229  HashTable< NodeId, short int* >& dgInstNeed) {
230  HashTable< NodeId, short int* > nodesVarDescendant;
231  Size tableSize = Size(__nbVar * sizeof(short int));
232 
233  for (auto varIter = dg->variablesSequence().rbeginSafe();
234  varIter != dg->variablesSequence().rendSafe();
235  --varIter) {
236  Idx varPos = __rd->variablesSequence().pos(*varIter);
237 
238  const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
239  while (nodeIter != nullptr) {
240  short int* instantiationNeeded =
241  static_cast< short int* >(ALLOCATE(tableSize));
242  dgInstNeed.insert(nodeIter->element(), instantiationNeeded);
243  short int* varDescendant = static_cast< short int* >(ALLOCATE(tableSize));
244  nodesVarDescendant.insert(nodeIter->element(), varDescendant);
245  for (Idx j = 0; j < __nbVar; j++) {
246  instantiationNeeded[j] = (short int)0;
247  varDescendant[j] = (short int)0;
248  }
249 
250 
251  varDescendant[varPos] = (short int)1;
252  for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons();
253  ++modality) {
254  if (!dg->isTerminalNode(dg->node(nodeIter->element())->son(modality))) {
255  short int* sonVarDescendant =
256  nodesVarDescendant[dg->node(nodeIter->element())->son(modality)];
257  for (Idx varIdx = 0; varIdx < __nbVar; varIdx++) {
258  varDescendant[varIdx] += sonVarDescendant[varIdx];
259  if (varDescendant[varIdx] && varIdx < varPos)
260  instantiationNeeded[varIdx] = (short int)1;
261  }
262  }
263  }
264  nodeIter = nodeIter->nextLink();
265  }
266  }
267 
268  for (auto varIter = dg->variablesSequence().beginSafe();
269  varIter != dg->variablesSequence().endSafe();
270  ++varIter) {
271  const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
272  while (nodeIter != nullptr) {
273  for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons();
274  ++modality) {
275  NodeId sonId = dg->node(nodeIter->element())->son(modality);
276  if (!dg->isTerminalNode(sonId)) {
277  for (Idx varIdx = 0; varIdx < __nbVar; ++varIdx) {
278  if (dgInstNeed[nodeIter->element()][varIdx]
279  && nodesVarDescendant[sonId][varIdx]) {
280  dgInstNeed[sonId][varIdx] = (short int)1;
281  }
282  }
283  }
284  }
285  nodeIter = nodeIter->nextLink();
286  }
287  }
288 
289  for (HashTableIterator< NodeId, short int* > it = nodesVarDescendant.begin();
290  it != nodesVarDescendant.end();
291  ++it) {
292  DEALLOCATE(it.val(), tableSize);
293  }
294 
295  nodesVarDescendant.clear();
296  }
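 // ====================================================
 // Editorial illustration of the bottom-up rule above (standalone sketch, not
 // aGrUM code, requires <cstddef> and <vector>). varDescendant[v] != 0 means the
 // variable at position v in the final order occurs at or below the node; a
 // variable is retrograde for the node when it occurs below it yet is ranked
 // before the node's own variable, hence must be instantiated before the node
 // is reached.
 inline void foldSonIntoNode(std::vector< short >&       varDescendant,
                             std::vector< short >&       instantiationNeeded,
                             const std::vector< short >& sonVarDescendant,
                             std::size_t                 nodeVarPos) {
    for (std::size_t v = 0; v < varDescendant.size(); ++v) {
       varDescendant[v] += sonVarDescendant[v];
       if (varDescendant[v] && v < nodeVarPos) instantiationNeeded[v] = 1;
    }
 }
 // ====================================================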
297 
298  // A key is used to prune unnecessary operations: once a node has been
299  // visited in a given context, there is no use revisiting it,
300  // since the result will be the same node. So we just have to keep an
301  // association context - node.
302  // The context consists of :
303  // _ the leader node we are visiting,
304  // _ the follower node we are visiting,
305  // _ for every retrograde variable, whether it has been instantiated
306  // before and, if so, the modality currently instantiated, meaning :
307  // _ 0 means the variable has not been instantiated yet,
308  // _ a value from 1 to domainSize + 1 means that the current modality
309  // index of the variable is that value - 1,
310  // _ domainSize + 2 means the variable is in default mode.
311  // A key - node association is made each time we create a node in the resulting
312  // diagram.
313  // Since GUM_MULTI_DIM_DECISION_DIAGRAM_RECUR_FUNCTION is a cornerstone of the
314  // algorithm ( meaning each time we explore a node we go through
315  // this function ), the check only has to be done at the beginning of that function.
316  template < typename GUM_SCALAR,
317  template < typename >
318  class COMBINEOPERATOR,
319  template < typename >
320  class PROJECTOPERATOR,
321  template < typename >
322  class TerminalNodePolicy >
323  INLINE NodeId
324  Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::
325  __compute(O4DGContext& currentSituation, Idx lastInstVarPos) {
326  NodeId newNode = 0;
327 
328  // If both current nodes are terminal,
329  // we only have to compute the resulting value
330  if (__DG1->isTerminalNode(currentSituation.DG1Node())
331  && __DG2->isTerminalNode(currentSituation.DG2Node())) {
332  // We have to compute the new value and insert a new node in the diagram
333  // with this value ...
334  GUM_SCALAR newVal = __neutral;
335  GUM_SCALAR tempVal = __combine(__DG1->nodeValue(currentSituation.DG1Node()),
336  __DG2->nodeValue(currentSituation.DG2Node()));
337  for (Idx targetModa = 0; targetModa < __targetVar->domainSize();
338  ++targetModa)
339  newVal = __project(newVal, tempVal);
340  return __rd->manager()->addTerminalNode(newVal);
341  }
342 
343  // If not,
344  // we'll have to do some exploration
345 
346  // First we ensure that we haven't already visited this pair of nodes under the
347  // same circumstances
348  short int* dg1NeededVar =
349  __DG1InstantiationNeeded.exists(currentSituation.DG1Node())
350  ? __DG1InstantiationNeeded[currentSituation.DG1Node()]
351  : __default;
352  Idx dg1CurrentVarPos =
353  __DG1->isTerminalNode(currentSituation.DG1Node())
354  ? __nbVar
355  : __rd->variablesSequence().pos(
356  __DG1->node(currentSituation.DG1Node())->nodeVar());
357  short int* dg2NeededVar =
358  __DG2InstantiationNeeded.exists(currentSituation.DG2Node())
359  ? __DG2InstantiationNeeded[currentSituation.DG2Node()]
360  : __default;
361  Idx dg2CurrentVarPos =
362  __DG2->isTerminalNode(currentSituation.DG2Node())
363  ? __nbVar
364  : __rd->variablesSequence().pos(
365  __DG2->node(currentSituation.DG2Node())->nodeVar());
366 
367  short int* instNeeded =
368  static_cast< short int* >(ALLOCATE(sizeof(short int) * __nbVar));
369 
370  for (Idx i = 0; i < __nbVar; i++) {
371  instNeeded[i] = dg1NeededVar[i] + dg2NeededVar[i];
372  }
373 
374  double curSitKey = currentSituation.key(instNeeded);
375 
376  if (__explorationTable.exists(curSitKey)) {
377  DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
378 
379  return __explorationTable[curSitKey];
380  }
381 
382  // ====================================================
383 
384  NodeId origDG1 = currentSituation.DG1Node(),
385  origDG2 = currentSituation.DG2Node();
386 
387  const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* leaddg =
388  nullptr;
389  NodeId leadNodeId = 0;
390  Idx leadVarPos = __rd->variablesSequence().size();
391  typedef void (O4DGContext::*SetNodeFunction)(const NodeId&);
392  SetNodeFunction leadFunction = nullptr;
393 
394  bool sameVar = false;
395 
396  if (!__DG1->isTerminalNode(currentSituation.DG1Node())) {
397  if (currentSituation.varModality(dg1CurrentVarPos) != 0) {
398  // If the var associated to the current node has already been instantiated,
399  // we have to jump over it
400  currentSituation.setDG1Node(
401  __DG1->node(currentSituation.DG1Node())
402  ->son(currentSituation.varModality(dg1CurrentVarPos) - 1));
403 
404  newNode = __compute(currentSituation, lastInstVarPos);
405  __explorationTable.insert(curSitKey, newNode);
406  currentSituation.setDG1Node(origDG1);
407  currentSituation.setDG2Node(origDG2);
408 
409  DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
410 
411  return newNode;
412  }
413 
414  leaddg = __DG1;
415  leadNodeId = currentSituation.DG1Node();
416  leadVarPos = dg1CurrentVarPos;
417  leadFunction = &O4DGContext::setDG1Node;
418  }
419 
420  if (!__DG2->isTerminalNode(currentSituation.DG2Node())) {
421  if (currentSituation.varModality(dg2CurrentVarPos) != 0) {
422  // If the var associated to the current node has already been instantiated,
423  // we have to jump over it
424  currentSituation.setDG2Node(
425  __DG2->node(currentSituation.DG2Node())
426  ->son(currentSituation.varModality(dg2CurrentVarPos) - 1));
427 
428  newNode = __compute(currentSituation, lastInstVarPos);
429  __explorationTable.insert(curSitKey, newNode);
430  currentSituation.setDG1Node(origDG1);
431  currentSituation.setDG2Node(origDG2);
432 
433  DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
434 
435  return newNode;
436  }
437 
438  if (leadVarPos == dg2CurrentVarPos) { sameVar = true; }
439 
440  if (leadVarPos > dg2CurrentVarPos) {
441  leaddg = __DG2;
442  leadNodeId = currentSituation.DG2Node();
443  leadVarPos = dg2CurrentVarPos;
444  leadFunction = &O4DGContext::setDG2Node;
445  }
446  }
447 
448  // ====================================================
449  // Anticipated Exploration
450 
451  // Before exploring nodes, we have to ensure that every anticipated
452  // exploration is done
453  for (Idx varPos = lastInstVarPos + 1; varPos < leadVarPos; ++varPos) {
454  if (instNeeded[varPos]) {
455  const DiscreteVariable* curVar = __rd->variablesSequence().atPos(varPos);
456  NodeId* sonsIds =
457  static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));
458 
459  for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
460  currentSituation.chgVarModality(varPos, modality + 1);
461 
462  sonsIds[modality] = __compute(currentSituation, varPos);
463  }
464 
465  newNode = __rd->manager()->addInternalNode(curVar, sonsIds);
466 
467  __explorationTable.insert(curSitKey, newNode);
468  currentSituation.chgVarModality(varPos, 0);
469  currentSituation.setDG1Node(origDG1);
470  currentSituation.setDG2Node(origDG2);
471 
472  DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
473 
474  return newNode;
475  }
476  }
477 
478  // ====================================================
479  // Terminal Exploration
480  if (sameVar && __DG1->node(origDG1)->nodeVar() == __targetVar) {
481  GUM_SCALAR newVal = __neutral;
482  for (Idx targetModa = 0; targetModa < __targetVar->domainSize();
483  ++targetModa)
484  newVal = __project(
485  newVal,
486  __combine(__DG1->nodeValue(__DG1->node(origDG1)->son(targetModa)),
487  __DG2->nodeValue(__DG2->node(origDG2)->son(targetModa))));
488  newNode = __rd->manager()->addTerminalNode(newVal);
489  __explorationTable.insert(curSitKey, newNode);
490  DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
491  return newNode;
492  }
493  if (__DG1->isTerminalNode(origDG1)) {
494  if (__DG2->node(origDG2)->nodeVar() == __targetVar) {
495  GUM_SCALAR newVal = __neutral;
496  for (Idx targetModa = 0; targetModa < __targetVar->domainSize();
497  ++targetModa)
498  newVal = __project(
499  newVal,
500  __combine(__DG1->nodeValue(origDG1),
501  __DG2->nodeValue(__DG2->node(origDG2)->son(targetModa))));
502  newNode = __rd->manager()->addTerminalNode(newVal);
503  __explorationTable.insert(curSitKey, newNode);
504  DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
505  return newNode;
506  }
507  } else {
508  if (__DG1->node(origDG1)->nodeVar() == __targetVar
509  && __DG2->isTerminalNode(origDG2)) {
510  GUM_SCALAR newVal = __neutral;
511  for (Idx targetModa = 0; targetModa < __targetVar->domainSize();
512  ++targetModa)
513  newVal = __project(
514  newVal,
515  __combine(__DG1->nodeValue(__DG1->node(origDG1)->son(targetModa)),
516  __DG2->nodeValue(origDG2)));
517  newNode = __rd->manager()->addTerminalNode(newVal);
518  __explorationTable.insert(curSitKey, newNode);
519  DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
520  return newNode;
521  }
522  }
523 
524  // ====================================================
525  // Normal Exploration
526 
527  // If only one of the current nodes is terminal,
528  // we have to go deeper into the other diagram
529  if (sameVar) {
530  // If so - meaning it's the same variable - we have to go
531  // down both diagrams
532  const InternalNode* dg1Node = __DG1->node(origDG1);
533  const InternalNode* dg2Node = __DG2->node(origDG2);
534 
535  const DiscreteVariable* curVar = dg1Node->nodeVar();
536  Idx varPos = __rd->variablesSequence().pos(curVar);
537  NodeId* sonsIds =
538  static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));
539 
540  for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
541  currentSituation.chgVarModality(varPos, modality + 1);
542  currentSituation.setDG1Node(dg1Node->son(modality));
543  currentSituation.setDG2Node(dg2Node->son(modality));
544 
545  sonsIds[modality] = __compute(currentSituation, varPos);
546  }
547 
548  newNode = __rd->manager()->addInternalNode(curVar, sonsIds);
549 
550  __explorationTable.insert(curSitKey, newNode);
551  currentSituation.chgVarModality(varPos, 0);
552  currentSituation.setDG1Node(origDG1);
553  currentSituation.setDG2Node(origDG2);
554 
555  DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
556 
557  return newNode;
558  }
559  // ====================================================
560  else {
561  const InternalNode* leaddgNode = leaddg->node(leadNodeId);
562 
563  const DiscreteVariable* curVar = leaddgNode->nodeVar();
564  NodeId* sonsIds =
565  static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));
566 
567  for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
568  currentSituation.chgVarModality(leadVarPos, modality + 1);
569  (currentSituation.*leadFunction)(leaddgNode->son(modality));
570 
571  sonsIds[modality] = __compute(currentSituation, leadVarPos);
572  }
573 
574  newNode = __rd->manager()->addInternalNode(curVar, sonsIds);
575 
576  __explorationTable.insert(curSitKey, newNode);
577  currentSituation.chgVarModality(leadVarPos, 0);
578  currentSituation.setDG1Node(origDG1);
579  currentSituation.setDG2Node(origDG2);
580 
581  DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
582 
583  return newNode;
584  }
585  }
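 // ====================================================
 // Editorial note on the terminal cases above (standalone sketch, not aGrUM
 // code). All three terminal branches reduce to the same fold: project (via
 // PROJECTOPERATOR) the combined (via COMBINEOPERATOR) leaf values over the
 // modalities of targetVar. The sketch below assumes std::plus / std::multiplies
 // as the two operators and requires <cstddef>, <functional> and <vector>.
 inline double regressLeaves(const std::vector< double >& v1,   // DG1 leaves per modality
                             const std::vector< double >& v2,   // DG2 leaves per modality
                             double                       neutral) {
    double acc = neutral;
    for (std::size_t m = 0; m < v1.size(); ++m)
       acc = std::plus< double >()(acc, std::multiplies< double >()(v1[m], v2[m]));
    return acc;   // with + and * this is the expectation  sum_m V(m) * P(m)
 }
 // ====================================================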
586 
587 } // namespace gum