aGrUM  0.14.2
gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy > Class Template Reference

Class used to perform Function Graph Operations in the FMDP Framework. More...

#include <agrum/multidim/patterns/regress.h>


Public Member Functions

Constructors / Destructors
 Regress (const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *vfunction, const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *probDist, const Set< const DiscreteVariable * > *primedVars, const DiscreteVariable *targetVar, const GUM_SCALAR neutral)
 Default constructor. More...
 
 ~Regress ()
 Default destructor. More...
 
Main Method
MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * compute ()
 Computes and builds the Function Graph that is the result of the operation. More...
 

Detailed Description

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
class gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >

Class used to perform Function Graph Operations in the FMDP Framework.

Definition at line 51 of file regress.h.
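
A minimal usage sketch (illustrative only, not part of the generated documentation): it assumes that vFunction, probDist, primedVars and xPrime are built elsewhere, uses std::multiplies / std::plus as COMBINEOPERATOR / PROJECTOPERATOR with 0 as the neutral element of the projection, and — judging from compute() and the destructor below — assumes the caller takes ownership of the returned graph. Only the regress header is shown; the headers defining MultiDimFunctionGraph, Set and DiscreteVariable must also be included.

#include <functional>   // std::multiplies, std::plus
#include <agrum/multidim/patterns/regress.h>

// vFunction  : value function V(x, x') as a function graph
// probDist   : transition CPT P(x' | x) as a function graph
// primedVars : the primed (post-transition) variables
// xPrime     : the primed variable to sum out
// All of these are assumed to be built elsewhere.
gum::MultiDimFunctionGraph< double >*
regressOnXPrime(const gum::MultiDimFunctionGraph< double >*     vFunction,
                const gum::MultiDimFunctionGraph< double >*     probDist,
                const gum::Set< const gum::DiscreteVariable* >* primedVars,
                const gum::DiscreteVariable*                    xPrime) {
  // 0.0 is the neutral element of std::plus, used to seed the projection
  gum::Regress< double, std::multiplies, std::plus > r(
     vFunction, probDist, primedVars, xPrime, 0.0);

  // The result no longer contains xPrime; the caller is responsible for deleting it.
  return r.compute();
}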

Constructor & Destructor Documentation

◆ Regress()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::Regress ( const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *  vfunction,
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *  probDist,
const Set< const DiscreteVariable * > *  primedVars,
const DiscreteVariable *  targetVar,
const GUM_SCALAR  neutral 
)

Default constructor.

Definition at line 45 of file regress_tpl.h.

References gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__default, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__nbVar, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__primedVars, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__rd, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__targetVar, and gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::~Regress().

   :
    __DG1(DG1),
    __DG2(DG2), __neutral(neutral), __combine(), __project(),
    __DG1InstantiationNeeded(DG1->realSize(), true, false),
    __DG2InstantiationNeeded(DG2->realSize(), true, false) {
  GUM_CONSTRUCTOR(Regress);
  __rd = MultiDimFunctionGraph< GUM_SCALAR,
                                TerminalNodePolicy >::getReducedAndOrderedInstance();
  __nbVar = 0;
  __default = nullptr;
  __primedVars = primedVars;
  __targetVar = targetVar;
}

◆ ~Regress()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::~Regress ( )

Default destructor.

Definition at line 73 of file regress_tpl.h.

References gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__default, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG1InstantiationNeeded, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG2InstantiationNeeded, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__nbVar, gum::HashTable< Key, Val, Alloc >::beginSafe(), gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::compute(), DEALLOCATE, and gum::HashTable< Key, Val, Alloc >::endSafe().

Referenced by gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::Regress().

{
  GUM_DESTRUCTOR(Regress);

  for (auto instIter = __DG1InstantiationNeeded.beginSafe();
       instIter != __DG1InstantiationNeeded.endSafe();
       ++instIter)
    DEALLOCATE(instIter.val(), sizeof(short int) * __nbVar);

  for (auto instIter = __DG2InstantiationNeeded.beginSafe();
       instIter != __DG2InstantiationNeeded.endSafe();
       ++instIter)
    DEALLOCATE(instIter.val(), sizeof(short int) * __nbVar);

  if (__nbVar != 0) DEALLOCATE(__default, sizeof(short int) * __nbVar);
}

Member Function Documentation

◆ __compute()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE NodeId gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__compute ( O4DGContext &  currentSituation,
Idx  lastInstVarPos 
)
private

The main recursion function.

Definition at line 325 of file regress_tpl.h.

References gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__combine, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__default, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG1, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG1InstantiationNeeded, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG2, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG2InstantiationNeeded, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__explorationTable, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__nbVar, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__neutral, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__project, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__rd, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__targetVar, ALLOCATE, gum::O4DGContext::chgVarModality(), DEALLOCATE, gum::O4DGContext::DG1Node(), gum::O4DGContext::DG2Node(), gum::DiscreteVariable::domainSize(), gum::HashTable< Key, Val, Alloc >::exists(), gum::HashTable< Key, Val, Alloc >::insert(), gum::O4DGContext::key(), gum::InternalNode::nodeVar(), gum::O4DGContext::setDG1Node(), gum::O4DGContext::setDG2Node(), gum::InternalNode::son(), and gum::O4DGContext::varModality().

Referenced by gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__findRetrogradeVariables(), and gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::compute().
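
The recursion simultaneously descends both function graphs, combining their values with COMBINEOPERATOR and projecting the target (primed) variable out with PROJECTOPERATOR. As a sketch of the intended semantics (inferred from the code below rather than stated in the library documentation), with multiplication as the combination and addition as the projection this is the classical regression step of value iteration in FMDPs:

\[ R(\mathbf{x}) \;=\; \sum_{x'} P(x' \mid \mathbf{x})\, V(\mathbf{x}, x') \]

where x' ranges over the modalities of targetVar; the summed-out variable is then erased from the resulting graph by compute().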

{
  NodeId newNode = 0;

  // If both current nodes are terminal,
  // we only have to compute the resulting value
  if (__DG1->isTerminalNode(currentSituation.DG1Node())
      && __DG2->isTerminalNode(currentSituation.DG2Node())) {
    // We compute the new value and insert a new node in the diagram
    // with this value, ...
    GUM_SCALAR newVal = __neutral;
    GUM_SCALAR tempVal = __combine(__DG1->nodeValue(currentSituation.DG1Node()),
                                   __DG2->nodeValue(currentSituation.DG2Node()));
    for (Idx targetModa = 0; targetModa < __targetVar->domainSize();
         ++targetModa)
      newVal = __project(newVal, tempVal);
    return __rd->manager()->addTerminalNode(newVal);
  }

  // If not,
  // we'll have to do some exploration

  // First we ensure that we haven't already visited this pair of nodes under
  // the same circumstances
  short int* dg1NeededVar =
     __DG1InstantiationNeeded.exists(currentSituation.DG1Node())
        ? __DG1InstantiationNeeded[currentSituation.DG1Node()]
        : __default;
  Idx dg1CurrentVarPos =
     __DG1->isTerminalNode(currentSituation.DG1Node())
        ? __nbVar
        : __rd->variablesSequence().pos(
             __DG1->node(currentSituation.DG1Node())->nodeVar());
  short int* dg2NeededVar =
     __DG2InstantiationNeeded.exists(currentSituation.DG2Node())
        ? __DG2InstantiationNeeded[currentSituation.DG2Node()]
        : __default;
  Idx dg2CurrentVarPos =
     __DG2->isTerminalNode(currentSituation.DG2Node())
        ? __nbVar
        : __rd->variablesSequence().pos(
             __DG2->node(currentSituation.DG2Node())->nodeVar());

  short int* instNeeded =
     static_cast< short int* >(ALLOCATE(sizeof(short int) * __nbVar));

  for (Idx i = 0; i < __nbVar; i++) {
    instNeeded[i] = dg1NeededVar[i] + dg2NeededVar[i];
  }

  double curSitKey = currentSituation.key(instNeeded);

  if (__explorationTable.exists(curSitKey)) {
    DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);

    return __explorationTable[curSitKey];
  }

  // ====================================================

  NodeId origDG1 = currentSituation.DG1Node(),
         origDG2 = currentSituation.DG2Node();

  const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* leaddg =
     nullptr;
  NodeId leadNodeId = 0;
  Idx leadVarPos = __rd->variablesSequence().size();
  typedef void (O4DGContext::*SetNodeFunction)(const NodeId&);
  SetNodeFunction leadFunction = nullptr;

  bool sameVar = false;

  if (!__DG1->isTerminalNode(currentSituation.DG1Node())) {
    if (currentSituation.varModality(dg1CurrentVarPos) != 0) {
      // If the var associated to the current node has already been
      // instantiated, we have to jump it
      currentSituation.setDG1Node(
         __DG1->node(currentSituation.DG1Node())
            ->son(currentSituation.varModality(dg1CurrentVarPos) - 1));

      newNode = __compute(currentSituation, lastInstVarPos);
      __explorationTable.insert(curSitKey, newNode);
      currentSituation.setDG1Node(origDG1);
      currentSituation.setDG2Node(origDG2);

      DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);

      return newNode;
    }

    leaddg = __DG1;
    leadNodeId = currentSituation.DG1Node();
    leadVarPos = dg1CurrentVarPos;
    leadFunction = &O4DGContext::setDG1Node;
  }

  if (!__DG2->isTerminalNode(currentSituation.DG2Node())) {
    if (currentSituation.varModality(dg2CurrentVarPos) != 0) {
      // If the var associated to the current node has already been
      // instantiated, we have to jump it
      currentSituation.setDG2Node(
         __DG2->node(currentSituation.DG2Node())
            ->son(currentSituation.varModality(dg2CurrentVarPos) - 1));

      newNode = __compute(currentSituation, lastInstVarPos);
      __explorationTable.insert(curSitKey, newNode);
      currentSituation.setDG1Node(origDG1);
      currentSituation.setDG2Node(origDG2);

      DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);

      return newNode;
    }

    if (leadVarPos == dg2CurrentVarPos) { sameVar = true; }

    if (leadVarPos > dg2CurrentVarPos) {
      leaddg = __DG2;
      leadNodeId = currentSituation.DG2Node();
      leadVarPos = dg2CurrentVarPos;
      leadFunction = &O4DGContext::setDG2Node;
    }
  }

  // ====================================================
  // Anticipated Exploration

  // Before exploring nodes, we have to ensure that every anticipated
  // exploration is done
  for (Idx varPos = lastInstVarPos + 1; varPos < leadVarPos; ++varPos) {
    if (instNeeded[varPos]) {
      const DiscreteVariable* curVar = __rd->variablesSequence().atPos(varPos);
      NodeId* sonsIds =
         static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));

      for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
        currentSituation.chgVarModality(varPos, modality + 1);

        sonsIds[modality] = __compute(currentSituation, varPos);
      }

      newNode = __rd->manager()->addInternalNode(curVar, sonsIds);

      __explorationTable.insert(curSitKey, newNode);
      currentSituation.chgVarModality(varPos, 0);
      currentSituation.setDG1Node(origDG1);
      currentSituation.setDG2Node(origDG2);

      DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);

      return newNode;
    }
  }

  // ====================================================
  // Terminal Exploration
  if (sameVar && __DG1->node(origDG1)->nodeVar() == __targetVar) {
    GUM_SCALAR newVal = __neutral;
    for (Idx targetModa = 0; targetModa < __targetVar->domainSize();
         ++targetModa)
      newVal = __project(
         newVal,
         __combine(__DG1->nodeValue(__DG1->node(origDG1)->son(targetModa)),
                   __DG2->nodeValue(__DG2->node(origDG2)->son(targetModa))));
    newNode = __rd->manager()->addTerminalNode(newVal);
    __explorationTable.insert(curSitKey, newNode);
    DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
    return newNode;
  }
  if (__DG1->isTerminalNode(origDG1)) {
    if (__DG2->node(origDG2)->nodeVar() == __targetVar) {
      GUM_SCALAR newVal = __neutral;
      for (Idx targetModa = 0; targetModa < __targetVar->domainSize();
           ++targetModa)
        newVal = __project(
           newVal,
           __combine(__DG1->nodeValue(origDG1),
                     __DG2->nodeValue(__DG2->node(origDG2)->son(targetModa))));
      newNode = __rd->manager()->addTerminalNode(newVal);
      __explorationTable.insert(curSitKey, newNode);
      DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
      return newNode;
    }
  } else {
    if (__DG1->node(origDG1)->nodeVar() == __targetVar
        && __DG2->isTerminalNode(origDG2)) {
      GUM_SCALAR newVal = __neutral;
      for (Idx targetModa = 0; targetModa < __targetVar->domainSize();
           ++targetModa)
        newVal = __project(
           newVal,
           __combine(__DG1->nodeValue(__DG1->node(origDG1)->son(targetModa)),
                     __DG2->nodeValue(origDG2)));
      newNode = __rd->manager()->addTerminalNode(newVal);
      __explorationTable.insert(curSitKey, newNode);
      DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);
      return newNode;
    }
  }

  // ====================================================
  // Normal Exploration

  // If only one of the current nodes is terminal,
  // we have to pursue deeper on the other diagram
  if (sameVar) {
    // If so - meaning it's the same variable - we have to go
    // down on both
    const InternalNode* dg1Node = __DG1->node(origDG1);
    const InternalNode* dg2Node = __DG2->node(origDG2);

    const DiscreteVariable* curVar = dg1Node->nodeVar();
    Idx varPos = __rd->variablesSequence().pos(curVar);
    NodeId* sonsIds =
       static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));

    for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
      currentSituation.chgVarModality(varPos, modality + 1);
      currentSituation.setDG1Node(dg1Node->son(modality));
      currentSituation.setDG2Node(dg2Node->son(modality));

      sonsIds[modality] = __compute(currentSituation, varPos);
    }

    newNode = __rd->manager()->addInternalNode(curVar, sonsIds);

    __explorationTable.insert(curSitKey, newNode);
    currentSituation.chgVarModality(varPos, 0);
    currentSituation.setDG1Node(origDG1);
    currentSituation.setDG2Node(origDG2);

    DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);

    return newNode;
  }
  // ====================================================
  else {
    const InternalNode* leaddgNode = leaddg->node(leadNodeId);

    const DiscreteVariable* curVar = leaddgNode->nodeVar();
    NodeId* sonsIds =
       static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));

    for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
      currentSituation.chgVarModality(leadVarPos, modality + 1);
      (currentSituation.*leadFunction)(leaddgNode->son(modality));

      sonsIds[modality] = __compute(currentSituation, leadVarPos);
    }

    newNode = __rd->manager()->addInternalNode(curVar, sonsIds);

    __explorationTable.insert(curSitKey, newNode);
    currentSituation.chgVarModality(leadVarPos, 0);
    currentSituation.setDG1Node(origDG1);
    currentSituation.setDG2Node(origDG2);

    DEALLOCATE(instNeeded, sizeof(short int) * __nbVar);

    return newNode;
  }
}

◆ __establishVarOrder()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE void gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__establishVarOrder ( )
private

Computes an order for the final decision graph that minimizes the number of re-explorations.

Definition at line 139 of file regress_tpl.h.

References gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__default, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG1, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG2, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__findRetrogradeVariables(), gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__nbVar, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__primedVars, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__rd, and ALLOCATE.

Referenced by gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::compute().

{
  SequenceIteratorSafe< const DiscreteVariable* > fite =
     __DG1->variablesSequence().beginSafe();
  SequenceIteratorSafe< const DiscreteVariable* > site =
     __DG2->variablesSequence().beginSafe();

  while (fite != __DG1->variablesSequence().endSafe()
         && site != __DG2->variablesSequence().endSafe()) {
    // Test : if var from first order is already in the final order
    // we move on to the next one
    if (__rd->variablesSequence().exists(*fite)) {
      ++fite;
      continue;
    }

    // Test : if var from second order is already in the final order
    // we move on to the next one
    if (__rd->variablesSequence().exists(*site)) {
      ++site;
      continue;
    }

    // Test : if the current var of the first order is absent from the second
    // order (and not primed), we add it to the final order
    if (!__DG2->variablesSequence().exists(*fite)
        && !__primedVars->exists(*fite)) {
      __rd->add(**fite);
      ++fite;
      continue;
    }

    // Test : if the current var of the second order is absent from the first
    // order (and not primed), we add it to the final order
    if (!__DG1->variablesSequence().exists(*site)
        && !__primedVars->exists(*site)) {
      __rd->add(**site);
      ++site;
      continue;
    }

    // Test : if both orders agree on the current var,
    // we add it to the final order and advance both iterators
    if (*fite == *site) {
      __rd->add(**fite);
      ++fite;
      ++site;
      continue;
    }

    // Otherwise : choosing the first order's var costs less in terms of
    // re-exploration, so we choose it
    __rd->add(**fite);
    ++fite;
  }

  // Whenever an iterator has finished its sequence,
  // the other may still be in the middle of its own.
  // Hence, this part ensures that any remaining variables
  // will be added to the final sequence if needed.
  if (fite == __DG1->variablesSequence().endSafe()) {
    for (; site != __DG2->variablesSequence().endSafe(); ++site)
      if (!__rd->variablesSequence().exists(*site)) __rd->add(**site);
  } else {
    for (; fite != __DG1->variablesSequence().endSafe(); ++fite)
      if (!__rd->variablesSequence().exists(*fite)) __rd->add(**fite);
  }

  // Various initializations needed now that we have the bigger picture
  __nbVar = __rd->variablesSequence().size();

  if (__nbVar != 0) {
    __default = static_cast< short int* >(ALLOCATE(sizeof(short int) * __nbVar));
    for (Idx i = 0; i < __nbVar; i++)
      __default[i] = (short int)0;
  }
}

◆ __findRetrogradeVariables()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE void gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__findRetrogradeVariables ( const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *  dg,
HashTable< NodeId, short int * > &  dgInstNeed 
)
private

Establishes, for each node of the given function graph, whether it has retrograde variables beneath it, i.e. variables that appear below the node but come earlier in the final variable order and must therefore be instantiated before the node is explored.

Definition at line 227 of file regress_tpl.h.

References gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__compute(), gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__nbVar, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__rd, ALLOCATE, gum::HashTable< Key, Val, Alloc >::begin(), gum::HashTable< Key, Val, Alloc >::clear(), DEALLOCATE, gum::Link< T >::element(), gum::HashTable< Key, Val, Alloc >::end(), gum::HashTable< Key, Val, Alloc >::insert(), and gum::Link< T >::nextLink().

Referenced by gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__establishVarOrder(), and gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::compute().

{
  HashTable< NodeId, short int* > nodesVarDescendant;
  Size tableSize = Size(__nbVar * sizeof(short int));

  for (auto varIter = dg->variablesSequence().rbeginSafe();
       varIter != dg->variablesSequence().rendSafe();
       --varIter) {
    Idx varPos = __rd->variablesSequence().pos(*varIter);

    const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
    while (nodeIter != nullptr) {
      short int* instantiationNeeded =
         static_cast< short int* >(ALLOCATE(tableSize));
      dgInstNeed.insert(nodeIter->element(), instantiationNeeded);
      short int* varDescendant = static_cast< short int* >(ALLOCATE(tableSize));
      nodesVarDescendant.insert(nodeIter->element(), varDescendant);
      for (Idx j = 0; j < __nbVar; j++) {
        instantiationNeeded[j] = (short int)0;
        varDescendant[j] = (short int)0;
      }

      varDescendant[varPos] = (short int)1;
      for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons();
           ++modality) {
        if (!dg->isTerminalNode(dg->node(nodeIter->element())->son(modality))) {
          short int* sonVarDescendant =
             nodesVarDescendant[dg->node(nodeIter->element())->son(modality)];
          for (Idx varIdx = 0; varIdx < __nbVar; varIdx++) {
            varDescendant[varIdx] += sonVarDescendant[varIdx];
            if (varDescendant[varIdx] && varIdx < varPos)
              instantiationNeeded[varIdx] = (short int)1;
          }
        }
      }
      nodeIter = nodeIter->nextLink();
    }
  }

  for (auto varIter = dg->variablesSequence().beginSafe();
       varIter != dg->variablesSequence().endSafe();
       ++varIter) {
    const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
    while (nodeIter != nullptr) {
      for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons();
           ++modality) {
        NodeId sonId = dg->node(nodeIter->element())->son(modality);
        if (!dg->isTerminalNode(sonId)) {
          for (Idx varIdx = 0; varIdx < __nbVar; ++varIdx) {
            if (dgInstNeed[nodeIter->element()][varIdx]
                && nodesVarDescendant[sonId][varIdx]) {
              dgInstNeed[sonId][varIdx] = (short int)1;
            }
          }
        }
      }
      nodeIter = nodeIter->nextLink();
    }
  }

  for (HashTableIterator< NodeId, short int* > it = nodesVarDescendant.begin();
       it != nodesVarDescendant.end();
       ++it) {
    DEALLOCATE(it.val(), tableSize);
  }

  nodesVarDescendant.clear();
}

◆ compute()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::compute ( )

Computes and builds the Function Graph that is the result of the operation.

Definition at line 101 of file regress_tpl.h.

References gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__compute(), gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG1, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG1InstantiationNeeded, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG2, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG2InstantiationNeeded, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__establishVarOrder(), gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__findRetrogradeVariables(), gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__nbVar, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__rd, gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__targetVar, ALLOCATE, DEALLOCATE, gum::O4DGContext::setDG1Node(), and gum::O4DGContext::setDG2Node().

Referenced by gum::MDDOperatorStrategy< GUM_SCALAR >::regress(), and gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::~Regress().

{
  // Determine the final variable order and, for every node of both graphs,
  // the retrograde variables that require anticipated instantiation
  __establishVarOrder();
  __findRetrogradeVariables(__DG1, __DG1InstantiationNeeded);
  __findRetrogradeVariables(__DG2, __DG2InstantiationNeeded);

  Idx* varInst = nullptr;
  if (__nbVar != 0) {
    varInst = static_cast< Idx* >(ALLOCATE(sizeof(Idx) * __nbVar));
    for (Idx i = 0; i < __nbVar; i++)
      varInst[i] = (Idx)0;
  }

  O4DGContext conti(varInst, __nbVar);
  conti.setDG1Node(__DG1->root());
  conti.setDG2Node(__DG2->root());

  NodeId root = __compute(conti, (Idx)0 - 1);
  __rd->manager()->setRootNode(root);

  if (__nbVar != 0) DEALLOCATE(varInst, sizeof(Idx) * __nbVar);

  __rd->erase(*__targetVar);

  return __rd;
}

Member Data Documentation

◆ __combine

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const COMBINEOPERATOR< GUM_SCALAR > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__combine
private

The combination function to be performed on the leaves.

Definition at line 120 of file regress.h.

Referenced by gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__compute().

◆ __default

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
short int* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__default
private

Just a computational trick.

Definition at line 133 of file regress.h.

◆ __DG1

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG1
private

One of the two function graphs used for the operation.

Definition at line 99 of file regress.h.

◆ __DG1InstantiationNeeded

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
HashTable< NodeId, short int* > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG1InstantiationNeeded
private

Table used to know whether a given node of a given function graph has retrograde variables beneath it.

Definition at line 129 of file regress.h.

◆ __DG2

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG2
private

The other of the two function graphs used for the operation.

Definition at line 102 of file regress.h.

◆ __DG2InstantiationNeeded

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
HashTable< NodeId, short int* > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__DG2InstantiationNeeded
private

Definition at line 130 of file regress.h.

◆ __explorationTable

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
HashTable< double, NodeId > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__explorationTable
private

The hashtable used to know whether a pair of nodes has already been visited.

Definition at line 125 of file regress.h.

Referenced by gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__compute().

◆ __nbVar

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
Idx gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__nbVar
private

The total number of variables involved in the operation.

Definition at line 117 of file regress.h.

◆ __neutral

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const GUM_SCALAR gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__neutral
private

The neutral element used to seed the projection performed on the leaves.

Definition at line 114 of file regress.h.

Referenced by gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__compute().

◆ __primedVars

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const Set< const DiscreteVariable* >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__primedVars
private

The set of variables we want to keep at the end.

Definition at line 108 of file regress.h.

◆ __project

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const PROJECTOPERATOR< GUM_SCALAR > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__project
private

Definition at line 121 of file regress.h.

◆ __rd

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__rd
private

The resulting function graph.

Definition at line 105 of file regress.h.

◆ __targetVar

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const DiscreteVariable* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::__targetVar
private

The variable we work on to eliminate.

Definition at line 111 of file regress.h.

The documentation for this class was generated from the following files:

regress.h
regress_tpl.h