aGrUM  0.20.2
a C++ library for (probabilistic) graphical models
gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy > Class Template Reference

Class used to perform Function Graph Operations in the FMDP Framework. More...

#include <agrum/tools/multidim/patterns/regress.h>


Public Member Functions

Constructors / Destructors
 Regress (const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *vfunction, const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *probDist, const Set< const DiscreteVariable * > *primedVars, const DiscreteVariable *targetVar, const GUM_SCALAR neutral)
 Default constructor. More...
 
 ~Regress ()
 Default destructor. More...
 
Main Method
MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * compute ()
 Computes and builds the Function Graph that is the result of the operation. More...
 

Detailed Description

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
class gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >

Class used to perform Function Graph Operations in the FMDP Framework.

Definition at line 54 of file regress.h.

Constructor & Destructor Documentation

◆ Regress()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::Regress ( const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *  vfunction,
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *  probDist,
const Set< const DiscreteVariable * > *  primedVars,
const DiscreteVariable *  targetVar,
const GUM_SCALAR  neutral 
)

Default constructor.

Definition at line 48 of file regress_tpl.h.


   :
    DG1__(DG1),
    DG2__(DG2), neutral__(neutral), combine__(), project__(),
    DG1InstantiationNeeded__(DG1->realSize(), true, false),
    DG2InstantiationNeeded__(DG2->realSize(), true, false) {
  GUM_CONSTRUCTOR(Regress);
  rd__ = MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >::
     getReducedAndOrderedInstance();
  nbVar__ = 0;
  default__ = nullptr;
  primedVars__ = primedVars;
  targetVar__ = targetVar;
}
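In the FMDP planning code, the combination is typically a product and the projection a sum, so that compute() sums the target variable out of the product of the two operands. A minimal usage sketch (the helper name regressValueFunction is illustrative; the caller is assumed to own both operand graphs and the set of primed variables):

#include <functional>  // std::multiplies, std::plus

#include <agrum/tools/multidim/patterns/regress.h>

// Illustrative helper: regresses a value function V through the CPT of
// targetVar, i.e. computes result = sum_{targetVar} ( V * probDist ).
gum::MultiDimFunctionGraph< double >* regressValueFunction(
   const gum::MultiDimFunctionGraph< double >*     vFunction,
   const gum::MultiDimFunctionGraph< double >*     probDist,
   const gum::Set< const gum::DiscreteVariable* >* primedVars,
   const gum::DiscreteVariable*                    targetVar) {
  // std::multiplies combines the leaves of the two operands; std::plus
  // projects (sums) targetVar out; 0.0 is the neutral element of that sum.
  gum::Regress< double, std::multiplies, std::plus > r(
     vFunction, probDist, primedVars, targetVar, 0.0);

  // compute() returns a freshly allocated graph; ownership passes to the caller.
  return r.compute();
}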

◆ ~Regress()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::~Regress ( )

Default destructor.

Definition at line 75 of file regress_tpl.h.


{
  GUM_DESTRUCTOR(Regress);

  for (auto instIter = DG1InstantiationNeeded__.beginSafe();
       instIter != DG1InstantiationNeeded__.endSafe();
       ++instIter)
    DEALLOCATE(instIter.val(), sizeof(short int) * nbVar__);

  for (auto instIter = DG2InstantiationNeeded__.beginSafe();
       instIter != DG2InstantiationNeeded__.endSafe();
       ++instIter)
    DEALLOCATE(instIter.val(), sizeof(short int) * nbVar__);

  if (nbVar__ != 0) DEALLOCATE(default__, sizeof(short int) * nbVar__);
}

Member Function Documentation

◆ compute()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::compute ( )

Computes and builds the Function Graph that is the result of the operation.

Definition at line 103 of file regress_tpl.h.


{
  // Compute the variable order of the resulting graph and find, for each
  // node of both operands, the retrograde variables beneath it
  establishVarOrder__();
  findRetrogradeVariables__(DG1__, DG1InstantiationNeeded__);
  findRetrogradeVariables__(DG2__, DG2InstantiationNeeded__);

  Idx* varInst = nullptr;
  if (nbVar__ != 0) {
    varInst = static_cast< Idx* >(ALLOCATE(sizeof(Idx) * nbVar__));
    for (Idx i = 0; i < nbVar__; i++)
      varInst[i] = (Idx)0;
  }

  O4DGContext conti(varInst, nbVar__);
  conti.setDG1Node(DG1__->root());
  conti.setDG2Node(DG2__->root());

  NodeId root = compute__(conti, (Idx)0 - 1);
  rd__->manager()->setRootNode(root);

  if (nbVar__ != 0) DEALLOCATE(varInst, sizeof(Idx) * nbVar__);

  rd__->erase(*targetVar__);

  return rd__;
}
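Writing the combination as $\otimes$ and the projection as $\oplus$, the graph built here represents (a sketch of the semantics; $V$ stands for vfunction and $P$ for probDist):

\[
  rd \;=\; \bigoplus_{x \,\in\, \Omega_{\text{targetVar}}} \big( V \otimes P \big)\big|_{\text{targetVar} = x}
\]

where the accumulation starts from the neutral element passed to the constructor. With $\otimes = \times$ and $\oplus = +$ this is the classical regression $\sum_{x'} V(x')\,P(x' \mid \cdot)$; the target variable is then erased from the resulting graph's variable sequence.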

◆ compute__()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE NodeId gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::compute__ ( O4DGContext &  currentSituation,
Idx  lastInstVarPos 
)
private

The main recursion function.

Definition at line 327 of file regress_tpl.h.


{
  NodeId newNode = 0;

  // If both current nodes are terminal,
  // we only have to compute the resulting value
  if (DG1__->isTerminalNode(currentSituation.DG1Node())
      && DG2__->isTerminalNode(currentSituation.DG2Node())) {
    // We have to compute the new value and insert a new node in the
    // diagram with this value, ...
    GUM_SCALAR newVal = neutral__;
    GUM_SCALAR tempVal = combine__(DG1__->nodeValue(currentSituation.DG1Node()),
                                   DG2__->nodeValue(currentSituation.DG2Node()));
    for (Idx targetModa = 0; targetModa < targetVar__->domainSize();
         ++targetModa)
      newVal = project__(newVal, tempVal);
    return rd__->manager()->addTerminalNode(newVal);
  }

  // If not,
  // we'll have to do some exploration

  // First we ensure that we haven't already visited this pair of nodes
  // under the same circumstances
  short int* dg1NeededVar
     = DG1InstantiationNeeded__.exists(currentSituation.DG1Node())
          ? DG1InstantiationNeeded__[currentSituation.DG1Node()]
          : default__;
  Idx dg1CurrentVarPos = DG1__->isTerminalNode(currentSituation.DG1Node())
                            ? nbVar__
                            : rd__->variablesSequence().pos(
                               DG1__->node(currentSituation.DG1Node())->nodeVar());
  short int* dg2NeededVar
     = DG2InstantiationNeeded__.exists(currentSituation.DG2Node())
          ? DG2InstantiationNeeded__[currentSituation.DG2Node()]
          : default__;
  Idx dg2CurrentVarPos = DG2__->isTerminalNode(currentSituation.DG2Node())
                            ? nbVar__
                            : rd__->variablesSequence().pos(
                               DG2__->node(currentSituation.DG2Node())->nodeVar());

  short int* instNeeded
     = static_cast< short int* >(ALLOCATE(sizeof(short int) * nbVar__));

  for (Idx i = 0; i < nbVar__; i++) {
    instNeeded[i] = dg1NeededVar[i] + dg2NeededVar[i];
  }

  double curSitKey = currentSituation.key(instNeeded);

  if (explorationTable__.exists(curSitKey)) {
    DEALLOCATE(instNeeded, sizeof(short int) * nbVar__);

    return explorationTable__[curSitKey];
  }

  // ====================================================

  NodeId origDG1 = currentSituation.DG1Node(),
         origDG2 = currentSituation.DG2Node();

  const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* leaddg
     = nullptr;
  NodeId leadNodeId = 0;
  Idx leadVarPos = rd__->variablesSequence().size();
  typedef void (O4DGContext::*SetNodeFunction)(const NodeId&);
  SetNodeFunction leadFunction = nullptr;

  bool sameVar = false;

  if (!DG1__->isTerminalNode(currentSituation.DG1Node())) {
    if (currentSituation.varModality(dg1CurrentVarPos) != 0) {
      // If the variable associated with the current node has already been
      // instantiated, we have to jump over it
      currentSituation.setDG1Node(
         DG1__->node(currentSituation.DG1Node())
            ->son(currentSituation.varModality(dg1CurrentVarPos) - 1));

      newNode = compute__(currentSituation, lastInstVarPos);
      explorationTable__.insert(curSitKey, newNode);
      currentSituation.setDG1Node(origDG1);
      currentSituation.setDG2Node(origDG2);

      DEALLOCATE(instNeeded, sizeof(short int) * nbVar__);

      return newNode;
    }

    leaddg = DG1__;
    leadNodeId = currentSituation.DG1Node();
    leadVarPos = dg1CurrentVarPos;
    leadFunction = &O4DGContext::setDG1Node;
  }

  if (!DG2__->isTerminalNode(currentSituation.DG2Node())) {
    if (currentSituation.varModality(dg2CurrentVarPos) != 0) {
      // If the variable associated with the current node has already been
      // instantiated, we have to jump over it
      currentSituation.setDG2Node(
         DG2__->node(currentSituation.DG2Node())
            ->son(currentSituation.varModality(dg2CurrentVarPos) - 1));

      newNode = compute__(currentSituation, lastInstVarPos);
      explorationTable__.insert(curSitKey, newNode);
      currentSituation.setDG1Node(origDG1);
      currentSituation.setDG2Node(origDG2);

      DEALLOCATE(instNeeded, sizeof(short int) * nbVar__);

      return newNode;
    }

    if (leadVarPos == dg2CurrentVarPos) { sameVar = true; }

    if (leadVarPos > dg2CurrentVarPos) {
      leaddg = DG2__;
      leadNodeId = currentSituation.DG2Node();
      leadVarPos = dg2CurrentVarPos;
      leadFunction = &O4DGContext::setDG2Node;
    }
  }

  // ====================================================
  // Anticipated Exploration

  // Before exploring nodes, we have to ensure that every anticipated
  // exploration is done
  for (Idx varPos = lastInstVarPos + 1; varPos < leadVarPos; ++varPos) {
    if (instNeeded[varPos]) {
      const DiscreteVariable* curVar = rd__->variablesSequence().atPos(varPos);
      NodeId* sonsIds = static_cast< NodeId* >(
         ALLOCATE(sizeof(NodeId) * curVar->domainSize()));

      for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
        currentSituation.chgVarModality(varPos, modality + 1);

        sonsIds[modality] = compute__(currentSituation, varPos);
      }

      newNode = rd__->manager()->addInternalNode(curVar, sonsIds);

      explorationTable__.insert(curSitKey, newNode);
      currentSituation.chgVarModality(varPos, 0);
      currentSituation.setDG1Node(origDG1);
      currentSituation.setDG2Node(origDG2);

      DEALLOCATE(instNeeded, sizeof(short int) * nbVar__);

      return newNode;
    }
  }

  // ====================================================
  // Terminal Exploration
  if (sameVar && DG1__->node(origDG1)->nodeVar() == targetVar__) {
    GUM_SCALAR newVal = neutral__;
    for (Idx targetModa = 0; targetModa < targetVar__->domainSize();
         ++targetModa)
      newVal = project__(
         newVal,
         combine__(DG1__->nodeValue(DG1__->node(origDG1)->son(targetModa)),
                   DG2__->nodeValue(DG2__->node(origDG2)->son(targetModa))));
    newNode = rd__->manager()->addTerminalNode(newVal);
    explorationTable__.insert(curSitKey, newNode);
    DEALLOCATE(instNeeded, sizeof(short int) * nbVar__);
    return newNode;
  }
  if (DG1__->isTerminalNode(origDG1)) {
    if (DG2__->node(origDG2)->nodeVar() == targetVar__) {
      GUM_SCALAR newVal = neutral__;
      for (Idx targetModa = 0; targetModa < targetVar__->domainSize();
           ++targetModa)
        newVal = project__(
           newVal,
           combine__(DG1__->nodeValue(origDG1),
                     DG2__->nodeValue(DG2__->node(origDG2)->son(targetModa))));
      newNode = rd__->manager()->addTerminalNode(newVal);
      explorationTable__.insert(curSitKey, newNode);
      DEALLOCATE(instNeeded, sizeof(short int) * nbVar__);
      return newNode;
    }
  } else {
    if (DG1__->node(origDG1)->nodeVar() == targetVar__
        && DG2__->isTerminalNode(origDG2)) {
      GUM_SCALAR newVal = neutral__;
      for (Idx targetModa = 0; targetModa < targetVar__->domainSize();
           ++targetModa)
        newVal = project__(
           newVal,
           combine__(DG1__->nodeValue(DG1__->node(origDG1)->son(targetModa)),
                     DG2__->nodeValue(origDG2)));
      newNode = rd__->manager()->addTerminalNode(newVal);
      explorationTable__.insert(curSitKey, newNode);
      DEALLOCATE(instNeeded, sizeof(short int) * nbVar__);
      return newNode;
    }
  }

  // ====================================================
  // Normal Exploration

  if (sameVar) {
    // Both current nodes bear the same variable:
    // we have to go down on both diagrams
    const InternalNode* dg1Node = DG1__->node(origDG1);
    const InternalNode* dg2Node = DG2__->node(origDG2);

    const DiscreteVariable* curVar = dg1Node->nodeVar();
    Idx varPos = rd__->variablesSequence().pos(curVar);
    NodeId* sonsIds
       = static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));

    for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
      currentSituation.chgVarModality(varPos, modality + 1);
      currentSituation.setDG1Node(dg1Node->son(modality));
      currentSituation.setDG2Node(dg2Node->son(modality));

      sonsIds[modality] = compute__(currentSituation, varPos);
    }

    newNode = rd__->manager()->addInternalNode(curVar, sonsIds);

    explorationTable__.insert(curSitKey, newNode);
    currentSituation.chgVarModality(varPos, 0);
    currentSituation.setDG1Node(origDG1);
    currentSituation.setDG2Node(origDG2);

    DEALLOCATE(instNeeded, sizeof(short int) * nbVar__);

    return newNode;
  }
  // ====================================================
  else {
    // Otherwise we only go down on the leading diagram, i.e. the one
    // whose current variable comes first in the final order
    const InternalNode* leaddgNode = leaddg->node(leadNodeId);

    const DiscreteVariable* curVar = leaddgNode->nodeVar();
    NodeId* sonsIds
       = static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));

    for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
      currentSituation.chgVarModality(leadVarPos, modality + 1);
      (currentSituation.*leadFunction)(leaddgNode->son(modality));

      sonsIds[modality] = compute__(currentSituation, leadVarPos);
    }

    newNode = rd__->manager()->addInternalNode(curVar, sonsIds);

    explorationTable__.insert(curSitKey, newNode);
    currentSituation.chgVarModality(leadVarPos, 0);
    currentSituation.setDG1Node(origDG1);
    currentSituation.setDG2Node(origDG2);

    DEALLOCATE(instNeeded, sizeof(short int) * nbVar__);

    return newNode;
  }
}
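The structure above is the classic memoized "apply" recursion on pairs of diagram nodes: explorationTable__ guarantees that each pair of nodes (together with its instantiation context) is expanded at most once. A toy, aGrUM-independent sketch of that caching pattern, assuming two complete diagrams over the same ordered binary variables so the recursion always takes the sameVar branch (this toy also sums every variable out, so it returns a scalar rather than a graph):

#include <cstdint>
#include <iostream>
#include <unordered_map>
#include <vector>

// A node is terminal iff lo < 0; internal nodes test one binary variable.
struct Node {
  double value;   // meaningful for terminals only
  int    lo, hi;  // children indices, meaningful for internal nodes only
};

// Memoized pairwise recursion: sum over all assignments of the product of
// the two diagrams' leaves. The cache is keyed on the pair of node ids so
// every pair is expanded at most once, as in compute__.
double sumProduct(const std::vector< Node >& d1, int n1,
                  const std::vector< Node >& d2, int n2,
                  std::unordered_map< std::uint64_t, double >& cache) {
  const std::uint64_t key = (std::uint64_t(n1) << 32) | std::uint64_t(n2);
  if (auto hit = cache.find(key); hit != cache.end()) return hit->second;

  double res;
  if (d1[n1].lo < 0 && d2[n2].lo < 0)  // two terminals: combine
    res = d1[n1].value * d2[n2].value;
  else                                 // same variable: descend both, project
    res = sumProduct(d1, d1[n1].lo, d2, d2[n2].lo, cache)
          + sumProduct(d1, d1[n1].hi, d2, d2[n2].hi, cache);

  cache[key] = res;
  return res;
}

int main() {
  // d1 encodes f(x) = x ? 2 : 1, d2 encodes g(x) = x ? 4 : 3 (roots at 0)
  std::vector< Node > d1{{0.0, 1, 2}, {1.0, -1, -1}, {2.0, -1, -1}};
  std::vector< Node > d2{{0.0, 1, 2}, {3.0, -1, -1}, {4.0, -1, -1}};
  std::unordered_map< std::uint64_t, double > cache;
  std::cout << sumProduct(d1, 0, d2, 0, cache) << '\n';  // 1*3 + 2*4 = 11
}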

◆ establishVarOrder__()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE void gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::establishVarOrder__ ( )
private

Computes a variable order for the final decision graph that minimizes the number of re-explorations.

Definition at line 141 of file regress_tpl.h.


{
  SequenceIteratorSafe< const DiscreteVariable* > fite
     = DG1__->variablesSequence().beginSafe();
  SequenceIteratorSafe< const DiscreteVariable* > site
     = DG2__->variablesSequence().beginSafe();

  while (fite != DG1__->variablesSequence().endSafe()
         && site != DG2__->variablesSequence().endSafe()) {
    // Test : if var from first order is already in final order
    // we move on to the next one
    if (rd__->variablesSequence().exists(*fite)) {
      ++fite;
      continue;
    }

    // Test : if var from second order is already in final order
    // we move on to the next one
    if (rd__->variablesSequence().exists(*site)) {
      ++site;
      continue;
    }

    // Test : if current var of the first order is absent from the second
    // order, we add it to the final order
    if (!DG2__->variablesSequence().exists(*fite)
        && !primedVars__->exists(*fite)) {
      rd__->add(**fite);
      ++fite;
      continue;
    }

    // Test : if current var of the second order is absent from the first
    // order, we add it to the final order
    if (!DG1__->variablesSequence().exists(*site)
        && !primedVars__->exists(*site)) {
      rd__->add(**site);
      ++site;
      continue;
    }

    // Test : if both orders agree on the current var,
    // we add it to the final order
    if (*fite == *site) {
      rd__->add(**fite);
      ++fite;
      ++site;
      continue;
    }

    // Otherwise, choosing the first order's var costs less in terms of
    // re-exploration, so we choose it
    rd__->add(**fite);
    ++fite;
  }

  // Whenever an iterator has finished its sequence,
  // the other may still be in the middle of its own.
  // Hence, this part ensures that any remaining variables
  // will be added to the final sequence if needed.
  if (fite == DG1__->variablesSequence().endSafe()) {
    for (; site != DG2__->variablesSequence().endSafe(); ++site)
      if (!rd__->variablesSequence().exists(*site)) rd__->add(**site);
  } else {
    for (; fite != DG1__->variablesSequence().endSafe(); ++fite)
      if (!rd__->variablesSequence().exists(*fite)) rd__->add(**fite);
  }

  // Various initializations needed now that we have the bigger picture
  nbVar__ = rd__->variablesSequence().size();

  if (nbVar__ != 0) {
    default__ = static_cast< short int* >(ALLOCATE(sizeof(short int) * nbVar__));
    for (Idx i = 0; i < nbVar__; i++)
      default__[i] = (short int)0;
  }
}
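The interleaving above can be illustrated independently of aGrUM. A toy sketch of the merge, using plain string sequences and ignoring the primed-variable filtering and the re-exploration cost heuristic:

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Toy sketch of the merge performed by establishVarOrder__: walk both
// operand orders simultaneously, emit variables unique to one order as
// soon as they are met, emit shared variables when both orders agree,
// and otherwise favor the first order.
std::vector< std::string > mergeOrders(const std::vector< std::string >& o1,
                                       const std::vector< std::string >& o2) {
  std::vector< std::string > result;
  auto emitted = [&result](const std::string& v) {
    return std::find(result.begin(), result.end(), v) != result.end();
  };
  auto contains = [](const std::vector< std::string >& o, const std::string& v) {
    return std::find(o.begin(), o.end(), v) != o.end();
  };

  std::size_t i = 0, j = 0;
  while (i < o1.size() && j < o2.size()) {
    if (emitted(o1[i])) { ++i; continue; }
    if (emitted(o2[j])) { ++j; continue; }
    if (!contains(o2, o1[i])) { result.push_back(o1[i++]); continue; }
    if (!contains(o1, o2[j])) { result.push_back(o2[j++]); continue; }
    if (o1[i] == o2[j]) { result.push_back(o1[i]); ++i; ++j; continue; }
    result.push_back(o1[i++]);  // disagreement: favor the first order
  }
  // One order may end before the other: flush the remaining variables
  for (; i < o1.size(); ++i) if (!emitted(o1[i])) result.push_back(o1[i]);
  for (; j < o2.size(); ++j) if (!emitted(o2[j])) result.push_back(o2[j]);
  return result;
}

int main() {
  // {A,B,C} vs {B,A,D}: A is shared but the orders disagree, D is unique to o2
  for (const auto& v : mergeOrders({"A", "B", "C"}, {"B", "A", "D"}))
    std::cout << v << ' ';  // prints: A B C D
  std::cout << '\n';
}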

◆ findRetrogradeVariables__()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE void gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::findRetrogradeVariables__ ( const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *  dg,
HashTable< NodeId, short int * > &  dgInstNeed 
)
private

Establishes, for each node in both function graphs, whether it has retrograde variables beneath it, i.e. variables that appear below the node in the operand graph but before the node's own variable in the final order, and that must therefore be instantiated before descending.

Definition at line 229 of file regress_tpl.h.


{
  HashTable< NodeId, short int* > nodesVarDescendant;
  Size tableSize = Size(nbVar__ * sizeof(short int));

  // Bottom-up pass: for each node, record which variables appear beneath
  // it and flag those that precede the node's variable in the final order
  for (auto varIter = dg->variablesSequence().rbeginSafe();
       varIter != dg->variablesSequence().rendSafe();
       --varIter) {
    Idx varPos = rd__->variablesSequence().pos(*varIter);

    const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
    while (nodeIter != nullptr) {
      short int* instantiationNeeded
         = static_cast< short int* >(ALLOCATE(tableSize));
      dgInstNeed.insert(nodeIter->element(), instantiationNeeded);
      short int* varDescendant = static_cast< short int* >(ALLOCATE(tableSize));
      nodesVarDescendant.insert(nodeIter->element(), varDescendant);
      for (Idx j = 0; j < nbVar__; j++) {
        instantiationNeeded[j] = (short int)0;
        varDescendant[j] = (short int)0;
      }

      varDescendant[varPos] = (short int)1;
      for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons();
           ++modality) {
        if (!dg->isTerminalNode(dg->node(nodeIter->element())->son(modality))) {
          short int* sonVarDescendant
             = nodesVarDescendant[dg->node(nodeIter->element())->son(modality)];
          for (Idx varIdx = 0; varIdx < nbVar__; varIdx++) {
            varDescendant[varIdx] += sonVarDescendant[varIdx];
            if (varDescendant[varIdx] && varIdx < varPos)
              instantiationNeeded[varIdx] = (short int)1;
          }
        }
      }
      nodeIter = nodeIter->nextLink();
    }
  }

  // Top-down pass: propagate the instantiation needs to the descendants
  // that bear the retrograde variables
  for (auto varIter = dg->variablesSequence().beginSafe();
       varIter != dg->variablesSequence().endSafe();
       ++varIter) {
    const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
    while (nodeIter != nullptr) {
      for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons();
           ++modality) {
        NodeId sonId = dg->node(nodeIter->element())->son(modality);
        if (!dg->isTerminalNode(sonId)) {
          for (Idx varIdx = 0; varIdx < nbVar__; ++varIdx) {
            if (dgInstNeed[nodeIter->element()][varIdx]
                && nodesVarDescendant[sonId][varIdx]) {
              dgInstNeed[sonId][varIdx] = (short int)1;
            }
          }
        }
      }
      nodeIter = nodeIter->nextLink();
    }
  }

  // Free the temporary descendant tables
  for (HashTableIterator< NodeId, short int* > it = nodesVarDescendant.begin();
       it != nodesVarDescendant.end();
       ++it) {
    DEALLOCATE(it.val(), tableSize);
  }

  nodesVarDescendant.clear();
}

Member Data Documentation

◆ combine__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const COMBINEOPERATOR< GUM_SCALAR > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::combine__
private

The combination function applied to the leaves.

Definition at line 123 of file regress.h.

◆ default__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
short int* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::default__
private

Default all-zero instantiation table; just a computational trick.

Definition at line 136 of file regress.h.

◆ DG1__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::DG1__
private

One of the two function graphs used for the operation.

Definition at line 102 of file regress.h.

◆ DG1InstantiationNeeded__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
HashTable< NodeId, short int* > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::DG1InstantiationNeeded__
private

Table used to know whether a given node of the first function graph has retrograde variables beneath it.

Definition at line 132 of file regress.h.

◆ DG2__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::DG2__
private

The other one.

Definition at line 105 of file regress.h.

◆ DG2InstantiationNeeded__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
HashTable< NodeId, short int* > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::DG2InstantiationNeeded__
private

Definition at line 133 of file regress.h.

◆ explorationTable__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
HashTable< double, NodeId > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::explorationTable__
private

The hashtable used to know whether a pair of nodes has already been visited.

Definition at line 128 of file regress.h.

◆ nbVar__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
Idx gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::nbVar__
private

The total number of variables involved in the operation.

Definition at line 120 of file regress.h.

◆ neutral__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const GUM_SCALAR gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::neutral__
private

The neutral element for the projection performed on the leaves.

Definition at line 117 of file regress.h.

◆ primedVars__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const Set< const DiscreteVariable* >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::primedVars__
private

The set of primed variables, kept at the end of the final variable order.

Definition at line 111 of file regress.h.

◆ project__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const PROJECTOPERATOR< GUM_SCALAR > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::project__
private

Definition at line 124 of file regress.h.

◆ rd__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::rd__
private

The resulting function graph.

Definition at line 108 of file regress.h.

◆ targetVar__

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const DiscreteVariable* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::targetVar__
private

The variable we want to eliminate.

Definition at line 114 of file regress.h.


The documentation for this class was generated from the following files:

regress.h
regress_tpl.h