aGrUM  0.20.3
a C++ library for (probabilistic) graphical models
gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy > Class Template Reference

Class used to perform Function Graph Operations in the FMDP Framework.

#include <agrum/tools/multidim/patterns/regress.h>


Public Member Functions

Constructors / Destructors
 Regress (const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *vfunction, const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *probDist, const Set< const DiscreteVariable * > *primedVars, const DiscreteVariable *targetVar, const GUM_SCALAR neutral)
 Default constructor.
 
 ~Regress ()
 Default destructor.
 
Main Method
MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * compute ()
 Computes and builds the Function Graph that is the result of the operation.
 

Detailed Description

template<typename GUM_SCALAR, template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
class gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >

Class used to perform function graph operations in the FMDP framework. It combines a value function and a probability distribution, both given as MultiDimFunctionGraph, leaf by leaf with COMBINEOPERATOR, then eliminates the target variable by folding its modalities with PROJECTOPERATOR, and returns the resulting function graph.

Definition at line 53 of file regress.h.
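
Because the combination is applied to leaf values and the projection folds the modalities of targetVar starting from the neutral element, instantiating the class with a product as combine and a sum as project yields the usual FMDP regression of a value function through a transition distribution. The sketch below is a minimal, hedged usage example: the functors std::multiplies and std::plus, the neutral element 0 and the helper name regressOut are illustrative assumptions, not part of this page; only the constructor and compute() documented here are actual aGrUM API.

#include <functional>

#include <agrum/tools/multidim/patterns/regress.h>

// Hypothetical helper (illustration only): sums targetVar out of
// (vFunction * probDist) and returns the resulting function graph.
// The caller owns the returned pointer: as the destructor documented
// below shows, Regress does not delete the graph it builds.
template < typename GUM_SCALAR,
           template < typename > class TerminalNodePolicy = gum::ExactTerminalNodePolicy >
gum::MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >*
   regressOut(const gum::MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* vFunction,
              const gum::MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* probDist,
              const gum::Set< const gum::DiscreteVariable* >*                     primedVars,
              const gum::DiscreteVariable*                                        targetVar) {
  // combine = product, project = sum, 0 = neutral element of the sum
  gum::Regress< GUM_SCALAR, std::multiplies, std::plus, TerminalNodePolicy > regression(
     vFunction, probDist, primedVars, targetVar, (GUM_SCALAR)0);
  return regression.compute();
}

Note that compute() also erases targetVar from the variable sequence of the result (see the listing of compute() below), so the returned graph no longer depends on it.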

Constructor & Destructor Documentation

◆ Regress()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::Regress ( const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *  vfunction,
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *  probDist,
const Set< const DiscreteVariable * > *  primedVars,
const DiscreteVariable *  targetVar,
const GUM_SCALAR  neutral 
)

Default constructor.

Definition at line 46 of file regress_tpl.h.


51  :
52  _DG1_(DG1),
53  _DG2_(DG2), _neutral_(neutral), _combine_(), _project_(),
54  _DG1InstantiationNeeded_(DG1->realSize(), true, false),
55  _DG2InstantiationNeeded_(DG2->realSize(), true, false) {
56  GUM_CONSTRUCTOR(Regress);
57  _rd_ = MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >::getReducedAndOrderedInstance();
58  _nbVar_ = 0;
59  _default_ = nullptr;
60  _primedVars_ = primedVars;
61  _targetVar_ = targetVar;
62  }

◆ ~Regress()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::~Regress ( )

Default destructor.

Definition at line 71 of file regress_tpl.h.


71  {
72  GUM_DESTRUCTOR(Regress);
73 
74  for (auto instIter = _DG1InstantiationNeeded_.beginSafe();
75  instIter != _DG1InstantiationNeeded_.endSafe();
76  ++instIter)
77  DEALLOCATE(instIter.val(), sizeof(short int) * _nbVar_);
78 
79  for (auto instIter = _DG2InstantiationNeeded_.beginSafe();
80  instIter != _DG2InstantiationNeeded_.endSafe();
81  ++instIter)
82  DEALLOCATE(instIter.val(), sizeof(short int) * _nbVar_);
83 
84  if (_nbVar_ != 0) DEALLOCATE(_default_, sizeof(short int) * _nbVar_);
85  }

Member Function Documentation

◆ _compute_()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE NodeId gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_compute_ ( O4DGContext &  currentSituation,
Idx  lastInstVarPos 
)
private

The main recursion function.

Definition at line 310 of file regress_tpl.h.


312  {
313  NodeId newNode = 0;
314 
315  // If both current nodes are terminal,
316  // we only have to compute the resulting value
317  if (_DG1_->isTerminalNode(currentSituation.DG1Node())
318  && _DG2_->isTerminalNode(currentSituation.DG2Node())) {
319  // We have to compute the new value and insert a new node in the
320  // diagram with this value, ...
321  GUM_SCALAR newVal = _neutral_;
322  GUM_SCALAR tempVal = _combine_(_DG1_->nodeValue(currentSituation.DG1Node()),
323  _DG2_->nodeValue(currentSituation.DG2Node()));
324  for (Idx targetModa = 0; targetModa < _targetVar_->domainSize(); ++targetModa)
325  newVal = _project_(newVal, tempVal);
326  return _rd_->manager()->addTerminalNode(newVal);
327  }
328 
329  // If not,
330  // we'll have to do some exploration
331 
332  // First we ensure that we haven't already visited this pair of nodes
333  // under the same circumstances
334  short int* dg1NeededVar = _DG1InstantiationNeeded_.exists(currentSituation.DG1Node())
335  ? _DG1InstantiationNeeded_[currentSituation.DG1Node()]
336  : _default_;
337  Idx dg1CurrentVarPos
338  = _DG1_->isTerminalNode(currentSituation.DG1Node())
339  ? _nbVar_
340  : _rd_->variablesSequence().pos(_DG1_->node(currentSituation.DG1Node())->nodeVar());
341  short int* dg2NeededVar = _DG2InstantiationNeeded_.exists(currentSituation.DG2Node())
342  ? _DG2InstantiationNeeded_[currentSituation.DG2Node()]
343  : _default_;
344  Idx dg2CurrentVarPos
345  = _DG2_->isTerminalNode(currentSituation.DG2Node())
346  ? _nbVar_
347  : _rd_->variablesSequence().pos(_DG2_->node(currentSituation.DG2Node())->nodeVar());
348 
349  short int* instNeeded = static_cast< short int* >(ALLOCATE(sizeof(short int) * _nbVar_));
350 
351  for (Idx i = 0; i < _nbVar_; i++) {
352  instNeeded[i] = dg1NeededVar[i] + dg2NeededVar[i];
353  }
354 
355  double curSitKey = currentSituation.key(instNeeded);
356 
357  if (_explorationTable_.exists(curSitKey)) {
358  DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
359 
360  return _explorationTable_[curSitKey];
361  }
362 
363  // ====================================================
364 
365  NodeId origDG1 = currentSituation.DG1Node(), origDG2 = currentSituation.DG2Node();
366 
367  const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* leaddg = nullptr;
368  NodeId leadNodeId = 0;
369  Idx leadVarPos = _rd_->variablesSequence().size();
370  typedef void (O4DGContext::*SetNodeFunction)(const NodeId&);
371  SetNodeFunction leadFunction = nullptr;
372 
373  bool sameVar = false;
374 
375  if (!_DG1_->isTerminalNode(currentSituation.DG1Node())) {
376  if (currentSituation.varModality(dg1CurrentVarPos) != 0) {
377  // If the variable associated with the current node has already been
378  // instantiated, we have to skip it
379  currentSituation.setDG1Node(_DG1_->node(currentSituation.DG1Node())
380  ->son(currentSituation.varModality(dg1CurrentVarPos) - 1));
381 
382  newNode = _compute_(currentSituation, lastInstVarPos);
383  _explorationTable_.insert(curSitKey, newNode);
384  currentSituation.setDG1Node(origDG1);
385  currentSituation.setDG2Node(origDG2);
386 
387  DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
388 
389  return newNode;
390  }
391 
392  leaddg = _DG1_;
393  leadNodeId = currentSituation.DG1Node();
394  leadVarPos = dg1CurrentVarPos;
395  leadFunction = &O4DGContext::setDG1Node;
396  }
397 
398  if (!_DG2_->isTerminalNode(currentSituation.DG2Node())) {
399  if (currentSituation.varModality(dg2CurrentVarPos) != 0) {
400  // If the variable associated with the current node has already been
401  // instantiated, we have to skip it
402  currentSituation.setDG2Node(_DG2_->node(currentSituation.DG2Node())
403  ->son(currentSituation.varModality(dg2CurrentVarPos) - 1));
404 
405  newNode = _compute_(currentSituation, lastInstVarPos);
406  _explorationTable_.insert(curSitKey, newNode);
407  currentSituation.setDG1Node(origDG1);
408  currentSituation.setDG2Node(origDG2);
409 
410  DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
411 
412  return newNode;
413  }
414 
415  if (leadVarPos == dg2CurrentVarPos) { sameVar = true; }
416 
417  if (leadVarPos > dg2CurrentVarPos) {
418  leaddg = _DG2_;
419  leadNodeId = currentSituation.DG2Node();
420  leadVarPos = dg2CurrentVarPos;
421  leadFunction = &O4DGContext::setDG2Node;
422  }
423  }
424 
425  // ====================================================
426  // Anticipated Exploration
427 
428  // Before exploring nodes, we have to ensure that every anticipated
429  // exploration is done
430  for (Idx varPos = lastInstVarPos + 1; varPos < leadVarPos; ++varPos) {
431  if (instNeeded[varPos]) {
432  const DiscreteVariable* curVar = _rd_->variablesSequence().atPos(varPos);
433  NodeId* sonsIds = static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));
434 
435  for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
436  currentSituation.chgVarModality(varPos, modality + 1);
437 
438  sonsIds[modality] = _compute_(currentSituation, varPos);
439  }
440 
441  newNode = _rd_->manager()->addInternalNode(curVar, sonsIds);
442 
443  _explorationTable_.insert(curSitKey, newNode);
444  currentSituation.chgVarModality(varPos, 0);
445  currentSituation.setDG1Node(origDG1);
446  currentSituation.setDG2Node(origDG2);
447 
448  DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
449 
450  return newNode;
451  }
452  }
453 
454  // ====================================================
455  // Terminal Exploration
456  if (sameVar && _DG1_->node(origDG1)->nodeVar() == _targetVar_) {
457  GUM_SCALAR newVal = _neutral_;
458  for (Idx targetModa = 0; targetModa < _targetVar_->domainSize(); ++targetModa)
459  newVal = _project_(newVal,
460  _combine_(_DG1_->nodeValue(_DG1_->node(origDG1)->son(targetModa)),
461  _DG2_->nodeValue(_DG2_->node(origDG2)->son(targetModa))));
462  newNode = _rd_->manager()->addTerminalNode(newVal);
463  _explorationTable_.insert(curSitKey, newNode);
464  DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
465  return newNode;
466  }
467  if (_DG1_->isTerminalNode(origDG1)) {
468  if (_DG2_->node(origDG2)->nodeVar() == _targetVar_) {
469  GUM_SCALAR newVal = _neutral_;
470  for (Idx targetModa = 0; targetModa < _targetVar_->domainSize(); ++targetModa)
471  newVal = _project_(newVal,
472  _combine_(_DG1_->nodeValue(origDG1),
473  _DG2_->nodeValue(_DG2_->node(origDG2)->son(targetModa))));
474  newNode = _rd_->manager()->addTerminalNode(newVal);
475  _explorationTable_.insert(curSitKey, newNode);
476  DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
477  return newNode;
478  }
479  } else {
480  if (_DG1_->node(origDG1)->nodeVar() == _targetVar_ && _DG2_->isTerminalNode(origDG2)) {
481  GUM_SCALAR newVal = _neutral_;
482  for (Idx targetModa = 0; targetModa < _targetVar_->domainSize(); ++targetModa)
483  newVal = _project_(newVal,
484  _combine_(_DG1_->nodeValue(_DG1_->node(origDG1)->son(targetModa)),
485  _DG2_->nodeValue(origDG2)));
486  newNode = _rd_->manager()->addTerminalNode(newVal);
487  _explorationTable_.insert(curSitKey, newNode);
488  DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
489  return newNode;
490  }
491  }
492 
493  // ====================================================
494  // Normal Exploration
495 
496  // If only one of the current nodes is terminal,
497  // we have to explore deeper in the other diagram
498  if (sameVar) {
499  // If so - meaning it's the same variable - we have to go
500  // down on both
501  const InternalNode* dg1Node = _DG1_->node(origDG1);
502  const InternalNode* dg2Node = _DG2_->node(origDG2);
503 
504  const DiscreteVariable* curVar = dg1Node->nodeVar();
505  Idx varPos = _rd_->variablesSequence().pos(curVar);
506  NodeId* sonsIds = static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));
507 
508  for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
509  currentSituation.chgVarModality(varPos, modality + 1);
510  currentSituation.setDG1Node(dg1Node->son(modality));
511  currentSituation.setDG2Node(dg2Node->son(modality));
512 
513  sonsIds[modality] = _compute_(currentSituation, varPos);
514  }
515 
516  newNode = _rd_->manager()->addInternalNode(curVar, sonsIds);
517 
518  _explorationTable_.insert(curSitKey, newNode);
519  currentSituation.chgVarModality(varPos, 0);
520  currentSituation.setDG1Node(origDG1);
521  currentSituation.setDG2Node(origDG2);
522 
523  DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
524 
525  return newNode;
526  }
527  // ====================================================
528  else {
529  const InternalNode* leaddgNode = leaddg->node(leadNodeId);
530 
531  const DiscreteVariable* curVar = leaddgNode->nodeVar();
532  NodeId* sonsIds = static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));
533 
534  for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
535  currentSituation.chgVarModality(leadVarPos, modality + 1);
536  (currentSituation.*leadFunction)(leaddgNode->son(modality));
537 
538  sonsIds[modality] = _compute_(currentSituation, leadVarPos);
539  }
540 
541  newNode = _rd_->manager()->addInternalNode(curVar, sonsIds);
542 
543  _explorationTable_.insert(curSitKey, newNode);
544  currentSituation.chgVarModality(leadVarPos, 0);
545  currentSituation.setDG1Node(origDG1);
546  currentSituation.setDG2Node(origDG2);
547 
548  DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
549 
550  return newNode;
551  }
552  }

◆ _establishVarOrder_()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE void gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_establishVarOrder_ ( )
private

Computes a variable order for the final decision graph that minimizes the number of re-explorations.

Definition at line 135 of file regress_tpl.h.


135  {
136  SequenceIteratorSafe< const DiscreteVariable* > fite = _DG1_->variablesSequence().beginSafe();
137  SequenceIteratorSafe< const DiscreteVariable* > site = _DG2_->variablesSequence().beginSafe();
138 
139  while (fite != _DG1_->variablesSequence().endSafe()
140  && site != _DG2_->variablesSequence().endSafe()) {
141  // Test : if the var from the first order is already in the final order,
142  // we move on to the next one
143  if (_rd_->variablesSequence().exists(*fite)) {
144  ++fite;
145  continue;
146  }
147 
148  // Test : if the var from the second order is already in the final order,
149  // we move on to the next one
150  if (_rd_->variablesSequence().exists(*site)) {
151  ++site;
152  continue;
153  }
154 
155  // Test : is the current var of the first order present in the second
156  // order or among the primed vars? If not, we add it to the final order
157  if (!_DG2_->variablesSequence().exists(*fite) && !_primedVars_->exists(*fite)) {
158  _rd_->add(**fite);
159  ++fite;
160  continue;
161  }
162 
163  // Test : is the current var of the second order present in the first
164  // order or among the primed vars? If not, we add it to the final order
165  if (!_DG1_->variablesSequence().exists(*site) && !_primedVars_->exists(*site)) {
166  _rd_->add(**site);
167  ++site;
168  continue;
169  }
170 
171  // Test : if both orders agree on the current variable,
172  // we add it to the final order and advance both iterators
173  if (*fite == *site) {
174  _rd_->add(**fite);
175  ++fite;
176  ++site;
177  continue;
178  }
179 
180  // Test : if choosing the first order's variable costs less in terms
181  // of re-exploration, we choose it
182  _rd_->add(**fite);
183  ++fite;
184  }
185 
186  // Whenever an iterator has finished its sequence,
187  // the other may still be in the middle of its own.
188  // Hence, this part ensures that any remaining variables
189  // will be added to the final sequence if needed.
190  if (fite == _DG1_->variablesSequence().endSafe()) {
191  for (; site != _DG2_->variablesSequence().endSafe(); ++site)
192  if (!_rd_->variablesSequence().exists(*site)) _rd_->add(**site);
193  } else {
194  for (; fite != _DG1_->variablesSequence().endSafe(); ++fite)
195  if (!_rd_->variablesSequence().exists(*fite)) _rd_->add(**fite);
196  }
197 
198  // Various initialization needed now that we have a bigger picture
199  _nbVar_ = _rd_->variablesSequence().size();
200 
201  if (_nbVar_ != 0) {
202  _default_ = static_cast< short int* >(ALLOCATE(sizeof(short int) * _nbVar_));
203  for (Idx i = 0; i < _nbVar_; i++)
204  _default_[i] = (short int)0;
205  }
206  }
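To make the merge performed by _establishVarOrder_ above easier to follow, here is a simplified, standalone sketch of the same greedy strategy on plain std::string labels. It deliberately omits the primed-variable filtering and works on std::vector instead of aGrUM sequences; it is an illustration, not library code.

#include <algorithm>
#include <cstddef>
#include <string>
#include <vector>

// Simplified illustration of the variable-order merge: variables found in
// only one sequence are emitted as soon as they are reached, common
// variables are emitted when both iterators agree, and disagreements
// default to the first sequence's variable.
std::vector< std::string > mergeOrders(const std::vector< std::string >& first,
                                       const std::vector< std::string >& second) {
  std::vector< std::string > result;
  auto inResult = [&result](const std::string& v) {
    return std::find(result.begin(), result.end(), v) != result.end();
  };
  auto contains = [](const std::vector< std::string >& seq, const std::string& v) {
    return std::find(seq.begin(), seq.end(), v) != seq.end();
  };

  std::size_t f = 0, s = 0;
  while (f < first.size() && s < second.size()) {
    if (inResult(first[f])) { ++f; continue; }     // already placed
    if (inResult(second[s])) { ++s; continue; }    // already placed
    if (!contains(second, first[f])) { result.push_back(first[f]); ++f; continue; }
    if (!contains(first, second[s])) { result.push_back(second[s]); ++s; continue; }
    if (first[f] == second[s]) { result.push_back(first[f]); ++f; ++s; continue; }
    // both sequences disagree: default to the first sequence's variable
    result.push_back(first[f]); ++f;
  }
  // append whatever remains in the unfinished sequence
  for (; f < first.size(); ++f)
    if (!inResult(first[f])) result.push_back(first[f]);
  for (; s < second.size(); ++s)
    if (!inResult(second[s])) result.push_back(second[s]);
  return result;
}

For instance, mergeOrders({"a", "b", "c"}, {"b", "d", "c"}) yields {"a", "b", "d", "c"}: "a" occurs only in the first order, "b" is common, and "d" is emitted before the common variable "c" because it occurs only in the second order.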

◆ _findRetrogradeVariables_()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE void gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_findRetrogradeVariables_ ( const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *  dg,
HashTable< NodeId, short int * > &  dgInstNeed 
)
private

Establishes, for each node of the given function graph, whether it has retrograde variables beneath it, i.e. variables that occur below the node in the graph but come before the node's own variable in the final variable order.

Definition at line 218 of file regress_tpl.h.


219  {
220  HashTable< NodeId, short int* > nodesVarDescendant;
221  Size tableSize = Size(_nbVar_ * sizeof(short int));
222 
223  for (auto varIter = dg->variablesSequence().rbeginSafe();
224  varIter != dg->variablesSequence().rendSafe();
225  --varIter) {
226  Idx varPos = _rd_->variablesSequence().pos(*varIter);
227 
228  const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
229  while (nodeIter != nullptr) {
230  short int* instantiationNeeded = static_cast< short int* >(ALLOCATE(tableSize));
231  dgInstNeed.insert(nodeIter->element(), instantiationNeeded);
232  short int* varDescendant = static_cast< short int* >(ALLOCATE(tableSize));
233  nodesVarDescendant.insert(nodeIter->element(), varDescendant);
234  for (Idx j = 0; j < _nbVar_; j++) {
235  instantiationNeeded[j] = (short int)0;
236  varDescendant[j] = (short int)0;
237  }
238 
239 
240  varDescendant[varPos] = (short int)1;
241  for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons(); ++modality) {
242  if (!dg->isTerminalNode(dg->node(nodeIter->element())->son(modality))) {
243  short int* sonVarDescendant
244  = nodesVarDescendant[dg->node(nodeIter->element())->son(modality)];
245  for (Idx varIdx = 0; varIdx < _nbVar_; varIdx++) {
246  varDescendant[varIdx] += sonVarDescendant[varIdx];
247  if (varDescendant[varIdx] && varIdx < varPos)
248  instantiationNeeded[varIdx] = (short int)1;
249  }
250  }
251  }
252  nodeIter = nodeIter->nextLink();
253  }
254  }
255 
256  for (auto varIter = dg->variablesSequence().beginSafe();
257  varIter != dg->variablesSequence().endSafe();
258  ++varIter) {
259  const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
260  while (nodeIter != nullptr) {
261  for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons(); ++modality) {
262  NodeId sonId = dg->node(nodeIter->element())->son(modality);
263  if (!dg->isTerminalNode(sonId)) {
264  for (Idx varIdx = 0; varIdx < _nbVar_; ++varIdx) {
265  if (dgInstNeed[nodeIter->element()][varIdx] && nodesVarDescendant[sonId][varIdx]) {
266  dgInstNeed[sonId][varIdx] = (short int)1;
267  }
268  }
269  }
270  }
271  nodeIter = nodeIter->nextLink();
272  }
273  }
274 
275  for (HashTableIterator< NodeId, short int* > it = nodesVarDescendant.begin();
276  it != nodesVarDescendant.end();
277  ++it) {
278  DEALLOCATE(it.val(), tableSize);
279  }
280 
281  nodesVarDescendant.clear();
282  }

◆ compute()

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy>
INLINE MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::compute ( )

Computes and builds the Function Graph that is the result of the operation.

Definition at line 98 of file regress_tpl.h.


98  {
99  _establishVarOrder_();
100  _findRetrogradeVariables_(_DG1_, _DG1InstantiationNeeded_);
101  _findRetrogradeVariables_(_DG2_, _DG2InstantiationNeeded_);
102 
103  Idx* varInst = nullptr;
104  if (_nbVar_ != 0) {
105  varInst = static_cast< Idx* >(ALLOCATE(sizeof(Idx) * _nbVar_));
106  for (Idx i = 0; i < _nbVar_; i++)
107  varInst[i] = (Idx)0;
108  }
109 
110  O4DGContext conti(varInst, _nbVar_);
111  conti.setDG1Node(_DG1_->root());
112  conti.setDG2Node(_DG2_->root());
113 
114  NodeId root = _compute_(conti, (Idx)0 - 1);
115  _rd_->manager()->setRootNode(root);
116 
117  if (_nbVar_ != 0) DEALLOCATE(varInst, sizeof(Idx) * _nbVar_);
118 
119  _rd_->erase(*_targetVar_);
120 
121  return _rd_;
122  }

Member Data Documentation

◆ _combine_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const COMBINEOPERATOR< GUM_SCALAR > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_combine_
private

The combination function applied to the leaves.

Definition at line 121 of file regress.h.

◆ _default_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
short int* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_default_
private

Just a computational trick.

Definition at line 134 of file regress.h.

◆ _DG1_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_DG1_
private

One of the two function graphs used for the operation.

Definition at line 100 of file regress.h.

◆ _DG1InstantiationNeeded_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
HashTable< NodeId, short int* > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_DG1InstantiationNeeded_
private

Table used to know whether a given node of the first function graph has retrograde variables.

Definition at line 130 of file regress.h.

◆ _DG2_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_DG2_
private

The other function graph used for the operation.

Definition at line 103 of file regress.h.

◆ _DG2InstantiationNeeded_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
HashTable< NodeId, short int* > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_DG2InstantiationNeeded_
private

Table used to know whether a given node of the second function graph has retrograde variables.

Definition at line 131 of file regress.h.

◆ _explorationTable_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
HashTable< double, NodeId > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_explorationTable_
private

The hashtable used to know whether a given pair of nodes has already been visited.

Definition at line 126 of file regress.h.

◆ _nbVar_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
Idx gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_nbVar_
private

The total number of variables involved in the operation.

Definition at line 118 of file regress.h.

◆ _neutral_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const GUM_SCALAR gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_neutral_
private

The neutral element of the projection, used to initialize the value computed on the leaves.

Definition at line 115 of file regress.h.

◆ _primedVars_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const Set< const DiscreteVariable* >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_primedVars_
private

The set of variables we want to keep at the end.

Definition at line 109 of file regress.h.

◆ _project_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const PROJECTOPERATOR< GUM_SCALAR > gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_project_
private

The projection operator applied over the modalities of the target variable.

Definition at line 122 of file regress.h.

◆ _rd_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy >* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_rd_
private

The resulting function graph.

Definition at line 106 of file regress.h.

◆ _targetVar_

template<typename GUM_SCALAR , template< typename > class COMBINEOPERATOR, template< typename > class PROJECTOPERATOR, template< typename > class TerminalNodePolicy = ExactTerminalNodePolicy>
const DiscreteVariable* gum::Regress< GUM_SCALAR, COMBINEOPERATOR, PROJECTOPERATOR, TerminalNodePolicy >::_targetVar_
private

The variable to be eliminated by the operation.

Definition at line 112 of file regress.h.


The documentation for this class was generated from the following files:

agrum/tools/multidim/patterns/regress.h
agrum/tools/multidim/patterns/regress_tpl.h