aGrUM  0.14.2
imddi_tpl.h
/***************************************************************************
 *  Copyright (C) 2005 by Pierre-Henri WUILLEMIN and Christophe GONZALES
 *  {prenom.nom}_at_lip6.fr
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the
 *  Free Software Foundation, Inc.,
 *  59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 ***************************************************************************/
// =======================================================
#include <agrum/core/math/math.h>
#include <agrum/core/priorityQueue.h>
#include <agrum/core/types.h>
// =======================================================
#include <agrum/FMDP/learning/core/chiSquare.h>
#include <agrum/FMDP/learning/datastructure/imddi.h>
// =======================================================
#include <agrum/variables/labelizedVariable.h>
// =======================================================


namespace gum {

  // ############################################################################
  // Constructor & destructor.
  // ############################################################################

  // ============================================================================
  // Variable Learner constructor
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  IMDDI< AttributeSelection, isScalar >::IMDDI(
     MultiDimFunctionGraph< double >* target,
     double attributeSelectionThreshold,
     double pairSelectionThreshold,
     Set< const DiscreteVariable* > attributeListe,
     const DiscreteVariable* learnedValue) :
      IncrementalGraphLearner< AttributeSelection, isScalar >(
         target, attributeListe, learnedValue),
      __lg(&(this->_model), pairSelectionThreshold), __nbTotalObservation(0),
      __attributeSelectionThreshold(attributeSelectionThreshold) {
    GUM_CONSTRUCTOR(IMDDI);
    __addLeaf(this->_root);
  }

  // ============================================================================
  // Reward Learner constructor
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  IMDDI< AttributeSelection, isScalar >::IMDDI(
     MultiDimFunctionGraph< double >* target,
     double attributeSelectionThreshold,
     double pairSelectionThreshold,
     Set< const DiscreteVariable* > attributeListe) :
      IncrementalGraphLearner< AttributeSelection, isScalar >(
         target, attributeListe, new LabelizedVariable("Reward", "", 2)),
      __lg(&(this->_model), pairSelectionThreshold), __nbTotalObservation(0),
      __attributeSelectionThreshold(attributeSelectionThreshold) {
    GUM_CONSTRUCTOR(IMDDI);
    __addLeaf(this->_root);
  }

  // ============================================================================
  // Default destructor
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  IMDDI< AttributeSelection, isScalar >::~IMDDI() {
    GUM_DESTRUCTOR(IMDDI);
    for (HashTableIteratorSafe< NodeId, AbstractLeaf* > leafIter =
            __leafMap.beginSafe();
         leafIter != __leafMap.endSafe();
         ++leafIter)
      delete leafIter.val();
  }


  // ############################################################################
  // Incremental methods
  // ############################################################################

  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::addObservation(
     const Observation* obs) {
    __nbTotalObservation++;
    IncrementalGraphLearner< AttributeSelection, isScalar >::addObservation(obs);
  }

  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::_updateNodeWithObservation(
     const Observation* newObs, NodeId currentNodeId) {
    IncrementalGraphLearner< AttributeSelection,
                             isScalar >::_updateNodeWithObservation(newObs,
                                                                    currentNodeId);
    if (this->_nodeVarMap[currentNodeId] == this->_value)
      __lg.updateLeaf(__leafMap[currentNodeId]);
  }


  // ============================================================================
  // Updates the tree after a new observation has been added
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::updateGraph() {
    __varOrder.clear();

    // First we initialize the node set which will give us the scores
    Set< NodeId > currentNodeSet;
    currentNodeSet.insert(this->_root);

    // Then we initialize the pool of variables to consider
    VariableSelector vs(this->_setOfVars);
    for (vs.begin(); vs.hasNext(); vs.next()) {
      __updateScore(vs.current(), this->_root, vs);
    }

    // Then, until there is no variable remaining
    while (!vs.isEmpty()) {
      // We select the best var
      const DiscreteVariable* selectedVar = vs.select();
      __varOrder.insert(selectedVar);

      // Then we decide if we update each node according to this var
      __updateNodeSet(currentNodeSet, selectedVar, vs);
    }

    // If some remaining nodes are not leaves once the variable order has been
    // established, these nodes are turned into leaves.
    for (SetIteratorSafe< NodeId > nodeIter = currentNodeSet.beginSafe();
         nodeIter != currentNodeSet.endSafe();
         ++nodeIter)
      this->_convertNode2Leaf(*nodeIter);


    if (__lg.needsUpdate()) __lg.update();
  }


  // ############################################################################
  // Updating methods
  // ############################################################################


  // ###################################################################
  // Updates the score of the given variable for the given node.
  //
  // The first parameter is the variable whose score is updated, the
  // second one is the node contributing to that score, and the third
  // one is the VariableSelector that aggregates the scores used to
  // select the most relevant variable.
  // ###################################################################
  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::__updateScore(
     const DiscreteVariable* var, NodeId nody, VariableSelector& vs) {
    if (!this->_nodeId2Database[nody]->isTestRelevant(var)) return;
    double weight = (double)this->_nodeId2Database[nody]->nbObservation()
                    / (double)this->__nbTotalObservation;
    vs.updateScore(var,
                   weight * this->_nodeId2Database[nody]->testValue(var),
                   weight * this->_nodeId2Database[nody]->testOtherCriterion(var));
  }

  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::__downdateScore(
     const DiscreteVariable* var, NodeId nody, VariableSelector& vs) {
    if (!this->_nodeId2Database[nody]->isTestRelevant(var)) return;
    double weight = (double)this->_nodeId2Database[nody]->nbObservation()
                    / (double)this->__nbTotalObservation;
    vs.downdateScore(var,
                     weight * this->_nodeId2Database[nody]->testValue(var),
                     weight
                        * this->_nodeId2Database[nody]->testOtherCriterion(var));
  }

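  // Note on __updateScore / __downdateScore above: each node contributes its
  // test statistics scaled by nbObservation(node) / __nbTotalObservation, so
  // nodes that hold a larger share of the observations weigh more heavily in
  // the aggregated score of a variable.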

  // ============================================================================
  // For each node in the given set, this method checks whether or not
  // we should install the given variable as a test.
  // If so, the node is updated.
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::__updateNodeSet(
     Set< NodeId >& nodeSet,
     const DiscreteVariable* selectedVar,
     VariableSelector& vs) {
    Set< NodeId > oldNodeSet(nodeSet);
    nodeSet.clear();
    for (SetIteratorSafe< NodeId > nodeIter = oldNodeSet.beginSafe();
         nodeIter != oldNodeSet.endSafe();
         ++nodeIter) {
      if (this->_nodeId2Database[*nodeIter]->isTestRelevant(selectedVar)
          && this->_nodeId2Database[*nodeIter]->testValue(selectedVar)
                > __attributeSelectionThreshold) {
        this->_transpose(*nodeIter, selectedVar);

        // Then we subtract from the score given to each variable the
        // quantity contributed by this node
        for (vs.begin(); vs.hasNext(); vs.next()) {
          __downdateScore(vs.current(), *nodeIter, vs);
        }

        // And finally we add all its children to the new set of nodes
        // and update the remaining variables' scores
        for (Idx modality = 0;
             modality < this->_nodeVarMap[*nodeIter]->domainSize();
             ++modality) {
          NodeId sonId = this->_nodeSonsMap[*nodeIter][modality];
          nodeSet << sonId;

          for (vs.begin(); vs.hasNext(); vs.next()) {
            __updateScore(vs.current(), sonId, vs);
          }
        }
      } else {
        nodeSet << *nodeIter;
      }
    }
  }
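
  // In __updateNodeSet above, a node is split on the selected variable only
  // when the test is relevant for that node and the node's test value exceeds
  // __attributeSelectionThreshold; otherwise the node is carried over
  // unchanged into the new node set.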


  // ============================================================================
  // Insert a new node with given associated database, var and maybe sons
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  NodeId IMDDI< AttributeSelection, isScalar >::_insertLeafNode(
     NodeDatabase< AttributeSelection, isScalar >* nDB,
     const DiscreteVariable* boundVar,
     Set< const Observation* >* obsSet) {
    NodeId currentNodeId =
       IncrementalGraphLearner< AttributeSelection, isScalar >::_insertLeafNode(
          nDB, boundVar, obsSet);

    __addLeaf(currentNodeId);

    return currentNodeId;
  }


  // ============================================================================
  // Changes var associated to a node
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::_chgNodeBoundVar(
     NodeId currentNodeId, const DiscreteVariable* desiredVar) {
    if (this->_nodeVarMap[currentNodeId] == this->_value)
      __removeLeaf(currentNodeId);

    IncrementalGraphLearner< AttributeSelection, isScalar >::_chgNodeBoundVar(
       currentNodeId, desiredVar);

    if (desiredVar == this->_value) __addLeaf(currentNodeId);
  }


  // ============================================================================
  // Remove node from graph
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::_removeNode(NodeId currentNodeId) {
    if (this->_nodeVarMap[currentNodeId] == this->_value)
      __removeLeaf(currentNodeId);
    IncrementalGraphLearner< AttributeSelection, isScalar >::_removeNode(
       currentNodeId);
  }


  // ============================================================================
  // Add leaf to aggregator
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::__addLeaf(NodeId currentNodeId) {
    __leafMap.insert(currentNodeId,
                     new ConcreteLeaf< AttributeSelection, isScalar >(
                        currentNodeId,
                        this->_nodeId2Database[currentNodeId],
                        &(this->_valueAssumed)));
    __lg.addLeaf(__leafMap[currentNodeId]);
  }


  // ============================================================================
  // Remove leaf from aggregator
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::__removeLeaf(NodeId currentNodeId) {
    __lg.removeLeaf(__leafMap[currentNodeId]);
    delete __leafMap[currentNodeId];
    __leafMap.erase(currentNodeId);
  }


  // ============================================================================
  // Computes the Reduced and Ordered Function Graph associated to this ordered
  // tree
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::updateFunctionGraph() {
    // if( __lg.needsUpdate() || this->_needUpdate ){
    __rebuildFunctionGraph();
    this->_needUpdate = false;
    // }
  }


  // ============================================================================
  // Performs the leaves merging and rebuilds the target function graph
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  void IMDDI< AttributeSelection, isScalar >::__rebuildFunctionGraph() {
    // *******************************************************************************************************
    // Updating the leaf aggregator
    __lg.update();

    // *******************************************************************************************************
    // Reinitializing the decision graph
    this->_target->clear();
    for (auto varIter = __varOrder.beginSafe(); varIter != __varOrder.endSafe();
         ++varIter)
      this->_target->add(**varIter);
    this->_target->add(*this->_value);

    HashTable< NodeId, NodeId > toTarget;

    // *******************************************************************************************************
    // Inserting the leaves
    HashTable< NodeId, AbstractLeaf* > treeNode2leaf = __lg.leavesMap();
    HashTable< AbstractLeaf*, NodeId > leaf2DGNode;
    for (HashTableConstIteratorSafe< NodeId, AbstractLeaf* > treeNodeIter =
            treeNode2leaf.cbeginSafe();
         treeNodeIter != treeNode2leaf.cendSafe();
         ++treeNodeIter) {
      if (!leaf2DGNode.exists(treeNodeIter.val()))
        leaf2DGNode.insert(treeNodeIter.val(),
                           __insertLeafInFunctionGraph(treeNodeIter.val(),
                                                       Int2Type< isScalar >()));

      toTarget.insert(treeNodeIter.key(), leaf2DGNode[treeNodeIter.val()]);
    }

    // *******************************************************************************************************
    // Inserting the internal nodes (checking for merging opportunities)
    for (SequenceIteratorSafe< const DiscreteVariable* > varIter =
            __varOrder.rbeginSafe();
         varIter != __varOrder.rendSafe();
         --varIter) {
      for (Link< NodeId >* curNodeIter = this->_var2Node[*varIter]->list();
           curNodeIter;
           curNodeIter = curNodeIter->nextLink()) {
        NodeId* sonsMap = static_cast< NodeId* >(
           SOA_ALLOCATE(sizeof(NodeId) * (*varIter)->domainSize()));
        for (Idx modality = 0; modality < (*varIter)->domainSize(); ++modality)
          sonsMap[modality] =
             toTarget[this->_nodeSonsMap[curNodeIter->element()][modality]];
        toTarget.insert(
           curNodeIter->element(),
           this->_target->manager()->addInternalNode(*varIter, sonsMap));
      }
    }

    // *******************************************************************************************************
    // Polish: set the root and clean up the diagram
    this->_target->manager()->setRootNode(toTarget[this->_root]);
    this->_target->manager()->clean();
  }
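
  // __rebuildFunctionGraph above proceeds bottom-up: leaves are inserted
  // first (leaves merged by the aggregator share a single diagram node via
  // leaf2DGNode), then internal nodes are added variable by variable, walking
  // __varOrder in reverse so that every son already has its image in toTarget.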


  // ============================================================================
  // Inserts a leaf into the target function graph (scalar case)
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  NodeId IMDDI< AttributeSelection, isScalar >::__insertLeafInFunctionGraph(
     AbstractLeaf* leaf, Int2Type< true >) {
    double value = 0.0;
    for (Idx moda = 0; moda < leaf->nbModa(); moda++) {
      value += (double)leaf->effectif(moda) * this->_valueAssumed.atPos(moda);
    }
    if (leaf->total()) value /= (double)leaf->total();
    return this->_target->manager()->addTerminalNode(value);
  }


  // ============================================================================
  // Inserts a leaf into the target function graph (non-scalar case)
  // ============================================================================
  template < TESTNAME AttributeSelection, bool isScalar >
  NodeId IMDDI< AttributeSelection, isScalar >::__insertLeafInFunctionGraph(
     AbstractLeaf* leaf, Int2Type< false >) {
    NodeId* sonsMap = static_cast< NodeId* >(
       SOA_ALLOCATE(sizeof(NodeId) * this->_value->domainSize()));
    for (Idx modality = 0; modality < this->_value->domainSize(); ++modality) {
      double newVal = 0.0;
      if (leaf->total())
        newVal = (double)leaf->effectif(modality) / (double)leaf->total();
      sonsMap[modality] = this->_target->manager()->addTerminalNode(newVal);
    }
    return this->_target->manager()->addInternalNode(this->_value, sonsMap);
  }
} // namespace gum
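A minimal usage sketch (not part of imddi_tpl.h): it wires an IMDDI learner to a target function graph and feeds it observations. The choice of GTestPolicy as the test policy, the 0.95 thresholds, the variable names, and the include paths are illustrative assumptions; only the constructor signature and the addObservation / updateGraph / updateFunctionGraph calls come from the file above.

#include <vector>

#include <agrum/FMDP/learning/core/testPolicy/GTestPolicy.h>   // assumed path
#include <agrum/FMDP/learning/datastructure/imddi.h>
#include <agrum/variables/labelizedVariable.h>

void learnSketch(const std::vector< const gum::Observation* >& observations) {
  // Diagram the learner will fill in.
  gum::MultiDimFunctionGraph< double >* target =
     gum::MultiDimFunctionGraph< double >::getReducedAndOrderedInstance();

  // Attributes and learned variable (names and domain sizes are illustrative).
  gum::LabelizedVariable x("x", "first attribute", 2);
  gum::LabelizedVariable y("y", "second attribute", 2);
  gum::LabelizedVariable v("v", "learned variable", 2);

  gum::Set< const gum::DiscreteVariable* > attributes;
  attributes << &x << &y;

  // Variable learner (isScalar = false): learns a distribution over v.
  gum::IMDDI< gum::GTestPolicy, false > learner(
     target, 0.95, 0.95, attributes, &v);

  for (const auto* obs : observations)
    learner.addObservation(obs);   // grows the incremental tree

  learner.updateGraph();           // reorders the tree from the variable scores
  learner.updateFunctionGraph();   // rebuilds the reduced, ordered diagram

  delete target;
}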