aGrUM  0.14.2
DAGCycleDetector.cpp
/***************************************************************************
 *   Copyright (C) 2005 by Christophe GONZALES and Pierre-Henri WUILLEMIN
 *   {prenom.nom}_at_lip6.fr
 *
 *   This program is free software; you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation; either version 2 of the License, or
 *   (at your option) any later version.
 *
 *   This program is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *   GNU General Public License for more details.
 *
 *   You should have received a copy of the GNU General Public License
 *   along with this program; if not, write to the
 *   Free Software Foundation, Inc.,
 *   59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
 ***************************************************************************/
#include <agrum/core/sequence.h>
#include <agrum/graphs/algorithms/DAGCycleDetector.h>

#ifdef GUM_NO_INLINE
#  include <agrum/graphs/algorithms/DAGCycleDetector_inl.h>
#endif   // GUM_NO_INLINE

namespace gum {

  void DAGCycleDetector::setDAG(const DAG& dag) {
    // sets the dag
    __dag = dag;

    // get the set of roots and leaves of the dag
    List< NodeId >       roots, leaves;
    NodeProperty< Size > nb_parents, nb_children;

    for (const auto node : dag) {
      Size nb_ch = dag.children(node).size();
      nb_children.insert(node, nb_ch);

      if (!nb_ch) leaves.insert(node);

      Size nb_pa = dag.parents(node).size();
      nb_parents.insert(node, nb_pa);

      if (!nb_pa) roots.insert(node);
    }

    // recompute the set of ancestors
    NodeProperty< Size > empty_set;
    __ancestors.clear();

    for (NodeId node : dag) {
      __ancestors.insert(node, empty_set);
    }

    while (!roots.empty()) {
      // get a node and update the ancestors of its children
      NodeId node = roots.front();
      NodeProperty< Size > node_ancestors = __ancestors[node];
      node_ancestors.insert(node, 1);
      const NodeSet& node_children = dag.children(node);
      roots.popFront();

      for (const auto ch : node_children) {
        __addWeightedSet(__ancestors[ch], node_ancestors, 1);
        --nb_parents[ch];

        if (!nb_parents[ch]) { roots.insert(ch); }
      }
    }

    // recompute the set of descendants
    __descendants.clear();

    for (const auto node : dag) {
      __descendants.insert(node, empty_set);
    }

    while (!leaves.empty()) {
      // get a node and update the descendants of its parents
      NodeId node = leaves.front();
      NodeProperty< Size > node_descendants = __descendants[node];
      node_descendants.insert(node, 1);
      const NodeSet& node_parents = dag.parents(node);
      leaves.popFront();

      for (const auto pa : node_parents) {
        __addWeightedSet(__descendants[pa], node_descendants, 1);
        --nb_children[pa];

        if (!nb_children[pa]) { leaves.insert(pa); }
      }
    }
  }
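
  // Illustration (not part of the original file): the weights stored in
  // __ancestors and __descendants count the number of distinct directed
  // paths between two nodes. For the diamond DAG A->B, A->C, B->D, C->D,
  // setDAG() computes
  //   __ancestors[D]   = { A:2, B:1, C:1 }   (two distinct paths from A to D)
  //   __descendants[A] = { B:1, C:1, D:2 }
  // Keeping path counts rather than plain node sets is what makes arc
  // deletions incremental: subtracting the paths that go through a removed
  // arc leaves exactly the counts of the remaining paths.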

  /// indicates whether a set of modifications would create a cycle
  bool DAGCycleDetector::hasCycleFromModifications(
     const std::vector< Change >& modifs) const {
    // first, we separate deletions and insertions (reversals are cut into
    // a deletion + an insertion) and we check that no insertion also exists
    // as a deletion (if so, we remove both operations). In addition, if
    // we try to add an arc (X,X) we return that it induces a cycle
    HashTable< Arc, Size > deletions(Size(modifs.size()));
    HashTable< Arc, Size > additions(Size(modifs.size()));

    for (const auto& modif : modifs) {
      Arc arc(modif.tail(), modif.head());

      switch (modif.type()) {
        case ChangeType::ARC_DELETION:
          if (deletions.exists(arc))
            ++deletions[arc];
          else
            deletions.insert(arc, 1);

          break;

        case ChangeType::ARC_ADDITION:
          // a self-loop (X,X) immediately creates a cycle
          if (modif.tail() == modif.head()) return true;

          if (additions.exists(arc))
            ++additions[arc];
          else
            additions.insert(arc, 1);

          break;

        case ChangeType::ARC_REVERSAL:
          // a reversal is handled as a deletion followed by an addition
          if (deletions.exists(arc))
            ++deletions[arc];
          else
            deletions.insert(arc, 1);

          arc = Arc(modif.head(), modif.tail());

          if (additions.exists(arc))
            ++additions[arc];
          else
            additions.insert(arc, 1);

          break;

        default: GUM_ERROR(OperationNotAllowed, "undefined change type");
      }
    }

    for (auto iter = additions.beginSafe();   // safe iterator needed here
         iter != additions.endSafe();
         ++iter) {
      if (deletions.exists(iter.key())) {
        Size& nb_del = deletions[iter.key()];
        Size& nb_add = iter.val();

        if (nb_del > nb_add) {
          additions.erase(iter);
          nb_del -= nb_add;
        } else if (nb_del < nb_add) {
          deletions.erase(iter.key());
          nb_add -= nb_del;
        } else {
          deletions.erase(iter.key());
          additions.erase(iter);
        }
      }
    }

    // get the set of nodes involved in the modifications
    NodeSet extremities;

    for (const auto& modif : modifs) {
      extremities.insert(modif.tail());
      extremities.insert(modif.head());
    }

    // we now retrieve the set of ancestors and descendants of all the
    // extremities of the arcs involved in the modifications. We also
    // keep track of all the children and parents of these nodes
    NodeProperty< NodeProperty< Size > > ancestors, descendants;

    for (const auto& modif : modifs) {
      if (!ancestors.exists(modif.tail())) {
        NodeProperty< Size >& anc =
           ancestors.insert(modif.tail(), NodeProperty< Size >()).second;
        __restrictWeightedSet(anc, __ancestors[modif.tail()], extremities);

        NodeProperty< Size >& desc =
           descendants.insert(modif.tail(), NodeProperty< Size >()).second;
        __restrictWeightedSet(desc, __descendants[modif.tail()], extremities);
      }

      if (!ancestors.exists(modif.head())) {
        NodeProperty< Size >& anc =
           ancestors.insert(modif.head(), NodeProperty< Size >()).second;
        __restrictWeightedSet(anc, __ancestors[modif.head()], extremities);

        NodeProperty< Size >& desc =
           descendants.insert(modif.head(), NodeProperty< Size >()).second;
        __restrictWeightedSet(desc, __descendants[modif.head()], extremities);
      }
    }

    // we apply all the suppressions:
    // assume that the modif concerns arc (X,Y). Then, after removing
    // arc (X,Y), the sets of descendants of the nodes T in
    // ( X + ancestors(X) ) are updated by subtracting the number of paths
    // leading to Y and its set of descendants. These numbers are equal to
    // the numbers found in descendants[X] * the number of paths leading
    // to X, i.e., descendants[T][X].
    // By symmetry, the sets of ancestors of the nodes T in
    // ( Y + descendants(Y) ) are updated by subtracting the number of paths
    // leading to X and its ancestors.
    for (auto iter = deletions.cbegin(); iter != deletions.cend(); ++iter) {
      const Arc& arc = iter.key();
      const NodeId tail = arc.tail();
      const NodeProperty< Size >& anc_tail = ancestors[tail];
      const NodeId head = arc.head();
      const NodeProperty< Size >& desc_head = descendants[head];

      // update the set of descendants
      NodeProperty< Size > set_to_del = desc_head;
      set_to_del.insert(head, 1);
      __delWeightedSet(descendants[tail], set_to_del, 1);

      for (auto iter = anc_tail.cbegin(); iter != anc_tail.cend(); ++iter) {
        __delWeightedSet(
           descendants[iter.key()], set_to_del, descendants[iter.key()][tail]);
      }

      // update the set of ancestors
      set_to_del = anc_tail;
      set_to_del.insert(tail, 1);
      __delWeightedSet(ancestors[head], set_to_del, 1);

      for (auto iter = desc_head.cbegin(); iter != desc_head.cend(); ++iter) {
        __delWeightedSet(
           ancestors[iter.key()], set_to_del, ancestors[iter.key()][head]);
      }
    }

    // now we apply all the additions of arcs (including the additions
    // resulting from arc reversals). After adding arc (X,Y), the sets of
    // ancestors of Y and of its descendants T shall be updated by adding
    // the number of paths leading to X and its ancestors, i.e.,
    // ancestors[X] * the number of paths leading to Y, i.e., ancestors[T][Y].
    // Similarly, the sets of descendants of X and of its ancestors should be
    // updated by adding the number of paths leading to Y and its descendants.
    // Finally, an arc (X,Y) can be added if and only if Y does not belong
    // to the ancestor set of X.
    for (auto iter = additions.cbegin(); iter != additions.cend(); ++iter) {
      const Arc& arc = iter.key();
      NodeId tail = arc.tail();
      NodeId head = arc.head();

      const NodeProperty< Size >& anc_tail = ancestors[tail];

      if (anc_tail.exists(head)) { return true; }

      const NodeProperty< Size >& desc_head = descendants[head];

      // update the set of ancestors
      NodeProperty< Size > set_to_add = anc_tail;
      set_to_add.insert(tail, 1);
      __addWeightedSet(ancestors[head], set_to_add, 1);

      for (auto iter = desc_head.cbegin(); iter != desc_head.cend(); ++iter) {
        __addWeightedSet(
           ancestors[iter.key()], set_to_add, ancestors[iter.key()][head]);
      }

      // update the set of descendants
      set_to_add = desc_head;
      set_to_add.insert(head, 1);
      __addWeightedSet(descendants[tail], set_to_add, 1);

      for (auto iter = anc_tail.cbegin(); iter != anc_tail.cend(); ++iter) {
        __addWeightedSet(
           descendants[iter.key()], set_to_add, descendants[iter.key()][tail]);
      }
    }

    return false;
  }
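
  // Illustration (not part of the original file): for the chain 0 -> 1 -> 2,
  // hasCycleFromModifications({ ArcAdd(2, 0) }) returns true, since node 0 is
  // an ancestor of node 2 and the test anc_tail.exists(head) fires. The batch
  // { ArcDel(0, 1), ArcAdd(2, 0) } returns false: the deletion first removes
  // the only path from 0 to 2, so adding (2, 0) no longer closes a cycle.
  // ArcAdd / ArcDel are the change classes assumed to be declared in the
  // detector's header.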

  /// adds a new arc to the current DAG
  void DAGCycleDetector::addArc(NodeId tail, NodeId head) {
    // check that the arc does not already exist
    if (__dag.existsArc(tail, head)) return;

    // check that the arc would not create a cycle
    if (hasCycleFromAddition(tail, head)) {
      GUM_ERROR(InvalidDirectedCycle,
                "the arc would create a directed cycle in the DAG");
    }

    __dag.addArc(tail, head);

    // now we apply the addition of the arc as done in method
    // hasCycleFromModifications
    const NodeProperty< Size >& anc_tail = __ancestors[tail];
    const NodeProperty< Size >& desc_head = __descendants[head];

    // update the set of ancestors
    NodeProperty< Size > set_to_add = anc_tail;
    set_to_add.insert(tail, 1);
    __addWeightedSet(__ancestors[head], set_to_add, 1);

    for (auto iter = desc_head.cbegin(); iter != desc_head.cend(); ++iter) {
      __addWeightedSet(
         __ancestors[iter.key()], set_to_add, __ancestors[iter.key()][head]);
    }

    // update the set of descendants
    set_to_add = desc_head;
    set_to_add.insert(head, 1);
    __addWeightedSet(__descendants[tail], set_to_add, 1);

    for (auto iter = anc_tail.cbegin(); iter != anc_tail.cend(); ++iter) {
      __addWeightedSet(
         __descendants[iter.key()], set_to_add, __descendants[iter.key()][tail]);
    }
  }

  /// removes an arc from the current DAG
  void DAGCycleDetector::eraseArc(NodeId tail, NodeId head) {
    // check that the arc exists
    if (!__dag.existsArc(tail, head)) return;

    __dag.eraseArc(Arc(tail, head));

    // we apply the deletion of the arc as done in method
    // hasCycleFromModifications
    const NodeProperty< Size >& anc_tail = __ancestors[tail];
    const NodeProperty< Size >& desc_head = __descendants[head];

    // update the set of descendants
    NodeProperty< Size > set_to_del = desc_head;
    set_to_del.insert(head, 1);
    __delWeightedSet(__descendants[tail], set_to_del, 1);

    for (auto iter = anc_tail.cbegin(); iter != anc_tail.cend(); ++iter) {
      __delWeightedSet(
         __descendants[iter.key()], set_to_del, __descendants[iter.key()][tail]);
    }

    // update the set of ancestors
    set_to_del = anc_tail;
    set_to_del.insert(tail, 1);
    __delWeightedSet(__ancestors[head], set_to_del, 1);

    for (auto iter = desc_head.cbegin(); iter != desc_head.cend(); ++iter) {
      __delWeightedSet(
         __ancestors[iter.key()], set_to_del, __ancestors[iter.key()][head]);
    }
  }
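
  // Numeric illustration (not part of the original file): for the diamond
  // DAG A->B, A->C, B->D, C->D introduced above, __descendants[A] is
  // { B:1, C:1, D:2 }. Erasing arc (A,B) builds set_to_del =
  // __descendants[B] + {B:1} = { B:1, D:1 } and subtracts it once from
  // __descendants[A], leaving { C:1, D:1 } -- exactly the paths that do not
  // go through the removed arc. The ancestor sets are updated symmetrically.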

} /* namespace gum */
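
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the original file). It relies
// on the public interface declared in DAGCycleDetector.h -- setDAG(),
// addArc(), eraseArc(), hasCycleFromAddition(), hasCycleFromModifications() --
// and on the nested change classes ArcAdd / ArcDel; those names are taken
// from the class documentation rather than from this file.
//
//   gum::DAG dag;
//   gum::NodeId n0 = dag.addNode();
//   gum::NodeId n1 = dag.addNode();
//   gum::NodeId n2 = dag.addNode();
//   dag.addArc(n0, n1);
//   dag.addArc(n1, n2);
//
//   gum::DAGCycleDetector detector;
//   detector.setDAG(dag);
//
//   // adding n2 -> n0 would close the directed cycle n0 -> n1 -> n2 -> n0
//   bool bad = detector.hasCycleFromAddition(n2, n0);               // true
//
//   // a whole batch of changes can be tested without modifying the detector
//   std::vector< gum::DAGCycleDetector::Change > changes;
//   changes.push_back(gum::DAGCycleDetector::ArcDel(n0, n1));
//   changes.push_back(gum::DAGCycleDetector::ArcAdd(n2, n0));
//   bool still_bad = detector.hasCycleFromModifications(changes);   // false
//
//   // safe modifications can then be committed so that the detector keeps
//   // its ancestor / descendant path counts up to date
//   detector.eraseArc(n0, n1);
//   detector.addArc(n2, n0);
// ---------------------------------------------------------------------------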