aGrUM  0.20.2
a C++ library for (probabilistic) graphical models
clusteredLayerGenerator_tpl.h
/**
 *
 *  Copyright 2005-2020 Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
 *  info_at_agrum_dot_org
 *
 *  This library is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU Lesser General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public License
 *  along with this library. If not, see <http://www.gnu.org/licenses/>.
 *
 */


/**
 * @file
 * @brief Inline implementation of ClusteredLayerGenerator.
 *
 * @author Lionel TORTI and Pierre-Henri WUILLEMIN(@LIP6)
 */

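// Usage sketch (not part of the original file): one way this generator might be
// driven. The layer sizes are made-up values; the LayerData fields (a, g, c, o)
// mirror the ones used further down in this file, and the setter names follow
// the accessors implemented at the end of the file, where some signatures are
// elided, so treat the exact names as assumptions.
//
//   gum::prm::ClusteredLayerGenerator< double > gen;
//   std::vector< gum::prm::LayerGenerator< double >::LayerData > layers(3);
//   for (auto& layer: layers) {
//     layer.a = 4;   // attributes per interface / class
//     layer.g = 2;   // aggregates per class (non-root layers only)
//     layer.c = 3;   // classes per layer
//     layer.o = 5;   // instances per layer in the generated system
//   }
//   gen.setDomainSize(2);
//   gen.setMaxParents(5);
//   gen.setClusterRatio(0.5);
//   gen.setLayers(layers);
//   gum::prm::PRM< double >* prm = gen.generate();   // caller owns the result
//   delete prm;
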
namespace gum {
  namespace prm {

    template < typename GUM_SCALAR >
    PRM< GUM_SCALAR >* ClusteredLayerGenerator< GUM_SCALAR >::generate() {
      if (layers__.size() == 0) {
        GUM_ERROR(OperationNotAllowed,
                  "cannot generate a layered PRM<GUM_SCALAR> without layers");
      }

      std::vector< MyData >    l;
      PRMFactory< GUM_SCALAR > factory;   // declaration inferred from the return below
      // ... (the calls building the type, interfaces, classes and system are
      // missing from this listing)
      return factory.prm();
    }

    template < typename GUM_SCALAR >
    // ... (method signature and the opening of the generated discrete type are
    // missing from this listing; the loop below adds one label per domain
    // value)

      for (Size i = 0; i < domain_size__; ++i) {
        // ...
        sBuff << i;
        // ...
      }

      // ...
      return name;
    }

    template < typename GUM_SCALAR >
    // ... (signature missing from this listing; the method takes the
    // PRMFactory `f`, the attribute type name `type` and the per-layer
    // bookkeeping vector `l` used below)
      const std::string& type,
      // ...
      for (Size lvl = 0; lvl < layers__.size(); ++lvl) {
        // ... (a MyData entry is added to l and the interface l[lvl].i is
        // started on the factory; lines missing from this listing)

        for (Size a = 0; a < layers__[lvl].a; ++a) {
          l[lvl].a.push_back(/* generated attribute name, elided */);
          f.addAttribute(type, l[lvl].a.back());
        }

        if (lvl) {
          for (Size g = 0; g < layers__[lvl].g; ++g) {
            l[lvl].g.push_back(/* generated aggregate name, elided */);
            f.addAttribute("boolean", l[lvl].g.back());
          }

          // ... (the reference-slot name l[lvl].r is generated here)
          f.addReferenceSlot(l[lvl - 1].i, l[lvl].r, true);
        }

        f.endInterface();
      }
    }

    template < typename GUM_SCALAR >
    // ... (signature missing from this listing; the method takes the
    // PRMFactory `f`, the attribute type name `type` and the vector `l`)
      const std::string& type,
      // ...
      // double ratio = getClusterRatio() + RAND_MAX;
      Set< std::string > i;

      for (Size lvl = 0; lvl < layers__.size(); ++lvl) {
        i.insert(l[lvl].i);

        for (Size c = 0; c < layers__[lvl].c; ++c) {
          // The cluster-ratio test is currently commented out, so every class
          // of the layer is generated as a cluster.
          // if (std::rand() < ratio)
          generateCluster__(f, type, l, lvl, i);
          // else
          //   generateClass__(f, type, l, lvl, i);
        }

        i.erase(l[lvl].i);
      }
    }

    template < typename GUM_SCALAR >
    // (the qualified name and the `l` parameter are missing from this listing;
    // both are inferred from the call site above and from the DAG helper below)
    void ClusteredLayerGenerator< GUM_SCALAR >::generateCluster__(
       PRMFactory< GUM_SCALAR >& f,
       const std::string& type,
       std::vector< typename ClusteredLayerGenerator< GUM_SCALAR >::MyData >& l,
       Size lvl,
       Set< std::string >& i) {
      Size       size = 0;
      GUM_SCALAR sum  = 0.0;
      // ... (declarations of the names used below - first, second, third - are
      // missing from this listing)
      std::vector< std::string >* v = 0;

      switch (std::rand() % 2) {
        // Shape A->B
        // v == [first, second, second.ref -> first]
        case 0: {
          v = new std::vector< std::string >();
          generateClass__(f, type, l, lvl, i);
          first = l[lvl].c.back();
          v->push_back(first);
          // ... (the name of the second class is generated and pushed on v)
          f.startClass(v->back());
          // ... (the reference-slot name is generated and pushed on v)
          f.addReferenceSlot(first, v->back(), true);
          DAG dag;
          // ... (the class DAG and the name <-> NodeId bijection `names` used
          // below are built here; lines missing from this listing)

          // Adding aggregates
          for (std::vector< std::string >::iterator g = l[lvl].g.begin();
               g != l[lvl].g.end();
               ++g) {
            std::stringstream s;
            s << v->back() << "." << l[lvl].a[std::rand() % l[lvl].a.size()];
            std::vector< std::string > chain(1, s.str()), param(1, "1");
            f.addAggregator(*g, "exists", chain, param);
          }

          // Adding attributes
          for (std::vector< std::string >::iterator a = l[lvl].a.begin();
               a != l[lvl].a.end();
               ++a) {
            f.startAttribute(type, *a, true);
            size = getDomainSize();

            for (const auto par: dag.parents(names.second(*a))) {
              // ... (the parent is attached to the attribute under
              // construction; line missing from this listing)
              size *= f.retrieveClass(l[lvl].c.back())
                         .get(names.first(par))
                         .type()
                         ->domainSize();
            }

            // cpf/val declarations inferred from their uses below (the line is
            // missing from this listing):
            std::vector< GUM_SCALAR > cpf(size), val(getDomainSize());

            for (size_t norms = 0; norms < size; norms += getDomainSize()) {
              sum = 0.0;

              for (size_t idx = 0; idx < getDomainSize(); ++idx) {
                val[idx] = 1 + std::rand();
                sum += val[idx];
              }

              for (size_t idx = 0; idx < getDomainSize(); ++idx)
                cpf[norms + idx] = val[idx] / sum;
            }

            // ... (the normalized cpf is handed to the factory here; line
            // missing from this listing)
            f.endAttribute();
          }

          f.endClass();
          break;
        }


        // Shape A -> B -> C
        // v == [first, second, second.ref -> first, third, third.ref -> second]
        case 1: {
          v = new std::vector< std::string >();
          generateClass__(f, type, l, lvl, i);
          {
            first = l[lvl].c.back();
            v->push_back(first);
            // ... (the name of the second class is generated and pushed on v)
            second = v->back();
            // ... (the class `second` is started on the factory and the
            // reference-slot name is pushed on v; lines missing)
            f.addReferenceSlot(first, v->back(), true);
            DAG dag;
            // ... (the class DAG and the bijection `names` are built here)

            // Adding aggregates
            for (std::vector< std::string >::iterator g = l[lvl].g.begin();
                 g != l[lvl].g.end();
                 ++g) {
              std::stringstream s;
              s << v->back() << "." << l[lvl].a[std::rand() % l[lvl].a.size()];
              std::vector< std::string > chain(1, s.str()), param(1, "1");
              f.addAggregator(*g, "exists", chain, param);
            }

            // Adding attributes
            for (std::vector< std::string >::iterator a = l[lvl].a.begin();
                 a != l[lvl].a.end();
                 ++a) {
              f.startAttribute(type, *a, true);
              size = getDomainSize();

              for (const auto par: dag.parents(names.second(*a))) {
                // ... (the parent is attached to the attribute under
                // construction; line missing from this listing)
                size *= f.retrieveClass(l[lvl].c.back())
                           .get(names.first(par))
                           .type()
                           ->domainSize();
              }

              // cpf/val declarations inferred from their uses below:
              std::vector< GUM_SCALAR > cpf(size), val(getDomainSize());

              for (size_t norms = 0; norms < size; norms += getDomainSize()) {
                sum = 0.0;

                for (size_t idx = 0; idx < getDomainSize(); ++idx) {
                  val[idx] = 1 + std::rand();
                  sum += val[idx];
                }

                for (size_t idx = 0; idx < getDomainSize(); ++idx)
                  cpf[norms + idx] = val[idx] / sum;
              }

              // ... (the normalized cpf is handed to the factory here)
              f.endAttribute();
            }

            f.endClass();
          }
          {
            // ... (the name of the third class is generated and pushed on v)
            third = v->back();
            f.startClass(third);
            // ... (the reference-slot name is pushed on v)
            f.addReferenceSlot(second, v->back(), true);
            DAG dag;
            // ... (the class DAG and the bijection `names` are built here)

            // Adding aggregates
            for (std::vector< std::string >::iterator g = l[lvl].g.begin();
                 g != l[lvl].g.end();
                 ++g) {
              std::stringstream s;
              s << v->back() << "." << l[lvl].a[std::rand() % l[lvl].a.size()];
              std::vector< std::string > chain(1, s.str()), param(1, "1");
              f.addAggregator(*g, "exists", chain, param);
            }

            // Adding attributes
            for (std::vector< std::string >::iterator a = l[lvl].a.begin();
                 a != l[lvl].a.end();
                 ++a) {
              f.startAttribute(type, *a, true);
              size = getDomainSize();

              for (const auto par: dag.parents(names.second(*a))) {
                // ... (the parent is attached to the attribute under
                // construction; line missing from this listing)
                size *= f.retrieveClass(l[lvl].c.back())
                           .get(names.first(par))
                           .type()
                           ->domainSize();
              }

              // cpf/val declarations inferred from their uses below:
              std::vector< GUM_SCALAR > cpf(size), val(getDomainSize());

              for (size_t norms = 0; norms < size; norms += getDomainSize()) {
                sum = 0.0;

                for (size_t idx = 0; idx < getDomainSize(); ++idx) {
                  val[idx] = 1 + std::rand();
                  sum += val[idx];
                }

                for (size_t idx = 0; idx < getDomainSize(); ++idx)
                  cpf[norms + idx] = val[idx] / sum;
              }

              // ... (the normalized cpf is handed to the factory here)
              f.endAttribute();
            }

            f.endClass();
          }
          break;
        }

        default: {
          GUM_ERROR(OperationNotAllowed, "unexpected value");
        }
      }

      // ... (the vector v is stored in cluster_map__, keyed by the first
      // class, for later use by the system generation; line missing from this
      // listing)
    }

    template < typename GUM_SCALAR >
    // (the qualified name and the `l` parameter are missing from this listing;
    // both are inferred from the call sites above)
    void ClusteredLayerGenerator< GUM_SCALAR >::generateClass__(
       PRMFactory< GUM_SCALAR >& f,
       const std::string& type,
       std::vector< typename ClusteredLayerGenerator< GUM_SCALAR >::MyData >& l,
       Size lvl,
       Set< std::string >& i) {
      Size       size = 0;
      GUM_SCALAR sum  = 0.0;
      // ... (a fresh class name is generated and appended to l[lvl].c; line
      // missing from this listing)
      f.startClass(l[lvl].c.back(), "", &i);

      if (lvl) f.addReferenceSlot(l[lvl - 1].i, l[lvl].r, true);

      DAG dag;
      // ... (the class DAG and the bijection `names` are built here)

      // Adding aggregates
      if (lvl) {
        for (const auto agg: l[lvl].g) {
          std::stringstream s;
          s << l[lvl].r << "." << l[lvl - 1].a[std::rand() % l[lvl - 1].a.size()];
          std::vector< std::string > chain(1, s.str()), param(1, "1");
          f.addAggregator(agg, "exists", chain, param);
        }
      }

      // Adding attributes
      for (const auto attr: l[lvl].a) {
        f.startAttribute(type, attr, true);
        size = getDomainSize();

        for (const auto par: dag.parents(names.second(attr))) {
          // ... (the parent is attached to the attribute under construction;
          // line missing from this listing)
          size *= f.retrieveClass(l[lvl].c.back())
                     .get(names.first(par))
                     .type()
                     ->domainSize();
        }

        // cpf/val declarations inferred from their uses below:
        std::vector< GUM_SCALAR > cpf(size), val(getDomainSize());

        for (size_t norms = 0; norms < size; norms += getDomainSize()) {
          sum = 0.0;

          for (size_t idx = 0; idx < getDomainSize(); ++idx) {
            val[idx] = 1 + std::rand();
            sum += val[idx];
          }

          for (size_t idx = 0; idx < getDomainSize(); ++idx)
            cpf[norms + idx] = val[idx] / sum;
        }

        // ... (the normalized cpf is handed to the factory here)
        f.endAttribute();
      }

      f.endClass();
    }

    template < typename GUM_SCALAR >
    // ... (the qualified name of this DAG-generation helper and its `names`
    // parameter - a Bijection< std::string, NodeId >&, inferred from the uses
    // below - are missing from this listing)
       Size lvl,
       DAG& dag,
       // ...
       std::vector< typename ClusteredLayerGenerator< GUM_SCALAR >::MyData >& l) {
      // ... (the arc `density` threshold used below is computed here; line
      // missing from this listing)
      std::vector< NodeId > nodes;
      NodeId id = 0;

      if (lvl) {
        for (std::vector< std::string >::iterator g = l[lvl].g.begin();
             g != l[lvl].g.end();
             ++g) {
          id = dag.addNode();
          names.insert(*g, id);
          nodes.push_back(id);
        }
      }

      for (std::vector< std::string >::iterator a = l[lvl].a.begin();
           a != l[lvl].a.end();
           ++a) {
        id = dag.addNode();
        names.insert(*a, id);

        for (std::vector< NodeId >::iterator prnt = nodes.begin();
             prnt != nodes.end();
             ++prnt)
          if (std::rand() < density) dag.addArc(*prnt, names.second(*a));

        nodes.push_back(id);
      }

      // For each node with more than max_parents__ parents, we randomly remove
      // parents until #parents <= max_parents__.
      for (const auto node: dag.nodes()) {
        if (dag.parents(node).size() > getMaxParents()) {
          std::vector< NodeId > v;

          for (const auto par: dag.parents(node))
            v.push_back(par);

          while (dag.parents(node).size() > getMaxParents()) {
            size_t idx = std::rand() % v.size();
            Arc arc(v[idx], node);
            // ...
            dag.eraseArc(arc);
            v[idx] = v.back();
            v.pop_back();
          }
        }
      }
    }

    template < typename GUM_SCALAR >
    // ... (signature of the system-generation method, taking the PRMFactory
    // `factory` used below; its qualified name and first lines are missing
    // from this listing)
       std::vector< typename ClusteredLayerGenerator< GUM_SCALAR >::MyData >& l) {
      // ... (the system is started on the factory here; line missing)
      std::vector< std::vector< std::string > > o(layers__.size());
      // ... (declarations of the strings used below - c, name, first, second,
      // third - are missing from this listing)
      std::vector< std::string >* v = 0;
      size_t idx = 0;

      for (size_t lvl = 0; lvl < layers__.size(); ++lvl) {
        // ... (presumably the `density` threshold used below; line missing)

        for (size_t count = 0; count < layers__[lvl].o; ++count) {
          c = l[lvl].c[std::rand() % l[lvl].c.size()];

          if (cluster_map__.exists(c)) {
            v = cluster_map__[c];

            switch (v->size()) {
              case 3: {
                // ... (instance names `first` and `second` are generated, an
                // instance of v->front() is added as `first`, and the
                // std::stringstream `chain` is created; lines missing)
                factory.addInstance(v->at(1), second);
                // ...
                chain << second << "." << v->at(2);
                // ... (the reference v->at(2) of `second` is pointed at
                // `first`; line missing)
                break;
              }

              case 5: {
                // ... (instance names are generated, an instance of v->front()
                // is added as `first`, and the stringstreams chain_1 / chain_2
                // are created; lines missing)
                factory.addInstance(v->at(1), second);
                // ...
                chain_1 << second << "." << v->at(2);
                // ... (the reference of `second` is pointed at `first`)
                factory.addInstance(v->at(3), third);
                chain_2 << third << "." << v->at(4);
                // ... (the reference of `third` is pointed at `second`)
                break;
              }

              default: {
                GUM_ERROR(OperationNotAllowed, "unexpected vector size");
              }
            }

            // cluster_map__.erase(c);
            // delete v;
            name = first;
          } else {
            // ... (a fresh instance name is generated and an instance of class
            // c is added; lines missing from this listing)
          }

          o[lvl].push_back(name);

          if (lvl) {
            std::stringstream chain;   // declaration inferred from the uses below
            chain << name << "." << l[lvl].r;
            std::vector< std::string > ref2add;

            for (std::vector< std::string >::iterator iter = o[lvl - 1].begin();
                 iter != o[lvl - 1].end();
                 ++iter)
              if (std::rand() <= density) ref2add.push_back(*iter);

            if (ref2add.empty())
              factory.setReferenceSlot(   // call inferred from its arguments below
                 chain.str(),
                 o[lvl - 1][std::rand() % o[lvl - 1].size()]);

            while (ref2add.size() > getMaxParents()) {
              idx = std::rand() % ref2add.size();
              ref2add[idx] = ref2add.back();
              ref2add.pop_back();
            }

            for (std::vector< std::string >::iterator iter = ref2add.begin();
                 iter != ref2add.end();
                 ++iter)
              // ... (the reference chain.str() is pointed at *iter; line
              // missing from this listing)
          }
        }
      }

      factory.endSystem();
    }

    template < typename GUM_SCALAR >
    // ... (default constructor: its signature and the first member
    // initializers are missing from this listing)
          cluster_ratio__(0.0) {
      // ...
    }

    template < typename GUM_SCALAR >
    // ... (a second constructor, presumably the copy constructor; its
    // signature, initializers and body are missing from this listing)
    }

    template < typename GUM_SCALAR >
    // ... (destructor: signature missing from this listing; note that the
    // clean-up of cluster_map__ below is commented out, so the vectors it
    // owns are not deleted here)
      // typedef HashTable<std::string, std::vector<std::string>*>::iterator
      //    Iter;
      // for (Iter iter = cluster_map__.begin(); iter != cluster_map__.end();
      //      ++iter) {
      //    delete *iter;
      // }
    }

    template < typename GUM_SCALAR >
    // ... (copy-assignment operator: signature and member assignments are
    // missing from this listing)
      return *this;
    }

    template < typename GUM_SCALAR >
    // signature inferred (missing from this listing): the getDomainSize()
    // accessor used throughout the generation code above
    INLINE Size ClusteredLayerGenerator< GUM_SCALAR >::getDomainSize() const {
      return domain_size__;
    }

    template < typename GUM_SCALAR >
    // ... (setter signature missing from this listing; it takes the Size `s`
    // assigned below)
      domain_size__ = s;
    }

    template < typename GUM_SCALAR >
    // signature inferred (missing from this listing): the getMaxParents()
    // accessor used by the DAG and system generation above
    INLINE Size ClusteredLayerGenerator< GUM_SCALAR >::getMaxParents() const {
      return max_parents__;
    }

    template < typename GUM_SCALAR >
    // ... (setter signature missing from this listing; it takes the Size `s`
    // assigned below)
      max_parents__ = s;
    }

    template < typename GUM_SCALAR >
    // ... (setter signature missing from this listing; it takes the layer
    // configuration `v` assigned below)
       const std::vector< typename LayerGenerator< GUM_SCALAR >::LayerData >& v) {
      layers__ = v;
    }

    template < typename GUM_SCALAR >
    // ... (accessor returning layers__ by reference; signature missing from
    // this listing)
      return layers__;
    }

    template < typename GUM_SCALAR >
    INLINE const std::vector< typename LayerGenerator< GUM_SCALAR >::LayerData >&
       // ... (qualified name of the const accessor; missing from this listing)
      return layers__;
    }

    template < typename GUM_SCALAR >
    // signature inferred (missing from this listing): the getClusterRatio()
    // accessor mentioned in the commented-out code of the class generation
    // above; the return type is assumed here
    INLINE double ClusteredLayerGenerator< GUM_SCALAR >::getClusterRatio() const {
      return cluster_ratio__;
    }

    template < typename GUM_SCALAR >
    INLINE void
       // ... (qualified name and parameter of the cluster-ratio setter, and
       // the assignment to cluster_ratio__, are missing from this listing)
    }

  } /* namespace prm */
} /* namespace gum */