aGrUM  0.20.2
a C++ library for (probabilistic) graphical models
genericBNLearner_inl.h
Go to the documentation of this file.
1 /**
2  *
3  * Copyright 2005-2020 Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
4  * info_at_agrum_dot_org
5  *
6  * This library is free software: you can redistribute it and/or modify
7  * it under the terms of the GNU Lesser General Public License as published by
8  * the Free Software Foundation, either version 3 of the License, or
9  * (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14  * GNU Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public License
17  * along with this library. If not, see <http://www.gnu.org/licenses/>.
18  *
19  */
20 
21 
22 /** @file
23  * @brief A pack of learning algorithms that can easily be used
24  *
25  * The pack currently contains K2, GreedyHillClimbing, 3off2 and
26  *LocalSearchWithTabuList
27  *
28  * @author Christophe GONZALES(@AMU) and Pierre-Henri WUILLEMIN(@LIP6)
29  */
30 
31 // to help IDE parser
32 #include <agrum/BN/learning/BNLearnUtils/genericBNLearner.h>
33 #include <agrum/tools/graphs/undiGraph.h>
34 
35 namespace gum {
36 
37  namespace learning {
38 
39  // returns the row filter
40  INLINE DBRowGeneratorParser<>& genericBNLearner::Database::parser() {
41  return *parser__;
42  }
43 
44  // returns the modalities of the variables
45  INLINE const std::vector< std::size_t >&
47  return domain_sizes__;
48  }
49 
50  // returns the names of the variables in the database
51  INLINE const std::vector< std::string >&
53  return database__.variableNames();
54  }
55 
56  /// assign new weight to the rows of the learning database
57  INLINE void
59  if (database__.nbRows() == std::size_t(0)) return;
60  const double weight = new_weight / double(database__.nbRows());
62  }
63 
64  // returns the node id corresponding to a variable name
67  try {
69  return nodeId2cols__.first(cols[0]);
70  } catch (...) {
72  "Variable " << var_name
73  << " could not be found in the database");
74  }
75  }
76 
77 
78  // returns the variable name corresponding to a given node id
79  INLINE const std::string&
81  try {
83  } catch (...) {
85  "Variable of Id " << id
86  << " could not be found in the database");
87  }
88  }
89 
90 
91  /// returns the internal database table
92  INLINE const DatabaseTable<>&
94  return database__;
95  }
96 
97 
98  /// returns the set of missing symbols taken into account
99  INLINE const std::vector< std::string >&
101  return database__.missingSymbols();
102  }
103 
104 
105  /// returns the mapping between node ids and their columns in the database
106  INLINE const Bijection< NodeId, std::size_t >&
108  return nodeId2cols__;
109  }
110 
111 
112  /// returns the number of records in the database
114  return database__.nbRows();
115  }
116 
117 
118  /// returns the number of records in the database
120  return database__.size();
121  }
122 
123 
124  /// sets the weight of the ith record
126  const double weight) {
128  }
129 
130 
131  /// returns the weight of the ith record
132  INLINE double genericBNLearner::Database::weight(const std::size_t i) const {
133  return database__.weight(i);
134  }
135 
136 
137  /// returns the weight of the whole database
139  return database__.weight();
140  }
141 
142 
143  // ===========================================================================
144 
145  // returns the node id corresponding to a variable name
148  }
149 
150  // returns the variable name corresponding to a given node id
153  }
154 
155  /// assign new weight to the rows of the learning database
158  }
159 
160  /// assign new weight to the ith row of the learning database
162  const double new_weight) {
164  }
165 
166  /// returns the weight of the ith record
167  INLINE double genericBNLearner::recordWeight(const std::size_t i) const {
168  return score_database__.weight(i);
169  }
170 
171  /// returns the weight of the whole database
173  return score_database__.weight();
174  }
175 
176  // sets an initial DAG structure
178  initial_dag__ = dag;
179  }
180 
181  // indicate that we wish to use an AIC score
185  }
186 
187  // indicate that we wish to use a BD score
191  }
192 
193  // indicate that we wish to use a BDeu score
197  }
198 
199  // indicate that we wish to use a BIC score
203  }
204 
205  // indicate that we wish to use a K2 score
209  }
210 
211  // indicate that we wish to use a Log2Likelihood score
215  }
216 
217  // sets the max indegree
220  }
221 
222  // indicate that we wish to use 3off2
226  }
227 
228  // indicate that we wish to use 3off2
232  }
233 
234  /// indicate that we wish to use the NML correction for 3off2
238  "You must use the 3off2 algorithm before selecting "
239  << "the NML score");
240  }
242  }
243 
244  /// indicate that we wish to use the MDL correction for 3off2
248  "You must use the 3off2 algorithm before selecting "
249  << "the MDL score");
250  }
252  }
253 
254  /// indicate that we wish to use the NoCorr correction for 3off2
258  "You must use the 3off2 algorithm before selecting "
259  << "the NoCorr score");
260  }
262  }
263 
264  /// get the list of arcs hiding latent variables
268  "You must use the 3off2 algorithm before selecting "
269  << "the latentVariables method");
270  }
271  return miic_3off2__.latentVariables();
272  }
273 
274  // indicate that we wish to use a K2 algorithm
278  }
279 
280  // indicate that we wish to use a K2 algorithm
284  }
285 
286  // indicate that we wish to use a greedy hill climbing algorithm
289  }
290 
291  // indicate that we wish to use a local search with tabu list
293  Size nb_decrease) {
297  }
298 
 299  /// use the EM algorithm to learn parameters
300  INLINE void genericBNLearner::useEM(const double epsilon) {
302  }
303 
304 
307  }
308 
309  // assign a set of forbidden edges
312  }
313  // assign a set of forbidden edges from an UndiGraph
316  }
317 
318  // assign a new possible edge
321  }
322 
323  // remove a forbidden edge
326  }
327 
328  // assign a new forbidden edge
330  const NodeId head) {
332  }
333 
334  // remove a forbidden edge
336  const NodeId head) {
338  }
339 
340  // assign a new forbidden edge
342  const std::string& head) {
344  }
345 
346  // remove a forbidden edge
348  const std::string& head) {
350  }
351 
352  // assign a set of forbidden arcs
355  }
356 
357  // assign a new forbidden arc
360  }
361 
362  // remove a forbidden arc
365  }
366 
367  // assign a new forbidden arc
369  const NodeId head) {
371  }
372 
373  // remove a forbidden arc
375  const NodeId head) {
377  }
378 
379  // assign a new forbidden arc
381  const std::string& head) {
383  }
384 
385  // remove a forbidden arc
387  const std::string& head) {
389  }
390 
391  // assign a set of forbidden arcs
394  }
395 
396  // assign a new forbidden arc
399  }
400 
401  // remove a forbidden arc
404  }
405 
406  // assign a new forbidden arc
408  const std::string& head) {
410  }
411 
412  // remove a forbidden arc
414  const std::string& head) {
416  }
417 
418  // assign a new forbidden arc
420  const NodeId head) {
422  }
423 
424  // remove a forbidden arc
426  const NodeId head) {
428  }
429 
430  // sets a partial order on the nodes
431  INLINE void
434  }
435 
437  const std::vector< std::vector< std::string > >& slices) {
439  NodeId rank = 0;
440  for (const auto& slice: slices) {
441  for (const auto& name: slice) {
443  }
444  rank++;
445  }
447  }
448 
449  // sets the apriori weight
451  if (weight < 0) {
452  GUM_ERROR(OutOfBounds, "the weight of the apriori must be positive");
453  }
454 
457  }
458 
459  // use the apriori smoothing
463  }
464 
465  // use the apriori smoothing
467  if (weight < 0) {
468  GUM_ERROR(OutOfBounds, "the weight of the apriori must be positive");
469  }
470 
473 
475  }
476 
477  // use the Dirichlet apriori
479  double weight) {
480  if (weight < 0) {
481  GUM_ERROR(OutOfBounds, "the weight of the apriori must be positive");
482  }
483 
487 
489  }
490 
491 
492  // use the apriori BDeu
494  if (weight < 0) {
495  GUM_ERROR(OutOfBounds, "the weight of the apriori must be positive");
496  }
497 
500 
502  }
503 
504 
505  // returns the type (as a string) of a given apriori
507  switch (apriori_type__) {
508  case AprioriType::NO_APRIORI:
509  return AprioriNoApriori<>::type::type;
510 
511  case AprioriType::SMOOTHING:
512  return AprioriSmoothing<>::type::type;
513 
516 
517  case AprioriType::BDEU:
518  return AprioriBDeu<>::type::type;
519 
520  default:
522  "genericBNLearner getAprioriType does "
523  "not support yet this apriori");
524  }
525  }
526 
527  // returns the names of the variables in the database
529  return score_database__.names();
530  }
531 
532  // returns the modalities of the variables in the database
533  INLINE const std::vector< std::size_t >&
535  return score_database__.domainSizes();
536  }
537 
538  // returns the modalities of a variable in the database
540  return score_database__.domainSizes()[var];
541  }
542  // returns the modalities of a variables in the database
545  }
546 
547  /// returns the current database rows' ranges used for learning
548  INLINE const std::vector< std::pair< std::size_t, std::size_t > >&
550  return ranges__;
551  }
552 
553  /// reset the ranges to the one range corresponding to the whole database
555 
556  /// returns the database used by the BNLearner
559  }
560 
562  return score_database__.domainSizes().size();
563  }
564 
567  }
568  } /* namespace learning */
569 } /* namespace gum */
INLINE void emplace(Args &&... args)
Definition: set_tpl.h:669
Database(const std::string &filename, const BayesNet< GUM_SCALAR > &bn, const std::vector< std::string > &missing_symbols)