aGrUM  0.20.3
a C++ library for (probabilistic) graphical models
DAG2BNLearner_tpl.h
/**
 *
 * Copyright (c) 2005-2021 by Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
 * info_at_agrum_dot_org
 *
 * This library is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this library. If not, see <http://www.gnu.org/licenses/>.
 *
 */


/** @file
 * @brief A class that, given a structure and a parameter estimator, returns a
 * fully parameterized Bayes net
 *
 * @author Christophe GONZALES(@AMU) and Pierre-Henri WUILLEMIN(@LIP6)
 */


#include <algorithm>
#include <string>
#include <vector>

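/* A minimal usage sketch (not part of the library sources): assuming a database
 * has already been parsed and a parameter estimator (e.g. a ParamEstimatorML<>)
 * has been bound to it, a learned DAG can be turned into a fully parameterized
 * BayesNet roughly as follows. Estimator construction (score, priors, parsers)
 * is elided and the variable names are illustrative only.
 *
 * @code
 * gum::learning::DAG2BNLearner<> learner;
 * gum::DAG                       dag;        // structure learned elsewhere
 * // estimator: a gum::learning::ParamEstimator<> bound to the database
 * gum::BayesNet< double > bn = learner.createBN< double >(estimator, dag);
 * @endcode
 */
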
namespace gum {

  namespace learning {

    /// returns the allocator used by the learner
    template < template < typename > class ALLOC >
    INLINE typename DAG2BNLearner< ALLOC >::allocator_type
       DAG2BNLearner< ALLOC >::getAllocator() const {
      return *this;
    }


    /// default constructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(
       const typename DAG2BNLearner< ALLOC >::allocator_type& alloc) :
        ALLOC< NodeId >(alloc) {
      GUM_CONSTRUCTOR(DAG2BNLearner);
    }


    /// copy constructor with a given allocator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(
       const DAG2BNLearner< ALLOC >&                          from,
       const typename DAG2BNLearner< ALLOC >::allocator_type& alloc) :
        ApproximationScheme(from),
        ALLOC< NodeId >(alloc) {
      GUM_CONS_CPY(DAG2BNLearner);
    }


    /// copy constructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(const DAG2BNLearner< ALLOC >& from) :
        DAG2BNLearner(from, from.getAllocator()) {}


    /// move constructor with a given allocator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(
       DAG2BNLearner< ALLOC >&&                               from,
       const typename DAG2BNLearner< ALLOC >::allocator_type& alloc) :
        ApproximationScheme(std::move(from)),
        ALLOC< NodeId >(alloc) {
      GUM_CONS_MOV(DAG2BNLearner);
    }


    /// move constructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(DAG2BNLearner< ALLOC >&& from) :
        DAG2BNLearner(std::move(from), from.getAllocator()) {}


    /// virtual copy constructor with a given allocator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >* DAG2BNLearner< ALLOC >::clone(
       const typename DAG2BNLearner< ALLOC >::allocator_type& alloc) const {
      ALLOC< DAG2BNLearner< ALLOC > > allocator(alloc);
      DAG2BNLearner< ALLOC >*         new_learner = allocator.allocate(1);
      try {
        allocator.construct(new_learner, *this, alloc);
      } catch (...) {
        allocator.deallocate(new_learner, 1);
        throw;
      }

      return new_learner;
    }


    /// virtual copy constructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >* DAG2BNLearner< ALLOC >::clone() const {
      return clone(this->getAllocator());
    }


    /// destructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::~DAG2BNLearner() {
      GUM_DESTRUCTOR(DAG2BNLearner);
    }


    /// copy operator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >&
       DAG2BNLearner< ALLOC >::operator=(const DAG2BNLearner< ALLOC >&) {
      return *this;
    }


    /// move operator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >&
       DAG2BNLearner< ALLOC >::operator=(DAG2BNLearner< ALLOC >&&) {
      return *this;
    }


    /// copy a potential into another whose variables' sequence differs
    template < template < typename > class ALLOC >
    template < typename GUM_SCALAR >
    void
       DAG2BNLearner< ALLOC >::copyPotential__(gum::Potential< GUM_SCALAR >&       pot,
                                               const gum::Potential< GUM_SCALAR >& other_pot) {
      // check that the variables are identical (possibly in a different order)
      if (!pot.variablesSequence().diffSet(other_pot.variablesSequence()).empty()) {
        GUM_ERROR(gum::CPTError, "the potentials do not have the same variables")
      }

      // perform the copy: each value of other_pot is written into the
      // instantiation of pot corresponding to the same joint configuration
      Instantiation i(other_pot);
      Instantiation j(pot);
      for (i.setFirst(); !i.end(); ++i) {
        j.setVals(i);
        pot.set(j, other_pot[i]);
      }
    }

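    /* Illustration (not part of the library sources): the helper above copies a
     * potential into another one that holds the same variables in a different
     * order, keeping each value attached to the same joint configuration. A
     * hedged sketch, assuming two discrete variables a and b already exist:
     *
     * @code
     * gum::Potential< double > p;   // dimensions ordered (a, b)
     * p << a << b;
     * gum::Potential< double > q;   // dimensions ordered (b, a)
     * q << b << a;
     * p.fillWith({0.1, 0.2, 0.3, 0.4});
     * // copying p into q entry by entry through an Instantiation preserves
     * // the value of every configuration of (a, b), whatever the ordering
     * @endcode
     */
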
    /// create a BN
    template < template < typename > class ALLOC >
    template < typename GUM_SCALAR >
    BayesNet< GUM_SCALAR >
       DAG2BNLearner< ALLOC >::createBN(ParamEstimator< ALLOC >& estimator,
                                        const DAG&               dag) {
      BayesNet< GUM_SCALAR > bn;

      // create a bn with dummy parameters corresponding to the dag
      const auto& node2cols = estimator.nodeId2Columns();
      const auto& database  = estimator.database();
      if (node2cols.empty()) {
        for (const auto id: dag) {
          bn.add(dynamic_cast< const DiscreteVariable& >(database.variable(id)), id);
        }
      } else {
        for (const auto id: dag) {
          const std::size_t col = node2cols.second(id);
          bn.add(dynamic_cast< const DiscreteVariable& >(database.variable(col)), id);
        }
      }

      // add the arcs
      bn.beginTopologyTransformation();
      for (const auto& arc: dag.arcs()) {
        bn.addArc(arc.tail(), arc.head());
      }
      bn.endTopologyTransformation();

      // estimate the parameters
      const VariableNodeMap& varmap = bn.variableNodeMap();
      for (const auto id: dag) {
        // get the sequence of variables and make the targets be the last
        auto& pot = const_cast< Potential< GUM_SCALAR >& >(bn.cpt(id));

        // get the variables of the CPT of id in the correct order
        const Sequence< const DiscreteVariable* >& vars = pot.variablesSequence();

        // setup the estimation: vars[0] is the target, the other variables
        // are its parents (the conditioning set)
        std::vector< NodeId > conditioning_ids(vars.size() - 1);
        for (auto i = std::size_t(1); i < vars.size(); ++i) {
          conditioning_ids[i - 1] = varmap.get(*(vars[i]));
        }
        estimator.setParameters(id, conditioning_ids, pot);
      }

      return bn;
    }

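    /* Note on the parameter-estimation step above: in a gum::BayesNet, the first
     * variable of cpt(id) is the variable of node id itself and the remaining
     * variables are its parents, which is why conditioning_ids is built from
     * vars[1..] only. A small sanity sketch (illustrative, not library code):
     *
     * @code
     * // after createBN, for every node n of bn:
     * for (const auto n: bn.nodes()) {
     *   const auto& vs = bn.cpt(n).variablesSequence();
     *   assert(bn.variableNodeMap().get(*vs[0]) == n);   // vs[0] is n's variable
     * }
     * @endcode
     */
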
    /// create a BN with EM: a bootstrap estimator provides the initial
    /// parameters and a general estimator refines them iteratively
    template < template < typename > class ALLOC >
    template < typename GUM_SCALAR >
    BayesNet< GUM_SCALAR >
       DAG2BNLearner< ALLOC >::createBN(ParamEstimator< ALLOC >& bootstrap_estimator,
                                        ParamEstimator< ALLOC >& general_estimator,
                                        const DAG&               dag) {
      // bootstrap EM by learning an initial model
      BayesNet< GUM_SCALAR > bn = createBN< GUM_SCALAR >(bootstrap_estimator, dag);
      for (const auto& nod: bn.nodes()) {
        bn.cpt(nod).noising(0.1);
      }
      general_estimator.setBayesNet(bn);

      // perform EM
      initApproximationScheme();

      GUM_SCALAR delta;
      do {
        // bugfix for parallel execution of VariableElimination
        const auto& xdag = bn.dag();
        for (const auto node: xdag) {
          xdag.parents(node);
          xdag.children(node);
        }

        BayesNet< GUM_SCALAR > new_bn = createBN< GUM_SCALAR >(general_estimator, dag);
        updateApproximationScheme();

        // delta is the largest relative change of a CPT entry between two
        // consecutive EM iterations
        delta = GUM_SCALAR(0.0);
        for (const auto node: dag) {
          const auto& old_cpt = bn.cpt(node);
          const auto& new_cpt = new_bn.cpt(node);

          Instantiation old_inst(old_cpt);
          Instantiation new_inst(new_cpt);

          for (; !old_inst.end(); ++old_inst, ++new_inst) {
            const GUM_SCALAR old_val = old_cpt[old_inst];
            if (old_val > 0.0) {
              const GUM_SCALAR new_val = new_cpt[new_inst];
              const GUM_SCALAR diff    = new_val - old_val;
              const auto diffrel = (diff < 0.0) ? (-diff / old_val) : (diff / old_val);
              if (delta < diffrel) delta = diffrel;
            }
          }
        }

        bn = std::move(new_bn);
      } while (continueApproximationScheme(double(delta)));

      stopApproximationScheme();   // just to be sure that the approximation
                                   // scheme has been notified of the end of the loop

      return bn;
    }

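    /* A hedged usage sketch for the EM variant above (not part of the library
     * sources): with missing values in the database, one typically builds two
     * parameter estimators, one whose row generators skip incomplete rows (to
     * bootstrap EM) and one whose row generators perform EM completion, and
     * passes both to createBN. Estimator construction is elided and the
     * variable names are illustrative only.
     *
     * @code
     * gum::learning::DAG2BNLearner<> learner;
     * gum::BayesNet< double >        bn = learner.createBN< double >(
     *    bootstrap_estimator,   // e.g. a ParamEstimatorML<> ignoring missing rows
     *    em_estimator,          // e.g. a ParamEstimatorML<> with EM row generators
     *    dag);                  // the structure learned beforehand
     * @endcode
     */
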

    /// returns the approximation policy of the learning algorithm
    template < template < typename > class ALLOC >
    INLINE ApproximationScheme& DAG2BNLearner< ALLOC >::approximationScheme() {
      return *this;
    }
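
    /* Because DAG2BNLearner is itself an ApproximationScheme (the accessor above
     * returns *this), the EM stopping criteria can be tuned before calling the
     * two-estimator createBN. A minimal sketch; the values are illustrative:
     *
     * @code
     * gum::learning::DAG2BNLearner<> learner;
     * learner.approximationScheme().setEpsilon(1e-4);   // min relative CPT change
     * learner.approximationScheme().setMaxIter(100);    // cap on EM iterations
     * @endcode
     */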


  } // namespace learning

} /* namespace gum */