// aGrUM 0.20.3 — a C++ library for (probabilistic) graphical models
// kNML_tpl.h — Doxygen "Go to the documentation of this file" source listing;
// these header lines are page-navigation text, not part of the source file.
1 /**
2  *
3  * Copyright (c) 2005-2021 by Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
4  * info_at_agrum_dot_org
5  *
6  * This library is free software: you can redistribute it and/or modify
7  * it under the terms of the GNU Lesser General Public License as published by
8  * the Free Software Foundation, either version 3 of the License, or
9  * (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14  * GNU Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public License
17  * along with this library. If not, see <http://www.gnu.org/licenses/>.
18  *
19  */
20 
21 
22 /**
23  * @file
24  * @brief The class for the NML penalty used in 3off2
25  *
26  * @author Christophe GONZALES(@AMU) and Pierre-Henri WUILLEMIN(@LIP6)
27  */
28 
29 #ifndef DOXYGEN_SHOULD_SKIP_THIS
30 
31 namespace gum {
32 
33  namespace learning {
34 
35  /// default constructor
// NOTE(review): Doxygen source-listing gap — original lines 38 and 42
// (additional parameters) and 44-46 (initializer list + constructor body)
// were dropped by the extraction; only part of the signature is visible.
36  template < template < typename > class ALLOC >
37  INLINE KNML< ALLOC >::KNML(
// the prior applied to the countings (first visible parameter; a preceding
// parameter on missing original line 38 is not shown — verify against kNML.h)
39  const Apriori< ALLOC >& apriori,
// the set of [begin,end) row ranges of the database over which the
// countings are performed
40  const std::vector< std::pair< std::size_t, std::size_t >,
41  ALLOC< std::pair< std::size_t, std::size_t > > >& ranges,
// the allocator used by all the internal data structures of the score
43  const typename KNML< ALLOC >::allocator_type& alloc) :
47  }
48 
49 
50  /// default constructor
// Overload without an explicit `ranges` argument — presumably the countings
// span the whole database; confirm against kNML.h. NOTE(review): original
// lines 53, 55 and 57-59 (remaining parameters, initializer list and body)
// are missing from this extraction.
51  template < template < typename > class ALLOC >
52  INLINE KNML< ALLOC >::KNML(
// the prior applied to the countings
54  const Apriori< ALLOC >& apriori,
// the allocator used by all the internal data structures of the score
56  const typename KNML< ALLOC >::allocator_type& alloc) :
60  }
61 
62 
63  /// copy constructor with a given allocator
// NOTE(review): the initializer list and body (original lines 67-69) were
// dropped by the extraction; presumably the members of `from` are copied and
// the internal containers are rebound to `alloc` — verify in the repository.
64  template < template < typename > class ALLOC >
65  INLINE KNML< ALLOC >::KNML(const KNML< ALLOC >& from,
66  const typename KNML< ALLOC >::allocator_type& alloc) :
70  }
71 
72 
73  /// copy constructor
// Delegates to the allocator-aware copy constructor above, reusing the
// source object's own allocator.
74  template < template < typename > class ALLOC >
75  INLINE KNML< ALLOC >::KNML(const KNML< ALLOC >& from) :
76  KNML< ALLOC >(from, from.getAllocator()) {}
77 
78 
79  /// move constructor with a given allocator
// NOTE(review): the initializer list and body (original lines 83-85) are
// missing from this extraction.
80  template < template < typename > class ALLOC >
81  INLINE KNML< ALLOC >::KNML(KNML< ALLOC >&& from,
82  const typename KNML< ALLOC >::allocator_type& alloc) :
86  }
87 
88 
89  /// move constructor
// Delegates to the allocator-aware move constructor. Reading
// from.getAllocator() here is safe: std::move is only a cast, and the actual
// move takes place inside the delegated constructor, after both arguments
// have been evaluated.
90  template < template < typename > class ALLOC >
91  INLINE KNML< ALLOC >::KNML(KNML< ALLOC >&& from) :
92  KNML< ALLOC >(std::move(from), from.getAllocator()) {}
93 
94 
95  /// virtual copy constructor with a given allocator
// NOTE(review): original lines 98-99 (allocation of `new_score` via `alloc`),
// 101 (its construction) and 103 (cleanup on failure) are missing from this
// extraction; the surviving try/catch skeleton suggests an exception-safe
// allocate-construct-release pattern — confirm against the repository.
96  template < template < typename > class ALLOC >
97  KNML< ALLOC >* KNML< ALLOC >::clone(const typename KNML< ALLOC >::allocator_type& alloc) const {
100  try {
102  } catch (...) {
// presumably the raw storage acquired above is released here before rethrowing
104  throw;
105  }
106 
107  return new_score;
108  }
109 
110 
111  /// virtual copy constructor
112  template < template < typename > class ALLOC >
113  KNML< ALLOC >* KNML< ALLOC >::clone() const {
114  return clone(this->getAllocator());
115  }
116 
117 
118  /// destructor
// NOTE(review): the body (original line 121, presumably a debug/bookkeeping
// macro such as GUM_DESTRUCTOR) is missing from this extraction.
119  template < template < typename > class ALLOC >
120  KNML< ALLOC >::~KNML() {
122  }
123 
124 
125  /// copy operator
126  template < template < typename > class ALLOC >
127  KNML< ALLOC >& KNML< ALLOC >::operator=(const KNML< ALLOC >& from) {
// self-assignment guard; the actual member-wise copy (original lines
// 129-130) is missing from this extraction
128  if (this != &from) {
131  }
132  return *this;
133  }
134 
135 
136  /// move operator
137  template < template < typename > class ALLOC >
138  KNML< ALLOC >& KNML< ALLOC >::operator=(KNML< ALLOC >&& from) {
// self-assignment guard; the actual member-wise move (original lines
// 140-141) is missing from this extraction
139  if (this != &from) {
142  }
143  return *this;
144  }
145 
146 
147  /// clears all the data structures from memory, including the cache
148  template < template < typename > class ALLOC >
149  void KNML< ALLOC >::clear() {
// NOTE(review): the body (original lines 150-151) is missing from this
// extraction — presumably it forwards to the base class and clears the
// internal cache; verify against the repository
152  }
153 
154 
155  /// clears the current cache
156  template < template < typename > class ALLOC >
157  void KNML< ALLOC >::clearCache() {
// NOTE(review): the body (original lines 158-159) is missing from this
// extraction
160  }
161 
162 
163  /// turn on/off the use of a cache of the previously computed score
// @param on_off true enables the cache, false disables it
// NOTE(review): the body (original lines 166-167) is missing from this
// extraction
164  template < template < typename > class ALLOC >
165  void KNML< ALLOC >::useCache(const bool on_off) {
168  }
169 
170 
171  /// returns the score corresponding to a given nodeset
// Computes the kNML penalty for the pair (X = idset[0], Y = idset[1]),
// optionally conditioned on the remaining nodes of the idset, following the
// formulas quoted in the comments below (C^r_N denotes the parametric
// complexity of an r-ary multinomial over N samples). NOTE(review): many
// interior lines of the original (179, 186-187, 198, 200, 208-211, 220, 222,
// 224-225, 240-242, 252, 256-258) were dropped by the extraction, so the
// accumulation of the log-C terms into `score` is not visible here —
// confirm every presumed behavior against the repository.
172  template < template < typename > class ALLOC >
173  double KNML< ALLOC >::score_(const IdCondSet< ALLOC >& idset) {
174  // perform the countings on the database for all the nodes in the idset
175  // This will help optimizing the computations of the Nxui, Nyui and Nui
176  // that we will be needed subsequently
177  this->counter_.counts(idset, true);
178 
180 
181  // get the domain sizes of X and Y
182  const auto& db = this->database();
183  const auto& node2cols = this->nodeId2Columns();
184  std::size_t r_x, r_y;
185  if (!node2cols.empty()) {
// NOTE(review): lines 186-187 (presumably mapping idset[0]/idset[1] through
// node2cols before querying db.domainSize) are missing from this extraction
188  } else {
189  r_x = db.domainSize(idset[0]);
190  r_y = db.domainSize(idset[1]);
191  }
192 
193 
194  // here, we distinguish idsets with conditioning nodes from those
195  // without conditioning nodes
196  if (idset.hasConditioningSet()) {
197  // now get the Nxui, Nyui and Nui
// idset_xui / idset_yui are presumably built from idset on the missing
// lines 198 and 200, then reduced here to {X, ui} and {Y, ui} respectively
199  idset_xui.erase(idset[1]);
201  idset_yui.erase(idset[0]);
202 
203  std::vector< double, ALLOC< double > > N_ui
204  = this->counter_.counts(idset.conditionalIdCondSet(), false);
205  std::vector< double, ALLOC< double > > N_xui = this->counter_.counts(idset_xui, false);
206  std::vector< double, ALLOC< double > > N_yui = this->counter_.counts(idset_yui, false);
207 
// NOTE(review): lines 208-211 are missing (possibly a correction of the
// countings w.r.t. the prior — verify); line 212 closes that inner block
212  }
213 
214 
215  // the value of kNML is equal to:
216  // 0.5 * sum_Z ( sum_X( log( C^(r_y)_#ZX ) ) - log( C^(r_y)_#Z ) +
217  // sum_Y( log( C^(r_x)_#ZY ) ) - log( C^(r_x)_#Z ) )
218  double score = 0.0;
// each loop body below (original lines 220, 222, 224-225) is missing; they
// presumably accumulate the log-C terms of the formula above into `score`
219  for (auto n_xui: N_xui)
221  for (auto n_yui: N_yui)
223  for (auto n_ui: N_ui) {
226  }
227 
228  score *= 0.5;
229 
230  return score;
231  } else {
232  // here, there is no conditioning set
233  // now get the Nxui, Nyui and Nui
234  IdCondSet< ALLOC > idset_xui(idset[0], this->empty_ids_, true);
235  IdCondSet< ALLOC > idset_yui(idset[1], this->empty_ids_, true);
236 
237  std::vector< double, ALLOC< double > > N_xui = this->counter_.counts(idset_xui, false);
238  std::vector< double, ALLOC< double > > N_yui = this->counter_.counts(idset_yui, false);
239 
// NOTE(review): lines 240-242 are missing (same presumed prior-related
// correction as in the conditional branch)
243  }
244 
245 
246  // so, the formula for kNML is:
247  // 0.5 * ( sum_X( log( C^(r_y)_#X ) ) - log( C^(r_y)N_ ) +
248  // sum_Y( log( C^(r_x)_#Y ) ) - log( C^(r_x)N_ ) )
249  double N = 0.0;
250  double score = 0.0;
// N accumulates the total sample size from the X-marginal counts; the
// per-term accumulation into `score` (original lines 252 and 256-258) is
// missing from this extraction
251  for (auto n_xui: N_xui) {
253  N += n_xui;
254  }
255  for (auto n_yui: N_yui)
259 
260  score *= 0.5;
261 
262  return score;
263  }
264  }
265 
266  } /* namespace learning */
267 
268 } /* namespace gum */
269 
270 #endif /* DOXYGEN_SHOULD_SKIP_THIS */
// Doxygen cross-reference residue appended by the extraction (declarations
// from other files, not part of kNML_tpl.h):
//   INLINE void emplace(Args &&... args) — Definition: set_tpl.h:643
//   Database(const std::string &filename, const BayesNet< GUM_SCALAR > &bn,
//            const std::vector< std::string > &missing_symbols)