aGrUM 0.20.3, a C++ library for (probabilistic) graphical models
DAG2BNLearner_tpl.h
/**
 *
 *  Copyright (c) 2005-2021 by Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
 *  info_at_agrum_dot_org
 *
 *  This library is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU Lesser General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public License
 *  along with this library. If not, see <http://www.gnu.org/licenses/>.
 *
 */

/** @file
 * @brief A class that, given a structure and a parameter estimator, returns a
 * full Bayes net
 *
 * @author Christophe GONZALES(@AMU) and Pierre-Henri WUILLEMIN(@LIP6)
 */

#include <algorithm>
#include <string>
#include <vector>

namespace gum {

  namespace learning {

    /// returns the allocator used by the score
    template < template < typename > class ALLOC >
    INLINE typename DAG2BNLearner< ALLOC >::allocator_type
       DAG2BNLearner< ALLOC >::getAllocator() const {
      return *this;
    }

    /// default constructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(
       const typename DAG2BNLearner< ALLOC >::allocator_type& alloc) :
        ALLOC< NodeId >(alloc) {
      GUM_CONSTRUCTOR(DAG2BNLearner);
    }

    /// copy constructor with a given allocator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(
       const DAG2BNLearner< ALLOC >&                          from,
       const typename DAG2BNLearner< ALLOC >::allocator_type& alloc) :
        ApproximationScheme(from),
        ALLOC< NodeId >(alloc) {
      GUM_CONS_CPY(DAG2BNLearner);
    }

    /// copy constructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(const DAG2BNLearner< ALLOC >& from) :
        DAG2BNLearner(from, from.getAllocator()) {}

    /// move constructor with a given allocator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(
       DAG2BNLearner< ALLOC >&&                               from,
       const typename DAG2BNLearner< ALLOC >::allocator_type& alloc) :
        ApproximationScheme(std::move(from)),
        ALLOC< NodeId >(alloc) {
      GUM_CONS_MOV(DAG2BNLearner);
    }

    /// move constructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::DAG2BNLearner(DAG2BNLearner< ALLOC >&& from) :
        DAG2BNLearner(std::move(from), from.getAllocator()) {}

    /// virtual copy constructor with a given allocator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >* DAG2BNLearner< ALLOC >::clone(
       const typename DAG2BNLearner< ALLOC >::allocator_type& alloc) const {
      ALLOC< DAG2BNLearner< ALLOC > > allocator(alloc);
      DAG2BNLearner< ALLOC >*         new_learner = allocator.allocate(1);
      try {
        allocator.construct(new_learner, *this, alloc);
      } catch (...) {
        allocator.deallocate(new_learner, 1);
        throw;
      }

      return new_learner;
    }

    /// virtual copy constructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >* DAG2BNLearner< ALLOC >::clone() const {
      return clone(this->getAllocator());
    }

    /// destructor
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >::~DAG2BNLearner() {
      GUM_DESTRUCTOR(DAG2BNLearner);
    }

    /// copy operator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >&
       DAG2BNLearner< ALLOC >::operator=(const DAG2BNLearner< ALLOC >& from) {
      ApproximationScheme::operator=(from);
      return *this;
    }

    /// move operator
    template < template < typename > class ALLOC >
    DAG2BNLearner< ALLOC >&
       DAG2BNLearner< ALLOC >::operator=(DAG2BNLearner< ALLOC >&& from) {
      ApproximationScheme::operator=(std::move(from));
      return *this;
    }

    /// copy a potential into another whose variables' sequence differs
    template < template < typename > class ALLOC >
    template < typename GUM_SCALAR >
    void DAG2BNLearner< ALLOC >::_probaVarReordering_(
       gum::Potential< GUM_SCALAR >&       pot,
       const gum::Potential< GUM_SCALAR >& other_pot) {
      // check that the variables are identical
      if (!pot.variablesSequence().diffSet(other_pot.variablesSequence()).empty()) {
        GUM_ERROR(gum::CPTError, "the potentials do not have the same variables")
      }

      // perform the copy
      Instantiation i(other_pot);
      Instantiation j(pot);
      for (i.setFirst(); !i.end(); ++i) {
        j.setVals(i);
        pot.set(j, other_pot[i]);
      }
    }

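To make the copy pattern above concrete, here is a minimal, self-contained sketch of the same Instantiation-based reordering applied to two hand-built potentials. The variable names and fill values are purely illustrative, and the include paths assume aGrUM 0.20's header layout; only the loop at the end mirrors _probaVarReordering_ itself.

#include <agrum/tools/multidim/potential.h>
#include <agrum/tools/variables/labelizedVariable.h>

int main() {
  gum::LabelizedVariable a("a", "a binary variable", 2);
  gum::LabelizedVariable b("b", "another binary variable", 2);

  gum::Potential< double > src;   // variables in order (a, b)
  src << a << b;
  src.fillWith({0.1, 0.2, 0.3, 0.4});

  gum::Potential< double > dst;   // same variables, order (b, a)
  dst << b << a;

  // same pattern as _probaVarReordering_: iterate over src's configurations
  // and align dst's instantiation on each of them before copying the value
  gum::Instantiation i(src), j(dst);
  for (i.setFirst(); !i.end(); ++i) {
    j.setVals(i);
    dst.set(j, src[i]);
  }
}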
    /// create a BN
    template < template < typename > class ALLOC >
    template < typename GUM_SCALAR >
    BayesNet< GUM_SCALAR >
       DAG2BNLearner< ALLOC >::createBN(ParamEstimator< ALLOC >& estimator,
                                        const DAG&               dag) {
      BayesNet< GUM_SCALAR > bn;

      // create a bn with dummy parameters corresponding to the dag
      const auto& node2cols = estimator.nodeId2Columns();
      const auto& database  = estimator.database();
      if (node2cols.empty()) {
        for (const auto id: dag) {
          bn.add(dynamic_cast< const DiscreteVariable& >(database.variable(id)), id);
        }
      } else {
        for (const auto id: dag) {
          const std::size_t col = node2cols.second(id);
          bn.add(dynamic_cast< const DiscreteVariable& >(database.variable(col)), id);
        }
      }

      // add the arcs
      bn.beginTopologyTransformation();
      for (const auto& arc: dag.arcs()) {
        bn.addArc(arc.tail(), arc.head());
      }
      bn.endTopologyTransformation();

      // estimate the parameters
      const VariableNodeMap& varmap = bn.variableNodeMap();
      for (const auto id: dag) {
        // get the sequence of variables and make the targets be the last
        auto& pot = const_cast< Potential< GUM_SCALAR >& >(bn.cpt(id));

        // get the variables of the CPT of id in the correct order
        const Sequence< const DiscreteVariable* >& vars = pot.variablesSequence();

        // setup the estimation
        std::vector< NodeId > conditioning_ids(vars.size() - 1);
        for (auto i = std::size_t(1); i < vars.size(); ++i) {
          conditioning_ids[i - 1] = varmap.get(*(vars[i]));
        }
        estimator.setParameters(id, conditioning_ids, pot);
      }

      return bn;
    }

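As a usage sketch: once structure learning has produced a DAG and a parameter estimator has been configured, this overload boils down to a single call. The setup of `estimator` is elided here because ParamEstimator subclasses need a database parser, priors, and so on; treat this fragment as illustrative of the call shape only, not as a complete program.

// assumes: `estimator` is an already-configured gum::learning::ParamEstimator<>
// (e.g. a maximum-likelihood estimator over a DatabaseTable) and `dag` is the
// structure returned by a structure-learning algorithm
gum::learning::DAG2BNLearner<> learner;
gum::BayesNet< double >        bn = learner.createBN< double >(estimator, dag);

// bn now has one node per DAG node, one arc per DAG arc, and CPTs filled by
// estimator.setParameters() as in the loop above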
    /// create a BN
    template < template < typename > class ALLOC >
    template < typename GUM_SCALAR >
    BayesNet< GUM_SCALAR >
       DAG2BNLearner< ALLOC >::createBN(ParamEstimator< ALLOC >& bootstrap_estimator,
                                        ParamEstimator< ALLOC >& general_estimator,
                                        const DAG&               dag) {
      // bootstrap EM by learning an initial model
      BayesNet< GUM_SCALAR > bn = createBN< GUM_SCALAR >(bootstrap_estimator, dag);
      for (const auto& nod: bn.nodes()) {
        bn.cpt(nod).noising(0.1);
      }
      general_estimator.setBayesNet(bn);

      // perform EM
      initApproximationScheme();

      GUM_SCALAR delta;
      do {
        // bugfix for parallel execution of VariableElimination
        const auto& xdag = bn.dag();
        for (const auto node: xdag) {
          xdag.parents(node);
          xdag.children(node);
        }

        BayesNet< GUM_SCALAR > new_bn = createBN< GUM_SCALAR >(general_estimator, dag);
        updateApproximationScheme();

        delta = GUM_SCALAR(0.0);
        for (const auto node: dag) {
          const auto& old_cpt = bn.cpt(node);
          const auto& new_cpt = new_bn.cpt(node);

          Instantiation old_inst(old_cpt);
          Instantiation new_inst(new_cpt);

          for (; !old_inst.end(); ++old_inst, ++new_inst) {
            const GUM_SCALAR old_val = old_cpt.get(old_inst);
            if (old_val > 0.0) {
              const GUM_SCALAR new_val = new_cpt.get(new_inst);
              const GUM_SCALAR diff    = new_val - old_val;
              const auto diffrel = (diff < 0.0) ? (-diff / old_val) : (diff / old_val);
              if (delta < diffrel) delta = diffrel;
            }
          }
        }

        bn = std::move(new_bn);
      } while (continueApproximationScheme(double(delta)));

      stopApproximationScheme();   // just to be sure that the approximation scheme
                                   // has been notified of the end of the loop

      return bn;
    }

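The stopping criterion of the EM loop above is simply the largest relative change over all CPT entries whose previous value was non-zero. Stripped of the aGrUM types, the computation reduces to the following self-contained function (the name and signature are mine, not aGrUM's):

#include <cstddef>
#include <vector>

// mirrors the `delta` computation in the EM loop: entries with old value 0
// are skipped, and the result is compared against the scheme's epsilon
double maxRelativeChange(const std::vector< double >& old_params,
                         const std::vector< double >& new_params) {
  double delta = 0.0;
  for (std::size_t k = 0; k < old_params.size(); ++k) {
    const double old_val = old_params[k];
    if (old_val > 0.0) {
      const double diff    = new_params[k] - old_val;
      const double diffrel = (diff < 0.0) ? (-diff / old_val) : (diff / old_val);
      if (delta < diffrel) delta = diffrel;
    }
  }
  return delta;
}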
    /// returns the approximation policy of the learning algorithm
    template < template < typename > class ALLOC >
    INLINE ApproximationScheme& DAG2BNLearner< ALLOC >::approximationScheme() {
      return *this;
    }

  }   // namespace learning

}   /* namespace gum */
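Finally, a hedged sketch of driving the EM variant: DAG2BNLearner inherits from ApproximationScheme, so the usual stopping controls apply before calling the two-estimator createBN. As before, the construction of the two estimators is elided and hypothetical, and the epsilon and iteration values are examples only.

// assumes `bootstrap_estimator` ignores missing values (to build the initial
// model) while `general_estimator` handles them (to run the EM refinement)
gum::learning::DAG2BNLearner<> learner;
learner.setEpsilon(1e-3);   // stop once the max relative CPT change is small
learner.setMaxIter(100);    // ApproximationScheme safety bound

gum::BayesNet< double > bn
   = learner.createBN< double >(bootstrap_estimator, general_estimator, dag);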