aGrUM 0.20.2, a C++ library for (probabilistic) graphical models
inferenceEngine_tpl.h
/**
 *
 *   Copyright 2005-2020 Pierre-Henri WUILLEMIN(@LIP6) & Christophe GONZALES(@AMU)
 *   info_at_agrum_dot_org
 *
 *  This library is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU Lesser General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public License
 *  along with this library. If not, see <http://www.gnu.org/licenses/>.
 *
 */


/** @file
 * @brief implementation of the (abstract) InferenceEngine class for credal networks
 *
 * @author Christophe GONZALES(@AMU) and Pierre-Henri WUILLEMIN(@LIP6)
 */
27
#
include
<
agrum
/
CN
/
inference
/
inferenceEngine
.
h
>
28
#
include
<
agrum
/
agrum
.
h
>
29
30
namespace
gum
{
31
namespace
credal
{
32
33
/*template< typename GUM_SCALAR >
34
InferenceEngine< GUM_SCALAR >::InferenceEngine () : ApproximationScheme() {
35
std::cout << "InferenceEngine construct ()" << std::endl;
36
GUM_CONSTRUCTOR ( InferenceEngine );
37
}*/
38
    template < typename GUM_SCALAR >
    InferenceEngine< GUM_SCALAR >::InferenceEngine(
       const CredalNet< GUM_SCALAR >& credalNet) :
        ApproximationScheme() {
      credalNet_ = &credalNet;

      dbnOpt_.setCNet(credalNet);

      initMarginals_();

      GUM_CONSTRUCTOR(InferenceEngine);
    }

    template < typename GUM_SCALAR >
    InferenceEngine< GUM_SCALAR >::~InferenceEngine() {
      GUM_DESTRUCTOR(InferenceEngine);
    }
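    // Illustrative usage sketch (not part of the library): InferenceEngine is an
    // abstract base, so inference is run through a concrete engine built on a
    // CredalNet. The engine class, the CredalNet constructor and the file names
    // used below are assumptions for the example, not guaranteed by this file.
    //
    //   gum::credal::CredalNet< double >            cn("bn_min.bif", "bn_max.bif");
    //   cn.intervalToCredal();
    //   gum::credal::CNMonteCarloSampling< double > engine(cn);
    //   engine.insertEvidenceFile("evidence.evi");
    //   engine.makeInference();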
    template < typename GUM_SCALAR >
    const CredalNet< GUM_SCALAR >&
       InferenceEngine< GUM_SCALAR >::credalNet() const {
      return *credalNet_;
    }

    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::eraseAllEvidence() {
      evidence_.clear();
      query_.clear();
      /*
      marginalMin_.clear();
      marginalMax_.clear();
      oldMarginalMin_.clear();
      oldMarginalMax_.clear();
      */
      initMarginals_();
      /*
      expectationMin_.clear();
      expectationMax_.clear();
      */
      initExpectations_();

      // marginalSets_.clear();
      initMarginalSets_();

      dynamicExpMin_.clear();
      dynamicExpMax_.clear();

      //_modal.clear();

      //_t0.clear();
      //_t1.clear();
    }

    /*
    template< typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::setIterStop ( const int &iter_stop ) {
      iterStop_ = iter_stop;
    }*/

    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::storeBNOpt(const bool value) {
      storeBNOpt_ = value;
    }

    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::storeVertices(const bool value) {
      storeVertices_ = value;

      if (value) initMarginalSets_();
    }

    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::setRepetitiveInd(const bool repetitive) {
      bool oldValue  = repetitiveInd_;
      repetitiveInd_ = repetitive;

      // do not compute clusters more than once
      if (repetitiveInd_ && !oldValue) repetitiveInit_();
    }

    template < typename GUM_SCALAR >
    bool InferenceEngine< GUM_SCALAR >::repetitiveInd() const {
      return repetitiveInd_;
    }

    /*
    template< typename GUM_SCALAR >
    int InferenceEngine< GUM_SCALAR >::iterStop () const {
      return iterStop_;
    }*/

    template < typename GUM_SCALAR >
    bool InferenceEngine< GUM_SCALAR >::storeVertices() const {
      return storeVertices_;
    }

    template < typename GUM_SCALAR >
    bool InferenceEngine< GUM_SCALAR >::storeBNOpt() const {
      return storeBNOpt_;
    }

    template < typename GUM_SCALAR >
    VarMod2BNsMap< GUM_SCALAR >*
       InferenceEngine< GUM_SCALAR >::getVarMod2BNsMap() {
      return &dbnOpt_;
    }
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::insertModalsFile(const std::string& path) {
      std::ifstream mod_stream(path.c_str(), std::ios::in);

      if (!mod_stream.good()) {
        GUM_ERROR(OperationNotAllowed,
                  "void InferenceEngine< GUM_SCALAR "
                  ">::insertModals(const std::string & path) : "
                  "could not open input file : "
                     << path);
      }

      if (!modal_.empty()) modal_.clear();

      std::string line, tmp;
      char *      cstr, *p;

      while (mod_stream.good()) {
        getline(mod_stream, line);

        if (line.size() == 0) continue;

        cstr = new char[line.size() + 1];
        strcpy(cstr, line.c_str());

        p   = strtok(cstr, " ");
        tmp = p;

        std::vector< GUM_SCALAR > values;
        p = strtok(nullptr, " ");

        while (p != nullptr) {
          values.push_back(GUM_SCALAR(atof(p)));
          p = strtok(nullptr, " ");
        }   // end of : line

        modal_.insert(tmp, values);   //[tmp] = values;

        delete[] p;
        delete[] cstr;
      }   // end of : file

      mod_stream.close();

      initExpectations_();
    }
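    // Note on the modalities file parsed above: each non-empty line is expected to
    // hold a variable name followed by whitespace-separated numeric values, one
    // per modality of that variable. The names and values below are hypothetical:
    //
    //   temperature 0 1 2
    //   pressure    10 20 30
    //
    // The values are stored in modal_ and later used by initExpectations_ and
    // updateExpectations_ to compute lower and upper expectations.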
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::insertModals(
       const std::map< std::string, std::vector< GUM_SCALAR > >& modals) {
      if (!modal_.empty()) modal_.clear();

      for (auto it = modals.cbegin(), theEnd = modals.cend(); it != theEnd; ++it) {
        NodeId id;

        try {
          id = credalNet_->current_bn().idFromName(it->first);
        } catch (NotFound& err) {
          GUM_SHOWERROR(err);
          continue;
        }

        // check that modals are net compatible
        auto dSize = credalNet_->current_bn().variable(id).domainSize();

        if (dSize != it->second.size()) continue;

        // GUM_ERROR(OperationNotAllowed, "void InferenceEngine< GUM_SCALAR
        // >::insertModals( const std::map< std::string, std::vector< GUM_SCALAR > >
        // &modals) : modalities does not respect variable cardinality : " <<
        // credalNet_->current_bn().variable( id ).name() << " : " << dSize << " != "
        // << it->second.size());

        modal_.insert(it->first, it->second);   //[ it->first ] = it->second;
      }

      //_modal = modals;

      initExpectations_();
    }

    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::insertEvidence(
       const std::map< std::string, std::vector< GUM_SCALAR > >& eviMap) {
      if (!evidence_.empty()) evidence_.clear();

      for (auto it = eviMap.cbegin(), theEnd = eviMap.cend(); it != theEnd; ++it) {
        NodeId id;

        try {
          id = credalNet_->current_bn().idFromName(it->first);
        } catch (NotFound& err) {
          GUM_SHOWERROR(err);
          continue;
        }

        evidence_.insert(id, it->second);
      }
    }
    // check that observed variables DO exist in the network (otherwise Lazy
    // reports an error and the app crashes)
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::insertEvidence(
       const NodeProperty< std::vector< GUM_SCALAR > >& evidence) {
      if (!evidence_.empty()) evidence_.clear();

      // use cbegin() to get const_iterator when available in aGrUM hashtables
      for (const auto& elt: evidence) {
        try {
          credalNet_->current_bn().variable(elt.first);
        } catch (NotFound& err) {
          GUM_SHOWERROR(err);
          continue;
        }

        evidence_.insert(elt.first, elt.second);
      }
    }
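    // Illustrative sketch (not part of the library): evidence can be passed either
    // by variable name or by NodeId; each vector gives one value per modality of
    // the corresponding variable. The engine object and the names below are
    // hypothetical.
    //
    //   std::map< std::string, std::vector< double > > evi;
    //   evi["temperature"] = {0, 1, 0};   // the second modality is observed
    //   engine.insertEvidence(evi);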
    template < typename GUM_SCALAR >
    void
       InferenceEngine< GUM_SCALAR >::insertEvidenceFile(const std::string& path) {
      std::ifstream evi_stream(path.c_str(), std::ios::in);

      if (!evi_stream.good()) {
        GUM_ERROR(IOError,
                  "void InferenceEngine< GUM_SCALAR "
                  ">::insertEvidence(const std::string & path) : could not "
                  "open input file : "
                     << path);
      }

      if (!evidence_.empty()) evidence_.clear();

      std::string line, tmp;
      char *      cstr, *p;

      while (evi_stream.good()
             && std::strcmp(line.c_str(), "[EVIDENCE]") != 0) {
        getline(evi_stream, line);
      }

      while (evi_stream.good()) {
        getline(evi_stream, line);

        if (std::strcmp(line.c_str(), "[QUERY]") == 0) break;

        if (line.size() == 0) continue;

        cstr = new char[line.size() + 1];
        strcpy(cstr, line.c_str());

        p   = strtok(cstr, " ");
        tmp = p;

        // if user input is wrong
        NodeId node = -1;

        try {
          node = credalNet_->current_bn().idFromName(tmp);
        } catch (NotFound& err) {
          GUM_SHOWERROR(err);
          continue;
        }

        std::vector< GUM_SCALAR > values;
        p = strtok(nullptr, " ");

        while (p != nullptr) {
          values.push_back(GUM_SCALAR(atof(p)));
          p = strtok(nullptr, " ");
        }   // end of : line

        evidence_.insert(node, values);

        delete[] p;
        delete[] cstr;
      }   // end of : file

      evi_stream.close();
    }
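    // Note on the evidence file parsed above: lines are skipped until a line equal
    // to "[EVIDENCE]" is met; each following non-empty line holds a variable name
    // and one value per modality, and parsing stops at an optional "[QUERY]" line.
    // The file content below is hypothetical:
    //
    //   [EVIDENCE]
    //   temperature 0 1 0
    //   [QUERY]
    //   pressure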
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::insertQuery(
       const NodeProperty< std::vector< bool > >& query) {
      if (!query_.empty()) query_.clear();

      for (const auto& elt: query) {
        try {
          credalNet_->current_bn().variable(elt.first);
        } catch (NotFound& err) {
          GUM_SHOWERROR(err);
          continue;
        }

        query_.insert(elt.first, elt.second);
      }
    }
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::insertQueryFile(const std::string& path) {
      std::ifstream evi_stream(path.c_str(), std::ios::in);

      if (!evi_stream.good()) {
        GUM_ERROR(IOError,
                  "void InferenceEngine< GUM_SCALAR >::insertQuery(const "
                  "std::string & path) : could not open input file : "
                     << path);
      }

      if (!query_.empty()) query_.clear();

      std::string line, tmp;
      char *      cstr, *p;

      while (evi_stream.good() && std::strcmp(line.c_str(), "[QUERY]") != 0) {
        getline(evi_stream, line);
      }

      while (evi_stream.good()) {
        getline(evi_stream, line);

        if (std::strcmp(line.c_str(), "[EVIDENCE]") == 0) break;

        if (line.size() == 0) continue;

        cstr = new char[line.size() + 1];
        strcpy(cstr, line.c_str());

        p   = strtok(cstr, " ");
        tmp = p;

        // if user input is wrong
        NodeId node = -1;

        try {
          node = credalNet_->current_bn().idFromName(tmp);
        } catch (NotFound& err) {
          GUM_SHOWERROR(err);
          continue;
        }

        auto dSize = credalNet_->current_bn().variable(node).domainSize();

        p = strtok(nullptr, " ");

        if (p == nullptr) {
          query_.insert(node, std::vector< bool >(dSize, true));
        } else {
          std::vector< bool > values(dSize, false);

          while (p != nullptr) {
            if ((Size)atoi(p) >= dSize)
              GUM_ERROR(OutOfBounds,
                        "void InferenceEngine< GUM_SCALAR "
                        ">::insertQuery(const std::string & path) : "
                        "query modality is higher or equal to "
                        "cardinality");

            values[atoi(p)] = true;
            p               = strtok(nullptr, " ");
          }   // end of : line

          query_.insert(node, values);
        }

        delete[] p;
        delete[] cstr;
      }   // end of : file

      evi_stream.close();
    }
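    // Note on the query file parsed above: lines are skipped until a line equal to
    // "[QUERY]"; each following non-empty line holds a variable name, optionally
    // followed by the modality indices to monitor (all modalities are queried when
    // no index is given). The file content below is hypothetical:
    //
    //   [QUERY]
    //   temperature
    //   pressure 0 2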
    template < typename GUM_SCALAR >
    INLINE Potential< GUM_SCALAR > InferenceEngine< GUM_SCALAR >::marginalMin(
       const std::string& varName) const {
      return marginalMin(credalNet_->current_bn().idFromName(varName));
    }

    template < typename GUM_SCALAR >
    INLINE Potential< GUM_SCALAR > InferenceEngine< GUM_SCALAR >::marginalMax(
       const std::string& varName) const {
      return marginalMax(credalNet_->current_bn().idFromName(varName));
    }

    template < typename GUM_SCALAR >
    gum::Potential< GUM_SCALAR >
       InferenceEngine< GUM_SCALAR >::marginalMin(const NodeId id) const {
      try {
        Potential< GUM_SCALAR > res;
        res.add(credalNet_->current_bn().variable(id));
        res.fillWith(marginalMin_[id]);
        return res;
      } catch (NotFound& err) { throw(err); }
    }

    template < typename GUM_SCALAR >
    gum::Potential< GUM_SCALAR >
       InferenceEngine< GUM_SCALAR >::marginalMax(const NodeId id) const {
      try {
        Potential< GUM_SCALAR > res;
        res.add(credalNet_->current_bn().variable(id));
        res.fillWith(marginalMax_[id]);
        return res;
      } catch (NotFound& err) { throw(err); }
    }
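    // Illustrative sketch (not part of the library): once a concrete engine has
    // run its inference, the pair [marginalMin, marginalMax] gives, for each
    // modality, the lower and upper probability bounds. The engine object and
    // variable name below are hypothetical.
    //
    //   gum::Potential< double > low  = engine.marginalMin("temperature");
    //   gum::Potential< double > high = engine.marginalMax("temperature");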
    template < typename GUM_SCALAR >
    const GUM_SCALAR& InferenceEngine< GUM_SCALAR >::expectationMin(
       const std::string& varName) const {
      try {
        return expectationMin_[credalNet_->current_bn().idFromName(varName)];
      } catch (NotFound& err) { throw(err); }
    }

    template < typename GUM_SCALAR >
    const GUM_SCALAR& InferenceEngine< GUM_SCALAR >::expectationMax(
       const std::string& varName) const {
      try {
        return expectationMax_[credalNet_->current_bn().idFromName(varName)];
      } catch (NotFound& err) { throw(err); }
    }

    template < typename GUM_SCALAR >
    const GUM_SCALAR&
       InferenceEngine< GUM_SCALAR >::expectationMin(const NodeId id) const {
      try {
        return expectationMin_[id];
      } catch (NotFound& err) { throw(err); }
    }

    template < typename GUM_SCALAR >
    const GUM_SCALAR&
       InferenceEngine< GUM_SCALAR >::expectationMax(const NodeId id) const {
      try {
        return expectationMax_[id];
      } catch (NotFound& err) { throw(err); }
    }
    template < typename GUM_SCALAR >
    const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::dynamicExpMin(
       const std::string& varName) const {
      std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
                           "GUM_SCALAR >::dynamicExpMin ( const std::string & "
                           "varName ) const : ";

      if (dynamicExpMin_.empty())
        GUM_ERROR(OperationNotAllowed,
                  errTxt + "_dynamicExpectations() needs to be called before");

      if (!dynamicExpMin_.exists(
             varName) /*dynamicExpMin_.find(varName) == dynamicExpMin_.end()*/)
        GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName);

      return dynamicExpMin_[varName];
    }

    template < typename GUM_SCALAR >
    const std::vector< GUM_SCALAR >& InferenceEngine< GUM_SCALAR >::dynamicExpMax(
       const std::string& varName) const {
      std::string errTxt = "const std::vector< GUM_SCALAR > & InferenceEngine< "
                           "GUM_SCALAR >::dynamicExpMax ( const std::string & "
                           "varName ) const : ";

      if (dynamicExpMax_.empty())
        GUM_ERROR(OperationNotAllowed,
                  errTxt + "_dynamicExpectations() needs to be called before");

      if (!dynamicExpMax_.exists(
             varName) /*dynamicExpMin_.find(varName) == dynamicExpMin_.end()*/)
        GUM_ERROR(NotFound, errTxt + "variable name not found : " << varName);

      return dynamicExpMax_[varName];
    }
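    // Note: the dynamic expectations above are only meaningful for dynamic (time
    // sliced) networks, whose variables follow the "name_timestep" naming pattern
    // used throughout this file, and only after dynamicExpectations() has been
    // called, either by the inference algorithm or by the user.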
    template < typename GUM_SCALAR >
    const std::vector< std::vector< GUM_SCALAR > >&
       InferenceEngine< GUM_SCALAR >::vertices(const NodeId id) const {
      return marginalSets_[id];
    }

    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::saveMarginals(
       const std::string& path) const {
      std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);

      if (!m_stream.good()) {
        GUM_ERROR(IOError,
                  "void InferenceEngine< GUM_SCALAR >::saveMarginals(const "
                  "std::string & path) const : could not open output file "
                  ": "
                     << path);
      }

      for (const auto& elt: marginalMin_) {
        Size esize = Size(elt.second.size());

        for (Size mod = 0; mod < esize; mod++) {
          m_stream << credalNet_->current_bn().variable(elt.first).name() << " "
                   << mod << " " << (elt.second)[mod] << " "
                   << marginalMax_[elt.first][mod] << std::endl;
        }
      }

      m_stream.close();
    }
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::saveExpectations(
       const std::string& path) const {
      if (dynamicExpMin_.empty())   //_modal.empty())
        return;

      // else not here, to keep the const (natural with a saving process)
      // else if(dynamicExpMin_.empty() || dynamicExpMax_.empty())
      //_dynamicExpectations(); // works with or without a dynamic network

      std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);

      if (!m_stream.good()) {
        GUM_ERROR(IOError,
                  "void InferenceEngine< GUM_SCALAR "
                  ">::saveExpectations(const std::string & path) : could "
                  "not open output file : "
                     << path);
      }

      for (const auto& elt: dynamicExpMin_) {
        m_stream << elt.first;   // it->first;

        // iterates over a vector
        for (const auto& elt2: elt.second) {
          m_stream << " " << elt2;
        }

        m_stream << std::endl;
      }

      for (const auto& elt: dynamicExpMax_) {
        m_stream << elt.first;

        // iterates over a vector
        for (const auto& elt2: elt.second) {
          m_stream << " " << elt2;
        }

        m_stream << std::endl;
      }

      m_stream.close();
    }

    template < typename GUM_SCALAR >
    std::string InferenceEngine< GUM_SCALAR >::toString() const {
      std::stringstream output;
      output << std::endl;

      // use cbegin() when available
      for (const auto& elt: marginalMin_) {
        Size esize = Size(elt.second.size());

        for (Size mod = 0; mod < esize; mod++) {
          output << "P(" << credalNet_->current_bn().variable(elt.first).name()
                 << "=" << mod << "|e) = [ ";
          output << marginalMin_[elt.first][mod] << ", "
                 << marginalMax_[elt.first][mod] << " ]";

          if (!query_.empty())
            if (query_.exists(elt.first) && query_[elt.first][mod])
              output << " QUERY";

          output << std::endl;
        }

        output << std::endl;
      }

      return output.str();
    }
    template < typename GUM_SCALAR >
    void
       InferenceEngine< GUM_SCALAR >::saveVertices(const std::string& path) const {
      std::ofstream m_stream(path.c_str(), std::ios::out | std::ios::trunc);

      if (!m_stream.good()) {
        GUM_ERROR(IOError,
                  "void InferenceEngine< GUM_SCALAR >::saveVertices(const "
                  "std::string & path) : could not open output file : "
                     << path);
      }

      for (const auto& elt: marginalSets_) {
        m_stream << credalNet_->current_bn().variable(elt.first).name()
                 << std::endl;

        for (const auto& elt2: elt.second) {
          m_stream << "[";
          bool first = true;

          for (const auto& elt3: elt2) {
            // separate coordinates with a comma (none before the first one)
            if (!first) {
              m_stream << ",";
            } else {
              first = false;
            }

            m_stream << elt3;
          }

          m_stream << "]\n";
        }
      }

      m_stream.close();
    }
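    // initMarginals_ (below) seeds every lower bound at 1 and every upper bound at
    // 0; since any probability computed during inference lies in [0, 1], the first
    // value encountered for a modality necessarily tightens both bounds.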
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::initMarginals_() {
      marginalMin_.clear();
      marginalMax_.clear();
      oldMarginalMin_.clear();
      oldMarginalMax_.clear();

      for (auto node: credalNet_->current_bn().nodes()) {
        auto dSize = credalNet_->current_bn().variable(node).domainSize();
        marginalMin_.insert(node, std::vector< GUM_SCALAR >(dSize, 1));
        oldMarginalMin_.insert(node, std::vector< GUM_SCALAR >(dSize, 1));

        marginalMax_.insert(node, std::vector< GUM_SCALAR >(dSize, 0));
        oldMarginalMax_.insert(node, std::vector< GUM_SCALAR >(dSize, 0));
      }
    }

    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::initMarginalSets_() {
      marginalSets_.clear();

      if (!storeVertices_) return;

      for (auto node: credalNet_->current_bn().nodes())
        marginalSets_.insert(node, std::vector< std::vector< GUM_SCALAR > >());
    }

    // since only monitored variables in modal_ will be able to compute
    // expectations, it is useless to initialize those for all variables
    // modal_ variables will always be checked further, so it is not necessary
    // to check it here, but doing so will use less memory
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::initExpectations_() {
      expectationMin_.clear();
      expectationMax_.clear();

      if (modal_.empty()) return;

      for (auto node: credalNet_->current_bn().nodes()) {
        std::string var_name, time_step;

        var_name   = credalNet_->current_bn().variable(node).name();
        auto delim = var_name.find_first_of("_");
        var_name   = var_name.substr(0, delim);

        if (!modal_.exists(var_name)) continue;

        expectationMin_.insert(node, modal_[var_name].back());
        expectationMax_.insert(node, modal_[var_name].front());
      }
    }
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::dynamicExpectations() {
      dynamicExpectations_();
    }

    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::dynamicExpectations_() {
      // no modals, no expectations computed during inference
      if (expectationMin_.empty() || modal_.empty()) return;

      // already called by the algorithm or the user
      if (dynamicExpMax_.size() > 0 && dynamicExpMin_.size() > 0) return;

      // typedef typename std::map< int, GUM_SCALAR > innerMap;
      using innerMap = typename gum::HashTable< int, GUM_SCALAR >;

      // typedef typename std::map< std::string, innerMap > outerMap;
      using outerMap = typename gum::HashTable< std::string, innerMap >;

      // typedef typename std::map< std::string, std::vector< GUM_SCALAR > > mod;

      // if the network is not dynamic, save expectationMin_ and expectationMax_
      // directly (comes down to the same thing, but faster)
      outerMap expectationsMin, expectationsMax;

      for (const auto& elt: expectationMin_) {
        std::string var_name, time_step;

        var_name   = credalNet_->current_bn().variable(elt.first).name();
        auto delim = var_name.find_first_of("_");
        time_step  = var_name.substr(delim + 1, var_name.size());
        var_name   = var_name.substr(0, delim);

        // to be sure (don't store expectations of variables that are not
        // monitored), although it should be taken care of before this point
        if (!modal_.exists(var_name)) continue;

        expectationsMin.getWithDefault(var_name, innerMap())
           .getWithDefault(atoi(time_step.c_str()), 0)
           = elt.second;   // we iterate with min iterators
        expectationsMax.getWithDefault(var_name, innerMap())
           .getWithDefault(atoi(time_step.c_str()), 0)
           = expectationMax_[elt.first];
      }

      for (const auto& elt: expectationsMin) {
        typename std::vector< GUM_SCALAR > dynExp(elt.second.size());

        for (const auto& elt2: elt.second)
          dynExp[elt2.first] = elt2.second;

        dynamicExpMin_.insert(elt.first, dynExp);
      }

      for (const auto& elt: expectationsMax) {
        typename std::vector< GUM_SCALAR > dynExp(elt.second.size());

        for (const auto& elt2: elt.second) {
          dynExp[elt2.first] = elt2.second;
        }

        dynamicExpMax_.insert(elt.first, dynExp);
      }
    }
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::repetitiveInit_() {
      timeSteps_ = 0;
      t0_.clear();
      t1_.clear();

      // t = 0 vars belong to t0_ as keys
      for (auto node: credalNet_->current_bn().dag().nodes()) {
        std::string var_name = credalNet_->current_bn().variable(node).name();
        auto        delim    = var_name.find_first_of("_");

        if (delim > var_name.size()) {
          GUM_ERROR(InvalidArgument,
                    "void InferenceEngine< GUM_SCALAR "
                    ">::repetitiveInit_() : the network does not "
                    "appear to be dynamic");
        }

        std::string time_step = var_name.substr(delim + 1, 1);

        if (time_step.compare("0") == 0)
          t0_.insert(node, std::vector< NodeId >());
      }

      // t = 1 vars belong to either t0_ as member value or t1_ as keys
      for (const auto& node: credalNet_->current_bn().dag().nodes()) {
        std::string var_name  = credalNet_->current_bn().variable(node).name();
        auto        delim     = var_name.find_first_of("_");
        std::string time_step = var_name.substr(delim + 1, var_name.size());
        var_name              = var_name.substr(0, delim);
        delim                 = time_step.find_first_of("_");
        time_step             = time_step.substr(0, delim);

        if (time_step.compare("1") == 0) {
          bool found = false;

          for (const auto& elt: t0_) {
            std::string var_0_name
               = credalNet_->current_bn().variable(elt.first).name();
            delim      = var_0_name.find_first_of("_");
            var_0_name = var_0_name.substr(0, delim);

            if (var_name.compare(var_0_name) == 0) {
              const Potential< GUM_SCALAR >* potential(
                 &credalNet_->current_bn().cpt(node));
              const Potential< GUM_SCALAR >* potential2(
                 &credalNet_->current_bn().cpt(elt.first));

              if (potential->domainSize() == potential2->domainSize())
                t0_[elt.first].push_back(node);
              else
                t1_.insert(node, std::vector< NodeId >());

              found = true;
              break;
            }
          }

          if (!found) { t1_.insert(node, std::vector< NodeId >()); }
        }
      }

      // t > 1 vars belong to either t0_ or t1_ as member value
      // remember timeSteps_
      for (auto node: credalNet_->current_bn().dag().nodes()) {
        std::string var_name  = credalNet_->current_bn().variable(node).name();
        auto        delim     = var_name.find_first_of("_");
        std::string time_step = var_name.substr(delim + 1, var_name.size());
        var_name              = var_name.substr(0, delim);
        delim                 = time_step.find_first_of("_");
        time_step             = time_step.substr(0, delim);

        if (time_step.compare("0") != 0 && time_step.compare("1") != 0) {
          // keep max time_step
          if (atoi(time_step.c_str()) > timeSteps_)
            timeSteps_ = atoi(time_step.c_str());

          std::string var_0_name;
          bool        found = false;

          for (const auto& elt: t0_) {
            std::string var_0_name
               = credalNet_->current_bn().variable(elt.first).name();
            delim      = var_0_name.find_first_of("_");
            var_0_name = var_0_name.substr(0, delim);

            if (var_name.compare(var_0_name) == 0) {
              const Potential< GUM_SCALAR >* potential(
                 &credalNet_->current_bn().cpt(node));
              const Potential< GUM_SCALAR >* potential2(
                 &credalNet_->current_bn().cpt(elt.first));

              if (potential->domainSize() == potential2->domainSize()) {
                t0_[elt.first].push_back(node);
                found = true;
                break;
              }
            }
          }

          if (!found) {
            for (const auto& elt: t1_) {
              std::string var_0_name
                 = credalNet_->current_bn().variable(elt.first).name();
              auto delim = var_0_name.find_first_of("_");
              var_0_name = var_0_name.substr(0, delim);

              if (var_name.compare(var_0_name) == 0) {
                const Potential< GUM_SCALAR >* potential(
                   &credalNet_->current_bn().cpt(node));
                const Potential< GUM_SCALAR >* potential2(
                   &credalNet_->current_bn().cpt(elt.first));

                if (potential->domainSize() == potential2->domainSize()) {
                  t1_[elt.first].push_back(node);
                  break;
                }
              }
            }
          }
        }
      }
    }
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::updateExpectations_(
       const NodeId& id, const std::vector< GUM_SCALAR >& vertex) {
      std::string var_name = credalNet_->current_bn().variable(id).name();
      auto        delim    = var_name.find_first_of("_");

      var_name = var_name.substr(0, delim);

      if (modal_.exists(var_name) /*modal_.find(var_name) != modal_.end()*/) {
        GUM_SCALAR exp   = 0;
        auto       vsize = vertex.size();

        for (Size mod = 0; mod < vsize; mod++)
          exp += vertex[mod] * modal_[var_name][mod];

        if (exp > expectationMax_[id]) expectationMax_[id] = exp;

        if (exp < expectationMin_[id]) expectationMin_[id] = exp;
      }
    }
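    // updateCredalSets_ (below) appends a candidate vertex to the credal set of a
    // node only if it is not already present (componentwise tolerance of 1e-6);
    // when elimRedund is requested and enough vertices are available, redundant
    // vertices are removed through the lrs wrapper (LRSWrapper::elimRedundVrep).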
    template < typename GUM_SCALAR >
    void InferenceEngine< GUM_SCALAR >::updateCredalSets_(
       const NodeId&                    id,
       const std::vector< GUM_SCALAR >& vertex,
       const bool&                      elimRedund) {
      auto& nodeCredalSet = marginalSets_[id];
      auto  dsize         = vertex.size();

      bool eq = true;

      for (auto it = nodeCredalSet.cbegin(), itEnd = nodeCredalSet.cend();
           it != itEnd;
           ++it) {
        eq = true;

        for (Size i = 0; i < dsize; i++) {
          if (std::fabs(vertex[i] - (*it)[i]) > 1e-6) {
            eq = false;
            break;
          }
        }

        if (eq) break;
      }

      if (!eq || nodeCredalSet.size() == 0) {
        nodeCredalSet.push_back(vertex);
        return;
      } else
        return;

      // because of next lambda return condition
      if (nodeCredalSet.size() == 1) return;

      // check that the point and all previously added ones are not inside the
      // actual polytope
      auto itEnd = std::remove_if(
         nodeCredalSet.begin(),
         nodeCredalSet.end(),
         [&](const std::vector< GUM_SCALAR >& v) -> bool {
           for (auto jt       = v.cbegin(),
                     jtEnd    = v.cend(),
                     minIt    = marginalMin_[id].cbegin(),
                     minItEnd = marginalMin_[id].cend(),
                     maxIt    = marginalMax_[id].cbegin(),
                     maxItEnd = marginalMax_[id].cend();
                jt != jtEnd && minIt != minItEnd && maxIt != maxItEnd;
                ++jt, ++minIt, ++maxIt) {
             if ((std::fabs(*jt - *minIt) < 1e-6 || std::fabs(*jt - *maxIt) < 1e-6)
                 && std::fabs(*minIt - *maxIt) > 1e-6)
               return false;
           }
           return true;
         });

      nodeCredalSet.erase(itEnd, nodeCredalSet.end());

      // we need at least 2 points to make a convex combination
      if (!elimRedund || nodeCredalSet.size() <= 2) return;

      // there may be points not inside the polytope but on one of its facets,
      // meaning it is still a convex combination of vertices of this facet.
      // Here we need lrs.
      LRSWrapper< GUM_SCALAR > lrsWrapper;
      lrsWrapper.setUpV((unsigned int)dsize, (unsigned int)(nodeCredalSet.size()));

      for (const auto& vtx: nodeCredalSet)
        lrsWrapper.fillV(vtx);

      lrsWrapper.elimRedundVrep();

      marginalSets_[id] = lrsWrapper.getOutput();
    }
    template < typename GUM_SCALAR >
    const NodeProperty< std::vector< NodeId > >&
       InferenceEngine< GUM_SCALAR >::getT0Cluster() const {
      return t0_;
    }

    template < typename GUM_SCALAR >
    const NodeProperty< std::vector< NodeId > >&
       InferenceEngine< GUM_SCALAR >::getT1Cluster() const {
      return t1_;
    }
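    // computeEpsilon_ (below) returns the largest absolute change of any marginal
    // bound since the previous iteration; concrete engines can use it as the
    // stopping criterion of the approximation scheme. The loop is parallelised
    // with OpenMP, each thread reducing its own local maximum (tEps) into eps.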
    template < typename GUM_SCALAR >
    inline const GUM_SCALAR InferenceEngine< GUM_SCALAR >::computeEpsilon_() {
      GUM_SCALAR eps = 0;
#pragma omp parallel
      {
        GUM_SCALAR tEps = 0;
        GUM_SCALAR delta;

        /// int tId = getThreadNumber();
        int nsize = int(marginalMin_.size());

#pragma omp for

        for (int i = 0; i < nsize; i++) {
          auto dSize = marginalMin_[i].size();

          for (Size j = 0; j < dSize; j++) {
            // on min
            delta = marginalMin_[i][j] - oldMarginalMin_[i][j];
            delta = (delta < 0) ? (-delta) : delta;
            tEps  = (tEps < delta) ? delta : tEps;

            // on max
            delta = marginalMax_[i][j] - oldMarginalMax_[i][j];
            delta = (delta < 0) ? (-delta) : delta;
            tEps  = (tEps < delta) ? delta : tEps;

            oldMarginalMin_[i][j] = marginalMin_[i][j];
            oldMarginalMax_[i][j] = marginalMax_[i][j];
          }
        }   // end of : all variables

#pragma omp critical(epsilon_max)
        {
#pragma omp flush(eps)
          eps = (eps < tEps) ? tEps : eps;
        }
      }

      return eps;
    }
  }   // namespace credal
}   // namespace gum