/*  This file is part of libDAI - http://www.libdai.org/
 *
 *  libDAI is licensed under the terms of the GNU General Public License version
 *  2, or (at your option) any later version. libDAI is distributed without any
 *  warranty. See the file COPYING for more details.
 *
 *  Copyright (C) 2006-2009  Joris Mooij  [joris dot mooij at libdai dot org]
 *  Copyright (C) 2006-2007  Radboud University Nijmegen, The Netherlands
 */


#include <dai/hak.h>
#include <dai/util.h>
#include <dai/exceptions.h>


namespace dai {


using namespace std;


const char *HAK::Name = "HAK";


void HAK::setProperties( const PropertySet &opts ) {
    assert( opts.hasKey("tol") );
    assert( opts.hasKey("maxiter") );
    assert( opts.hasKey("verbose") );
    assert( opts.hasKey("doubleloop") );
    assert( opts.hasKey("clusters") );

    props.tol = opts.getStringAs<double>("tol");
    props.maxiter = opts.getStringAs<size_t>("maxiter");
    props.verbose = opts.getStringAs<size_t>("verbose");
    props.doubleloop = opts.getStringAs<bool>("doubleloop");
    props.clusters = opts.getStringAs<Properties::ClustersType>("clusters");

    if( opts.hasKey("loopdepth") )
        props.loopdepth = opts.getStringAs<size_t>("loopdepth");
    else
        assert( props.clusters != Properties::ClustersType::LOOP );
    if( opts.hasKey("damping") )
        props.damping = opts.getStringAs<double>("damping");
    else
        props.damping = 0.0;
}


PropertySet HAK::getProperties() const {
    PropertySet opts;
    opts.Set( "tol", props.tol );
    opts.Set( "maxiter", props.maxiter );
    opts.Set( "verbose", props.verbose );
    opts.Set( "doubleloop", props.doubleloop );
    opts.Set( "clusters", props.clusters );
    opts.Set( "loopdepth", props.loopdepth );
    opts.Set( "damping", props.damping );
    return opts;
}


string HAK::printProperties() const {
    stringstream s( stringstream::out );
    s << "[";
    s << "tol=" << props.tol << ",";
    s << "maxiter=" << props.maxiter << ",";
    s << "verbose=" << props.verbose << ",";
    s << "doubleloop=" << props.doubleloop << ",";
    s << "clusters=" << props.clusters << ",";
    s << "loopdepth=" << props.loopdepth << ",";
    s << "damping=" << props.damping << "]";
    return s.str();
}


void HAK::constructMessages() {
    // Create outer beliefs
    _Qa.clear();
    _Qa.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        _Qa.push_back( Factor( OR(alpha).vars() ) );

    // Create inner beliefs
    _Qb.clear();
    _Qb.reserve( nrIRs() );
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        _Qb.push_back( Factor( IR(beta) ) );

    // Create messages
    _muab.clear();
    _muab.reserve( nrORs() );
    _muba.clear();
    _muba.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        _muab.push_back( vector<Factor>() );
        _muba.push_back( vector<Factor>() );
        _muab[alpha].reserve( nbOR(alpha).size() );
        _muba[alpha].reserve( nbOR(alpha).size() );
        foreach( const Neighbor &beta, nbOR(alpha) ) {
            _muab[alpha].push_back( Factor( IR(beta) ) );
            _muba[alpha].push_back( Factor( IR(beta) ) );
        }
    }
}


HAK::HAK( const RegionGraph &rg, const PropertySet &opts ) : DAIAlgRG(rg), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
    setProperties( opts );

    constructMessages();
}


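/* findLoopClusters() recursively grows the cluster newcl around the variable root by walking
 * over Markov blankets (fg.delta). As soon as the cluster contains at least two variables and
 * can be closed back to root, it is inserted into allcl; the recursion depth is bounded by
 * length, so only loops up to the configured loopdepth are considered.
 */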
void HAK::findLoopClusters( const FactorGraph &fg, std::set<VarSet> &allcl, VarSet newcl, const Var &root, size_t length, VarSet vars ) {
    for( VarSet::const_iterator in = vars.begin(); in != vars.end(); in++ ) {
        VarSet ind = fg.delta( fg.findVar( *in ) );
        if( (newcl.size()) >= 2 && ind.contains( root ) ) {
            allcl.insert( newcl | *in );
        }
        else if( length > 1 )
            findLoopClusters( fg, allcl, newcl | *in, root, length - 1, ind / newcl );
    }
}


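/* The FactorGraph constructor below builds the region graph from clusters chosen according
 * to props.clusters: MIN takes the factor clusters of fg, DELTA takes the Markov blanket
 * (the variable together with its neighbours) of every variable, and LOOP starts from the
 * factor clusters and adds all clusters of variables lying on loops of length at most
 * props.loopdepth.
 */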
HAK::HAK( const FactorGraph &fg, const PropertySet &opts ) : DAIAlgRG(), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
    setProperties( opts );

    vector<VarSet> cl;
    if( props.clusters == Properties::ClustersType::MIN ) {
        cl = fg.Cliques();
    } else if( props.clusters == Properties::ClustersType::DELTA ) {
        for( size_t i = 0; i < fg.nrVars(); i++ )
            cl.push_back( fg.Delta(i) );
    } else if( props.clusters == Properties::ClustersType::LOOP ) {
        cl = fg.Cliques();
        set<VarSet> scl;
        for( size_t i0 = 0; i0 < fg.nrVars(); i0++ ) {
            VarSet i0d = fg.delta(i0);
            if( props.loopdepth > 1 )
                findLoopClusters( fg, scl, fg.var(i0), fg.var(i0), props.loopdepth - 1, fg.delta(i0) );
        }
        for( set<VarSet>::const_iterator c = scl.begin(); c != scl.end(); c++ )
            cl.push_back( *c );
        if( props.verbose >= 3 ) {
            cerr << Name << " uses the following clusters: " << endl;
            for( vector<VarSet>::const_iterator cli = cl.begin(); cli != cl.end(); cli++ )
                cerr << *cli << endl;
        }
    } else
        DAI_THROW(UNKNOWN_ENUM_VALUE);

    RegionGraph rg( fg, cl );
    RegionGraph::operator=( rg );

    constructMessages();

    if( props.verbose >= 3 )
        cerr << Name << " regiongraph: " << *this << endl;
}


string HAK::identify() const {
    return string(Name) + printProperties();
}


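/* init( ns ) resets only those beliefs and messages that involve variables in ns (e.g. after
 * a subset of factors has changed), whereas init() below resets all outer and inner beliefs
 * and all messages to uniform.
 */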
void HAK::init( const VarSet &ns ) {
    for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
        if( alpha->vars().intersects( ns ) )
            alpha->fill( 1.0 / alpha->states() );

    for( size_t beta = 0; beta < nrIRs(); beta++ )
        if( IR(beta).intersects( ns ) ) {
            _Qb[beta].fill( 1.0 );
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muab( alpha, _beta ).fill( 1.0 );
                muba( alpha, _beta ).fill( 1.0 );
            }
        }
}


void HAK::init() {
    for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
        alpha->fill( 1.0 / alpha->states() );

    for( vector<Factor>::iterator beta = _Qb.begin(); beta != _Qb.end(); beta++ )
        beta->fill( 1.0 / beta->states() );

    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        foreach( const Neighbor &beta, nbOR(alpha) ) {
            size_t _beta = beta.iter;
            muab( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
            muba( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
        }
}


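/* doGBP() runs the (optionally damped) GBP fixed-point iteration on the region graph. In the
 * notation of the code below, each pass updates, for every inner region beta and each of its
 * neighbouring outer regions alpha:
 *   muab(alpha,beta)  =  marginal of _Qa[alpha] on IR(beta), divided by muba(alpha,beta)
 *   _Qb[beta]         ~  prod_alpha muab(alpha,beta) ^ ( 1 / (nbIR(beta).size() + IR(beta).c()) )
 *   muba(alpha,beta)  =  _Qb[beta] / muab(alpha,beta)
 *   _Qa[alpha]        ~  ( OR(alpha) * prod_gamma muba(alpha,gamma) ) ^ ( 1 / OR(alpha).c() )
 * (where ~ denotes equality up to normalization). The iteration stops when the maximum change
 * in single-variable beliefs drops below props.tol or after props.maxiter passes.
 */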
double HAK::doGBP() {
    if( props.verbose >= 1 )
        cerr << "Starting " << identify() << "...";
    if( props.verbose >= 3 )
        cerr << endl;

    double tic = toc();

    // Check whether counting numbers won't lead to problems
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        assert( nbIR(beta).size() + IR(beta).c() != 0.0 );

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    Diffs diffs(nrVars(), 1.0);

    // Do several passes over the network until the maximum number of iterations has
    // been reached or until the maximum belief difference is smaller than the tolerance
    for( _iters = 0; _iters < props.maxiter && diffs.maxDiff() > props.tol; _iters++ ) {
        for( size_t beta = 0; beta < nrIRs(); beta++ ) {
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muab( alpha, _beta ) = _Qa[alpha].marginal(IR(beta)) / muba(alpha,_beta);
                /* TODO: INVESTIGATE THIS PROBLEM
                 *
                 * In some cases, the muab's can have very large entries because the muba's have very
                 * small entries. This may cause NaNs later on (e.g., multiplying large quantities may
                 * result in +inf; normalization then tries to calculate inf / inf which is NaN).
                 * A fix of this problem would consist in normalizing the messages muab.
                 * However, it is not obvious whether this is a real solution, because it has a
                 * negative performance impact and the NaNs seem to be a symptom of a fundamental
                 * numerical instability.
                 */
                muab(alpha,_beta).normalize();
            }

            Factor Qb_new;
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                Qb_new *= muab(alpha,_beta) ^ (1 / (nbIR(beta).size() + IR(beta).c()));
            }

            Qb_new.normalize();
            if( Qb_new.hasNaNs() ) {
                // TODO: WHAT TO DO IN THIS CASE?
                cerr << Name << "::doGBP: Qb_new has NaNs!" << endl;
                return 1.0;
            }
            /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
             *
             *   _Qb[beta] = Qb_new.makeZero(1e-100);
             */

            if( props.doubleloop || props.damping == 0.0 )
                _Qb[beta] = Qb_new; // no damping for double loop
            else
                _Qb[beta] = (Qb_new^(1.0 - props.damping)) * (_Qb[beta]^props.damping);

            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muba(alpha,_beta) = _Qb[beta] / muab(alpha,_beta);

                /* TODO: INVESTIGATE WHETHER THIS HACK (INVENTED BY KEES) TO PREVENT NANS MAKES SENSE
                 *
                 *   muba(beta,*alpha).makePositive(1e-100);
                 */

                Factor Qa_new = OR(alpha);
                foreach( const Neighbor &gamma, nbOR(alpha) )
                    Qa_new *= muba(alpha,gamma.iter);
                Qa_new ^= (1.0 / OR(alpha).c());
                Qa_new.normalize();
                if( Qa_new.hasNaNs() ) {
                    cerr << Name << "::doGBP: Qa_new has NaNs!" << endl;
                    return 1.0;
                }
                /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
                 *
                 *   _Qb[beta] = Qb_new.makeZero(1e-100);
                 */

                if( props.doubleloop || props.damping == 0.0 )
                    _Qa[alpha] = Qa_new; // no damping for double loop
                else
                    // FIXME: GEOMETRIC DAMPING IS SLOW!
                    _Qa[alpha] = (Qa_new^(1.0 - props.damping)) * (_Qa[alpha]^props.damping);
            }
        }

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); i++ ) {
            Factor new_belief = belief( var(i) );
            diffs.push( dist( new_belief, old_beliefs[i], Prob::DISTLINF ) );
            old_beliefs[i] = new_belief;
        }

        if( props.verbose >= 3 )
            cerr << Name << "::doGBP: maxdiff " << diffs.maxDiff() << " after " << _iters+1 << " passes" << endl;
    }

    if( diffs.maxDiff() > _maxdiff )
        _maxdiff = diffs.maxDiff();

    if( props.verbose >= 1 ) {
        if( diffs.maxDiff() > props.tol ) {
            if( props.verbose == 1 )
                cerr << endl;
            cerr << Name << "::doGBP: WARNING: not converged within " << props.maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
        } else {
            if( props.verbose >= 2 )
                cerr << Name << "::doGBP: ";
            cerr << "converged in " << _iters << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return diffs.maxDiff();
}


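/* doDoubleLoop() wraps doGBP() in an outer loop: it saves the original inner counting numbers,
 * clips negative ones to zero, and in every outer iteration compensates by multiplying each
 * outer region with _Qb[beta] ^ ((IR(beta).c() - org_IR_cs[beta]) / nbIR(beta).size()) for its
 * neighbouring inner regions before running doGBP() as the inner loop. The original outer
 * regions and counting numbers are restored before returning. This is the usual double-loop
 * construction for improving the convergence of GBP.
 */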
double HAK::doDoubleLoop() {
    if( props.verbose >= 1 )
        cerr << "Starting " << identify() << "...";
    if( props.verbose >= 3 )
        cerr << endl;

    double tic = toc();

    // Save original outer regions
    vector<FRegion> org_ORs = ORs;

    // Save original inner counting numbers and set negative counting numbers to zero
    vector<double> org_IR_cs( nrIRs(), 0.0 );
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        org_IR_cs[beta] = IR(beta).c();
        if( IR(beta).c() < 0.0 )
            IR(beta).c() = 0.0;
    }

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    Diffs diffs(nrVars(), 1.0);

    size_t outer_maxiter = props.maxiter;
    double outer_tol     = props.tol;
    size_t outer_verbose = props.verbose;
    double org_maxdiff   = _maxdiff;

    // Set parameters for inner loop
    props.maxiter = 5;
    props.verbose = outer_verbose ? outer_verbose - 1 : 0;

    size_t outer_iter = 0;
    size_t total_iter = 0;
    for( outer_iter = 0; outer_iter < outer_maxiter && diffs.maxDiff() > outer_tol; outer_iter++ ) {
        // Calculate new outer regions
        for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
            OR(alpha) = org_ORs[alpha];
            foreach( const Neighbor &beta, nbOR(alpha) )
                OR(alpha) *= _Qb[beta] ^ ((IR(beta).c() - org_IR_cs[beta]) / nbIR(beta).size());
        }

        // Inner loop
        if( isnan( doGBP() ) )
            return 1.0;

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); ++i ) {
            Factor new_belief = belief( var(i) );
            diffs.push( dist( new_belief, old_beliefs[i], Prob::DISTLINF ) );
            old_beliefs[i] = new_belief;
        }

        total_iter += Iterations();

        if( props.verbose >= 3 )
            cerr << Name << "::doDoubleLoop: maxdiff " << diffs.maxDiff() << " after " << total_iter << " passes" << endl;
    }

    // Restore maxiter, verbose and _maxdiff
    props.maxiter = outer_maxiter;
    props.verbose = outer_verbose;
    _maxdiff = org_maxdiff;

    if( diffs.maxDiff() > _maxdiff )
        _maxdiff = diffs.maxDiff();

    // Restore original outer regions
    ORs = org_ORs;

    // Restore original inner counting numbers
    for( size_t beta = 0; beta < nrIRs(); ++beta )
        IR(beta).c() = org_IR_cs[beta];

    if( props.verbose >= 1 ) {
        if( diffs.maxDiff() > props.tol ) {
            if( props.verbose == 1 )
                cerr << endl;
            cerr << Name << "::doDoubleLoop: WARNING: not converged within " << outer_maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
        } else {
            if( props.verbose >= 3 )
                cerr << Name << "::doDoubleLoop: ";
            cerr << "converged in " << total_iter << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return diffs.maxDiff();
}


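// run() dispatches to the double-loop scheme or to plain GBP, depending on the "doubleloop"
// property, and returns the final maximum change in single-variable beliefs.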
double HAK::run() {
    if( props.doubleloop )
        return doDoubleLoop();
    else
        return doGBP();
}


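/* belief( ns ) returns the marginal on ns of the first inner region belief whose variables
 * contain ns; if no such inner region exists, it falls back to the outer region beliefs
 * (asserting that at least one outer region contains ns).
 */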
Factor HAK::belief( const VarSet &ns ) const {
    vector<Factor>::const_iterator beta;
    for( beta = _Qb.begin(); beta != _Qb.end(); beta++ )
        if( beta->vars() >> ns )
            break;
    if( beta != _Qb.end() )
        return( beta->marginal(ns) );
    else {
        vector<Factor>::const_iterator alpha;
        for( alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
            if( alpha->vars() >> ns )
                break;
        assert( alpha != _Qa.end() );
        return( alpha->marginal(ns) );
    }
}


Factor HAK::belief( const Var &n ) const {
    return belief( (VarSet)n );
}


vector<Factor> HAK::beliefs() const {
    vector<Factor> result;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        result.push_back( Qb(beta) );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        result.push_back( Qa(alpha) );
    return result;
}


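/* logZ() returns the region-based approximation of the log partition sum corresponding to the
 * current beliefs: the counting-number-weighted entropies of the inner and outer region
 * beliefs plus the expectation of the log outer region factors, i.e. approximately
 *   sum_beta c_beta H(Qb_beta) + sum_alpha ( c_alpha H(Qa_alpha) + sum_x Qa_alpha(x) log OR_alpha(x) ).
 */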
Real HAK::logZ() const {
    Real s = 0.0;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        s += IR(beta).c() * Qb(beta).entropy();
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        s += OR(alpha).c() * Qa(alpha).entropy();
        s += (OR(alpha).log(true) * Qa(alpha)).sum();
    }
    return s;
}


} // end of namespace dai