/*  Copyright (C) 2006-2008  Joris Mooij  [joris dot mooij at tuebingen dot mpg dot de]
    Radboud University Nijmegen, The Netherlands /
    Max Planck Institute for Biological Cybernetics, Germany

    This file is part of libDAI.

    libDAI is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    libDAI is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with libDAI; if not, write to the Free Software
    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
*/

#include <dai/hak.h>
#include <dai/util.h>
#include <dai/diffs.h>
#include <dai/exceptions.h>


namespace dai {


using namespace std;


const char *HAK::Name = "HAK";

void HAK::setProperties( const PropertySet &opts ) {
    assert( opts.hasKey("tol") );
    assert( opts.hasKey("maxiter") );
    assert( opts.hasKey("verbose") );
    assert( opts.hasKey("doubleloop") );
    assert( opts.hasKey("clusters") );

    props.tol = opts.getStringAs<double>("tol");
    props.maxiter = opts.getStringAs<size_t>("maxiter");
    props.verbose = opts.getStringAs<size_t>("verbose");
    props.doubleloop = opts.getStringAs<bool>("doubleloop");
    props.clusters = opts.getStringAs<Properties::ClustersType>("clusters");

    if( opts.hasKey("loopdepth") )
        props.loopdepth = opts.getStringAs<size_t>("loopdepth");
    else
        assert( props.clusters != Properties::ClustersType::LOOP );
    if( opts.hasKey("damping") )
        props.damping = opts.getStringAs<double>("damping");
    else
        props.damping = 0.0;
}

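/* Summary of the properties parsed above (descriptive comment added for clarity;
 * the meanings are inferred from how the fields are used in this file):
 *   tol        - convergence tolerance on the maximum change of single-variable beliefs
 *   maxiter    - maximum number of passes over the region graph
 *   verbose    - verbosity level (0 = silent)
 *   doubleloop - if true, run() calls doDoubleLoop() instead of doGBP()
 *   clusters   - how outer regions are chosen: MIN, DELTA or LOOP
 *   loopdepth  - maximum loop length considered when clusters == LOOP
 *   damping    - damping factor in [0,1) for the single-loop updates (0 = no damping)
 */
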
PropertySet HAK::getProperties() const {
    PropertySet opts;
    opts.Set( "tol", props.tol );
    opts.Set( "maxiter", props.maxiter );
    opts.Set( "verbose", props.verbose );
    opts.Set( "doubleloop", props.doubleloop );
    opts.Set( "clusters", props.clusters );
    opts.Set( "loopdepth", props.loopdepth );
    opts.Set( "damping", props.damping );
    return opts;
}

string HAK::printProperties() const {
    stringstream s( stringstream::out );
    s << "[";
    s << "tol=" << props.tol << ",";
    s << "maxiter=" << props.maxiter << ",";
    s << "verbose=" << props.verbose << ",";
    s << "doubleloop=" << props.doubleloop << ",";
    s << "clusters=" << props.clusters << ",";
    s << "loopdepth=" << props.loopdepth << ",";
    s << "damping=" << props.damping << "]";
    return s.str();
}

void HAK::constructMessages() {
    // Create outer beliefs
    _Qa.clear();
    _Qa.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        _Qa.push_back( Factor( OR(alpha).vars() ) );

    // Create inner beliefs
    _Qb.clear();
    _Qb.reserve( nrIRs() );
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        _Qb.push_back( Factor( IR(beta) ) );

    // Create messages
    _muab.clear();
    _muab.reserve( nrORs() );
    _muba.clear();
    _muba.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        _muab.push_back( vector<Factor>() );
        _muba.push_back( vector<Factor>() );
        _muab[alpha].reserve( nbOR(alpha).size() );
        _muba[alpha].reserve( nbOR(alpha).size() );
        foreach( const Neighbor &beta, nbOR(alpha) ) {
            _muab[alpha].push_back( Factor( IR(beta) ) );
            _muba[alpha].push_back( Factor( IR(beta) ) );
        }
    }
}

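/* Note on the bookkeeping set up above (comment added for clarity): _muab[alpha][_beta]
 * holds the message from outer region alpha to its _beta-th neighboring inner region,
 * and _muba[alpha][_beta] holds the message in the opposite direction; both are factors
 * over the variables of that inner region, IR(beta).
 */
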
HAK::HAK( const RegionGraph &rg, const PropertySet &opts ) : DAIAlgRG(rg), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
    setProperties( opts );

    constructMessages();
}

void HAK::findLoopClusters( const FactorGraph & fg, std::set<VarSet> &allcl, VarSet newcl, const Var & root, size_t length, VarSet vars ) {
    for( VarSet::const_iterator in = vars.begin(); in != vars.end(); in++ ) {
        VarSet ind = fg.delta( fg.findVar( *in ) );
        if( (newcl.size()) >= 2 && ind.contains( root ) ) {
            allcl.insert( newcl | *in );
        }
        else if( length > 1 )
            findLoopClusters( fg, allcl, newcl | *in, root, length - 1, ind / newcl );
    }
}

HAK::HAK(const FactorGraph & fg, const PropertySet &opts) : DAIAlgRG(), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
    setProperties( opts );

    vector<VarSet> cl;
    if( props.clusters == Properties::ClustersType::MIN ) {
        cl = fg.Cliques();
    } else if( props.clusters == Properties::ClustersType::DELTA ) {
        for( size_t i = 0; i < fg.nrVars(); i++ )
            cl.push_back(fg.Delta(i));
    } else if( props.clusters == Properties::ClustersType::LOOP ) {
        cl = fg.Cliques();
        set<VarSet> scl;
        for( size_t i0 = 0; i0 < fg.nrVars(); i0++ ) {
            VarSet i0d = fg.delta(i0);
            if( props.loopdepth > 1 )
                findLoopClusters( fg, scl, fg.var(i0), fg.var(i0), props.loopdepth - 1, fg.delta(i0) );
        }
        for( set<VarSet>::const_iterator c = scl.begin(); c != scl.end(); c++ )
            cl.push_back(*c);
        if( props.verbose >= 3 ) {
            cout << Name << " uses the following clusters: " << endl;
            for( vector<VarSet>::const_iterator cli = cl.begin(); cli != cl.end(); cli++ )
                cout << *cli << endl;
        }
    } else
        DAI_THROW(INTERNAL_ERROR);

    RegionGraph rg(fg,cl);
    RegionGraph::operator=(rg);

    constructMessages();

    if( props.verbose >= 3 )
        cout << Name << " regiongraph: " << *this << endl;
}

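/* Hypothetical usage sketch (not part of the original source; the variable names and
 * the string-valued property settings are illustrative only, and the exact PropertySet
 * syntax may differ between libDAI versions):
 *
 *   FactorGraph fg;                      // e.g. filled via fg.ReadFromFile( "grid.fg" )
 *   PropertySet opts;
 *   opts.Set( "tol", string("1e-9") );
 *   opts.Set( "maxiter", string("10000") );
 *   opts.Set( "verbose", string("1") );
 *   opts.Set( "doubleloop", string("0") );
 *   opts.Set( "clusters", string("MIN") );
 *   HAK hak( fg, opts );
 *   hak.init();
 *   hak.run();
 *   Factor b0 = hak.belief( fg.var(0) );  // approximate single-variable marginal
 */
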
string HAK::identify() const {
    return string(Name) + printProperties();
}

void HAK::init( const VarSet &ns ) {
    for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
        if( alpha->vars().intersects( ns ) )
            alpha->fill( 1.0 / alpha->states() );

    for( size_t beta = 0; beta < nrIRs(); beta++ )
        if( IR(beta).intersects( ns ) ) {
            _Qb[beta].fill( 1.0 );
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muab( alpha, _beta ).fill( 1.0 );
                muba( alpha, _beta ).fill( 1.0 );
            }
        }
}

void HAK::init() {
    for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
        alpha->fill( 1.0 / alpha->states() );

    for( vector<Factor>::iterator beta = _Qb.begin(); beta != _Qb.end(); beta++ )
        beta->fill( 1.0 / beta->states() );

    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        foreach( const Neighbor &beta, nbOR(alpha) ) {
            size_t _beta = beta.iter;
            muab( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
            muba( alpha, _beta ).fill( 1.0 / muba( alpha, _beta ).states() );
        }
}

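/* The single-loop updates implemented in doGBP() below are the parent-child GBP
 * fixed-point equations, written here in the notation of the code:
 *
 *   muab(alpha,beta)  =  marginal of _Qa[alpha] on IR(beta), divided by muba(alpha,beta)
 *   _Qb[beta]     propto  prod_alpha  muab(alpha,beta) ^ ( 1 / (|nbIR(beta)| + c_beta) )
 *   muba(alpha,beta)  =  _Qb[beta] / muab(alpha,beta)
 *   _Qa[alpha]    propto  ( OR(alpha) * prod_beta muba(alpha,beta) ) ^ ( 1 / c_alpha )
 *
 * with c_alpha = OR(alpha).c() and c_beta = IR(beta).c() the region counting numbers.
 * The iteration stops when the maximum change in any single-variable belief drops
 * below props.tol, or after props.maxiter passes.
 */
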
double HAK::doGBP() {
    if( props.verbose >= 1 )
        cout << "Starting " << identify() << "...";
    if( props.verbose >= 3)
        cout << endl;

    double tic = toc();

    // Check whether counting numbers won't lead to problems
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        assert( nbIR(beta).size() + IR(beta).c() != 0.0 );

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    Diffs diffs(nrVars(), 1.0);

    // do several passes over the network until maximum number of iterations has
    // been reached or until the maximum belief difference is smaller than tolerance
    for( _iters = 0; _iters < props.maxiter && diffs.maxDiff() > props.tol; _iters++ ) {
        for( size_t beta = 0; beta < nrIRs(); beta++ ) {
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muab( alpha, _beta ) = _Qa[alpha].marginal(IR(beta)).divided_by( muba(alpha,_beta) );
                /* TODO: INVESTIGATE THIS PROBLEM
                 *
                 * In some cases, the muab's can have very large entries because the muba's have very
                 * small entries. This may cause NANs later on (e.g., multiplying large quantities may
                 * result in +inf; normalization then tries to calculate inf / inf which is NAN).
                 * A fix of this problem would consist in normalizing the messages muab.
                 * However, it is not obvious whether this is a real solution, because it has a
                 * negative performance impact and the NANs seem to be a symptom of a fundamental
                 * numerical instability.
                 */
                muab(alpha,_beta).normalize();
            }

            Factor Qb_new;
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                Qb_new *= muab(alpha,_beta) ^ (1 / (nbIR(beta).size() + IR(beta).c()));
            }

            Qb_new.normalize();
            if( Qb_new.hasNaNs() ) {
                // TODO: WHAT TO DO IN THIS CASE?
                cout << Name << "::doGBP: Qb_new has NaNs!" << endl;
                return 1.0;
            }
            /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
             *
             *   _Qb[beta] = Qb_new.makeZero(1e-100);
             */

            if( props.doubleloop || props.damping == 0.0 )
                _Qb[beta] = Qb_new; // no damping for double loop
            else
                _Qb[beta] = (Qb_new^(1.0 - props.damping)) * (_Qb[beta]^props.damping);

            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muba(alpha,_beta) = _Qb[beta].divided_by( muab(alpha,_beta) );

                /* TODO: INVESTIGATE WHETHER THIS HACK (INVENTED BY KEES) TO PREVENT NANS MAKES SENSE
                 *
                 *   muba(beta,*alpha).makePositive(1e-100);
                 */
            }
        }

        for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
            Factor Qa_new = OR(alpha);
            foreach( const Neighbor &gamma, nbOR(alpha) )
                Qa_new *= muba(alpha,gamma.iter);
            Qa_new ^= (1.0 / OR(alpha).c());
            Qa_new.normalize();
            if( Qa_new.hasNaNs() ) {
                cout << Name << "::doGBP: Qa_new has NaNs!" << endl;
                return 1.0;
            }
            /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
             *
             *   _Qb[beta] = Qb_new.makeZero(1e-100);
             */

            if( props.doubleloop || props.damping == 0.0 )
                _Qa[alpha] = Qa_new; // no damping for double loop
            else
                // FIXME: GEOMETRIC DAMPING IS SLOW!
                _Qa[alpha] = (Qa_new^(1.0 - props.damping)) * (_Qa[alpha]^props.damping);
        }

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); i++ ) {
            Factor new_belief = belief( var( i ) );
            diffs.push( dist( new_belief, old_beliefs[i], Prob::DISTLINF ) );
            old_beliefs[i] = new_belief;
        }

        if( props.verbose >= 3 )
            cout << Name << "::doGBP: maxdiff " << diffs.maxDiff() << " after " << _iters+1 << " passes" << endl;
    }

    if( diffs.maxDiff() > _maxdiff )
        _maxdiff = diffs.maxDiff();

    if( props.verbose >= 1 ) {
        if( diffs.maxDiff() > props.tol ) {
            if( props.verbose == 1 )
                cout << endl;
            cout << Name << "::doGBP: WARNING: not converged within " << props.maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
        } else {
            if( props.verbose >= 2 )
                cout << Name << "::doGBP: ";
            cout << "converged in " << _iters << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return diffs.maxDiff();
}

double HAK::doDoubleLoop() {
    if( props.verbose >= 1 )
        cout << "Starting " << identify() << "...";
    if( props.verbose >= 3)
        cout << endl;

    double tic = toc();

    // Save original outer regions
    vector<FRegion> org_ORs = ORs;

    // Save original inner counting numbers and set negative counting numbers to zero
    vector<double> org_IR_cs( nrIRs(), 0.0 );
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        org_IR_cs[beta] = IR(beta).c();
        if( IR(beta).c() < 0.0 )
            IR(beta).c() = 0.0;
    }

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    Diffs diffs(nrVars(), 1.0);

    size_t outer_maxiter = props.maxiter;
    double outer_tol = props.tol;
    size_t outer_verbose = props.verbose;
    double org_maxdiff = _maxdiff;

    // Set parameters for inner loop
    props.maxiter = 5;
    props.verbose = outer_verbose ? outer_verbose - 1 : 0;

    size_t outer_iter = 0;
    size_t total_iter = 0;
    for( outer_iter = 0; outer_iter < outer_maxiter && diffs.maxDiff() > outer_tol; outer_iter++ ) {
        // Calculate new outer regions
        for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
            OR(alpha) = org_ORs[alpha];
            foreach( const Neighbor &beta, nbOR(alpha) )
                OR(alpha) *= _Qb[beta] ^ ((IR(beta).c() - org_IR_cs[beta]) / nbIR(beta).size());
        }

        // Inner loop
        if( isnan( doGBP() ) )
            return 1.0;

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); ++i ) {
            Factor new_belief = belief( var( i ) );
            diffs.push( dist( new_belief, old_beliefs[i], Prob::DISTLINF ) );
            old_beliefs[i] = new_belief;
        }

        total_iter += Iterations();

        if( props.verbose >= 3 )
            cout << Name << "::doDoubleLoop: maxdiff " << diffs.maxDiff() << " after " << total_iter << " passes" << endl;
    }

    // restore _maxiter, _verbose and _maxdiff
    props.maxiter = outer_maxiter;
    props.verbose = outer_verbose;
    _maxdiff = org_maxdiff;

    if( diffs.maxDiff() > _maxdiff )
        _maxdiff = diffs.maxDiff();

    // Restore original outer regions
    ORs = org_ORs;

    // Restore original inner counting numbers
    for( size_t beta = 0; beta < nrIRs(); ++beta )
        IR(beta).c() = org_IR_cs[beta];

    if( props.verbose >= 1 ) {
        if( diffs.maxDiff() > props.tol ) {
            if( props.verbose == 1 )
                cout << endl;
            cout << Name << "::doDoubleLoop: WARNING: not converged within " << outer_maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
        } else {
            if( props.verbose >= 3 )
                cout << Name << "::doDoubleLoop: ";
            cout << "converged in " << total_iter << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return diffs.maxDiff();
}

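/* Sketch of the double-loop strategy above (after Heskes, Albers & Kappen, UAI 2003;
 * the 'HAK' in the class name presumably refers to these authors): negative inner
 * counting numbers are clipped to zero, and the change is compensated by multiplying
 * each neighboring outer region by _Qb[beta] ^ ((c_new - c_org) / |nbIR(beta)|) before
 * every call to the inner solver doGBP(). The outer iteration repeats this until the
 * single-variable beliefs stop changing (up to outer_tol).
 */
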
double HAK::run() {
    if( props.doubleloop )
        return doDoubleLoop();
    else
        return doGBP();
}

Factor HAK::belief( const VarSet &ns ) const {
    vector<Factor>::const_iterator beta;
    for( beta = _Qb.begin(); beta != _Qb.end(); beta++ )
        if( beta->vars() >> ns )
            break;
    if( beta != _Qb.end() )
        return( beta->marginal(ns) );
    else {
        vector<Factor>::const_iterator alpha;
        for( alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
            if( alpha->vars() >> ns )
                break;
        assert( alpha != _Qa.end() );
        return( alpha->marginal(ns) );
    }
}

Factor HAK::belief( const Var &n ) const {
    return belief( (VarSet)n );
}

vector<Factor> HAK::beliefs() const {
    vector<Factor> result;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        result.push_back( Qb(beta) );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        result.push_back( Qa(alpha) );
    return result;
}

Real HAK::logZ() const {
    Real sum = 0.0;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        sum += IR(beta).c() * Qb(beta).entropy();
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        sum += OR(alpha).c() * Qa(alpha).entropy();
        sum += (OR(alpha).log0() * Qa(alpha)).totalSum();
    }

    return sum;
}

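/* The quantity computed by logZ() above is the region-based (Kikuchi) estimate of the
 * log partition sum at the current beliefs:
 *
 *   logZ  ~  sum_beta c_beta H(Q_beta)
 *          + sum_alpha [ c_alpha H(Q_alpha) + sum_x Q_alpha(x) log psi_alpha(x) ],
 *
 * where H(.) is the entropy, c_alpha / c_beta are the counting numbers of the outer
 * and inner regions, and psi_alpha = OR(alpha) is the factor associated with outer
 * region alpha.
 */
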
} // end of namespace dai