// Improved documentation of include/dai/hak.h
// [libdai.git] / src / hak.cpp
1 /* This file is part of libDAI - http://www.libdai.org/
2 *
3 * libDAI is licensed under the terms of the GNU General Public License version
4 * 2, or (at your option) any later version. libDAI is distributed without any
5 * warranty. See the file COPYING for more details.
6 *
7 * Copyright (C) 2006-2009 Joris Mooij [joris dot mooij at libdai dot org]
8 * Copyright (C) 2006-2007 Radboud University Nijmegen, The Netherlands
9 */
10
11
12 #include <map>
13 #include <dai/hak.h>
14 #include <dai/util.h>
15 #include <dai/exceptions.h>
16
17
18 namespace dai {
19
20
21 using namespace std;
22
23
/// Name of this algorithm; used by identify() and in log/warning output below.
const char *HAK::Name = "HAK";
25
26
27 /// Sets factor entries that lie between 0 and \a epsilon to \a epsilon
28 template <class T>
29 TFactor<T>& makePositive( TFactor<T> &f, T epsilon ) {
30 for( size_t t = 0; t < f.states(); t++ )
31 if( (0 < f[t]) && (f[t] < epsilon) )
32 f[t] = epsilon;
33 return f;
34 }
35
36 /// Sets factor entries that are smaller (in absolute value) than \a epsilon to 0
37 template <class T>
38 TFactor<T>& makeZero( TFactor<T> &f, T epsilon ) {
39 for( size_t t = 0; t < f.states(); t++ )
40 if( f[t] < epsilon && f[t] > -epsilon )
41 f[t] = 0;
42 return f;
43 }
44
45
46 void HAK::setProperties( const PropertySet &opts ) {
47 DAI_ASSERT( opts.hasKey("tol") );
48 DAI_ASSERT( opts.hasKey("maxiter") );
49 DAI_ASSERT( opts.hasKey("verbose") );
50 DAI_ASSERT( opts.hasKey("doubleloop") );
51 DAI_ASSERT( opts.hasKey("clusters") );
52
53 props.tol = opts.getStringAs<Real>("tol");
54 props.maxiter = opts.getStringAs<size_t>("maxiter");
55 props.verbose = opts.getStringAs<size_t>("verbose");
56 props.doubleloop = opts.getStringAs<bool>("doubleloop");
57 props.clusters = opts.getStringAs<Properties::ClustersType>("clusters");
58
59 if( opts.hasKey("loopdepth") )
60 props.loopdepth = opts.getStringAs<size_t>("loopdepth");
61 else
62 DAI_ASSERT( props.clusters != Properties::ClustersType::LOOP );
63 if( opts.hasKey("damping") )
64 props.damping = opts.getStringAs<Real>("damping");
65 else
66 props.damping = 0.0;
67 if( opts.hasKey("init") )
68 props.init = opts.getStringAs<Properties::InitType>("init");
69 else
70 props.init = Properties::InitType::UNIFORM;
71 }
72
73
74 PropertySet HAK::getProperties() const {
75 PropertySet opts;
76 opts.Set( "tol", props.tol );
77 opts.Set( "maxiter", props.maxiter );
78 opts.Set( "verbose", props.verbose );
79 opts.Set( "doubleloop", props.doubleloop );
80 opts.Set( "clusters", props.clusters );
81 opts.Set( "init", props.init );
82 opts.Set( "loopdepth", props.loopdepth );
83 opts.Set( "damping", props.damping );
84 return opts;
85 }
86
87
88 string HAK::printProperties() const {
89 stringstream s( stringstream::out );
90 s << "[";
91 s << "tol=" << props.tol << ",";
92 s << "maxiter=" << props.maxiter << ",";
93 s << "verbose=" << props.verbose << ",";
94 s << "doubleloop=" << props.doubleloop << ",";
95 s << "clusters=" << props.clusters << ",";
96 s << "init=" << props.init << ",";
97 s << "loopdepth=" << props.loopdepth << ",";
98 s << "damping=" << props.damping << "]";
99 return s.str();
100 }
101
102
103 void HAK::construct() {
104 // Create outer beliefs
105 _Qa.clear();
106 _Qa.reserve(nrORs());
107 for( size_t alpha = 0; alpha < nrORs(); alpha++ )
108 _Qa.push_back( Factor( OR(alpha).vars() ) );
109
110 // Create inner beliefs
111 _Qb.clear();
112 _Qb.reserve(nrIRs());
113 for( size_t beta = 0; beta < nrIRs(); beta++ )
114 _Qb.push_back( Factor( IR(beta) ) );
115
116 // Create messages
117 _muab.clear();
118 _muab.reserve( nrORs() );
119 _muba.clear();
120 _muba.reserve( nrORs() );
121 for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
122 _muab.push_back( vector<Factor>() );
123 _muba.push_back( vector<Factor>() );
124 _muab[alpha].reserve( nbOR(alpha).size() );
125 _muba[alpha].reserve( nbOR(alpha).size() );
126 foreach( const Neighbor &beta, nbOR(alpha) ) {
127 _muab[alpha].push_back( Factor( IR(beta) ) );
128 _muba[alpha].push_back( Factor( IR(beta) ) );
129 }
130 }
131 }
132
133
/// Constructs a HAK instance on an existing region graph.
/** The region structure is copied from \a rg; \a opts must contain the
 *  mandatory keys checked by setProperties(). Belief and message storage
 *  is then allocated by construct().
 */
HAK::HAK( const RegionGraph &rg, const PropertySet &opts ) : DAIAlgRG(rg), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
    setProperties( opts );

    construct();
}
139
140
141 void HAK::findLoopClusters( const FactorGraph & fg, std::set<VarSet> &allcl, VarSet newcl, const Var & root, size_t length, VarSet vars ) {
142 for( VarSet::const_iterator in = vars.begin(); in != vars.end(); in++ ) {
143 VarSet ind = fg.delta( fg.findVar( *in ) );
144 if( (newcl.size()) >= 2 && ind.contains( root ) )
145 allcl.insert( newcl | *in );
146 else if( length > 1 )
147 findLoopClusters( fg, allcl, newcl | *in, root, length - 1, ind / newcl );
148 }
149 }
150
151
152 HAK::HAK(const FactorGraph & fg, const PropertySet &opts) : DAIAlgRG(), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
153 setProperties( opts );
154
155 vector<VarSet> cl;
156 if( props.clusters == Properties::ClustersType::MIN ) {
157 cl = fg.Cliques();
158 } else if( props.clusters == Properties::ClustersType::DELTA ) {
159 for( size_t i = 0; i < fg.nrVars(); i++ )
160 cl.push_back(fg.Delta(i));
161 } else if( props.clusters == Properties::ClustersType::LOOP ) {
162 cl = fg.Cliques();
163 set<VarSet> scl;
164 for( size_t i0 = 0; i0 < fg.nrVars(); i0++ ) {
165 VarSet i0d = fg.delta(i0);
166 if( props.loopdepth > 1 )
167 findLoopClusters( fg, scl, fg.var(i0), fg.var(i0), props.loopdepth - 1, fg.delta(i0) );
168 }
169 for( set<VarSet>::const_iterator c = scl.begin(); c != scl.end(); c++ )
170 cl.push_back(*c);
171 if( props.verbose >= 3 ) {
172 cerr << Name << " uses the following clusters: " << endl;
173 for( vector<VarSet>::const_iterator cli = cl.begin(); cli != cl.end(); cli++ )
174 cerr << *cli << endl;
175 }
176 } else
177 DAI_THROW(UNKNOWN_ENUM_VALUE);
178
179 RegionGraph rg(fg,cl);
180 RegionGraph::operator=(rg);
181 construct();
182
183 if( props.verbose >= 3 )
184 cerr << Name << " regiongraph: " << *this << endl;
185 }
186
187
188 string HAK::identify() const {
189 return string(Name) + printProperties();
190 }
191
192
/// Reinitializes only the beliefs and messages that involve variables in \a ns.
/** Outer beliefs whose variables intersect \a ns, and inner beliefs (plus all
 *  messages on their edges) whose region intersects \a ns, are reset to
 *  uniform or random values depending on props.init; everything else is kept.
 */
void HAK::init( const VarSet &ns ) {
    // Reset affected outer region beliefs
    for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
        if( alpha->vars().intersects( ns ) ) {
            if( props.init == Properties::InitType::UNIFORM )
                alpha->fill( 1.0 / alpha->states() );
            else
                alpha->randomize();
        }

    // Reset affected inner region beliefs and the messages on their edges
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        if( IR(beta).intersects( ns ) ) {
            if( props.init == Properties::InitType::UNIFORM )
                // NOTE(review): filled with 1.0 here, whereas init() fills inner
                // beliefs with 1/states — verify this asymmetry is intentional
                _Qb[beta].fill( 1.0 );
            else
                _Qb[beta].randomize();
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;  // index of beta in alpha's neighbor list
                if( props.init == Properties::InitType::UNIFORM ) {
                    muab( alpha, _beta ).fill( 1.0 );
                    muba( alpha, _beta ).fill( 1.0 );
                } else {
                    muab( alpha, _beta ).randomize();
                    muba( alpha, _beta ).randomize();
                }
            }
        }
}
220
221
222 void HAK::init() {
223 for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
224 if( props.init == Properties::InitType::UNIFORM )
225 alpha->fill( 1.0 / alpha->states() );
226 else
227 alpha->randomize();
228
229 for( vector<Factor>::iterator beta = _Qb.begin(); beta != _Qb.end(); beta++ )
230 if( props.init == Properties::InitType::UNIFORM )
231 beta->fill( 1.0 / beta->states() );
232 else
233 beta->randomize();
234
235 for( size_t alpha = 0; alpha < nrORs(); alpha++ )
236 foreach( const Neighbor &beta, nbOR(alpha) ) {
237 size_t _beta = beta.iter;
238 if( props.init == Properties::InitType::UNIFORM ) {
239 muab( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
240 muba( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
241 } else {
242 muab( alpha, _beta ).randomize();
243 muba( alpha, _beta ).randomize();
244 }
245 }
246 }
247
248
/// Runs the (single-loop) generalized belief propagation updates.
/** Sweeps over all inner regions, updating the messages muab/muba and the
 *  region beliefs _Qa/_Qb, until the maximum change in any single-variable
 *  belief drops below props.tol or props.maxiter passes have been made.
 *  Returns the final maximum belief difference; returns 1.0 early if NaNs
 *  are encountered in a belief update.
 */
Real HAK::doGBP() {
    if( props.verbose >= 1 )
        cerr << "Starting " << identify() << "...";
    if( props.verbose >= 3)
        cerr << endl;

    double tic = toc();

    // Check whether counting numbers won't lead to problems
    // (the Qb update below divides by nbIR(beta).size() + IR(beta).c())
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        DAI_ASSERT( nbIR(beta).size() + IR(beta).c() != 0.0 );

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    vector<Real> diffs( nrVars(), INFINITY );
    Real maxDiff = INFINITY;

    // do several passes over the network until maximum number of iterations has
    // been reached or until the maximum belief difference is smaller than tolerance
    for( _iters = 0; _iters < props.maxiter && maxDiff > props.tol; _iters++ ) {
        for( size_t beta = 0; beta < nrIRs(); beta++ ) {
            // Update the messages from each neighboring outer region into beta
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muab( alpha, _beta ) = _Qa[alpha].marginal(IR(beta)) / muba(alpha,_beta);
                /* TODO: INVESTIGATE THIS PROBLEM
                 *
                 * In some cases, the muab's can have very large entries because the muba's have very
                 * small entries. This may cause NANs later on (e.g., multiplying large quantities may
                 * result in +inf; normalization then tries to calculate inf / inf which is NAN).
                 * A fix of this problem would consist in normalizing the messages muab.
                 * However, it is not obvious whether this is a real solution, because it has a
                 * negative performance impact and the NAN's seem to be a symptom of a fundamental
                 * numerical unstability.
                 */
                muab(alpha,_beta).normalize();
            }

            // Recompute the inner region belief from the incoming messages
            Factor Qb_new;
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                Qb_new *= muab(alpha,_beta) ^ (1 / (nbIR(beta).size() + IR(beta).c()));
            }

            Qb_new.normalize();
            if( Qb_new.hasNaNs() ) {
                // TODO: WHAT TO DO IN THIS CASE?
                cerr << Name << "::doGBP: Qb_new has NaNs!" << endl;
                return 1.0;
            }
            /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
             *
             * _Qb[beta] = Qb_new.makeZero(1e-100);
             */

            if( props.doubleloop || props.damping == 0.0 )
                _Qb[beta] = Qb_new; // no damping for double loop
            else
                _Qb[beta] = (Qb_new^(1.0 - props.damping)) * (_Qb[beta]^props.damping);

            // Propagate the new inner belief back and update the neighboring
            // outer region beliefs
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muba(alpha,_beta) = _Qb[beta] / muab(alpha,_beta);

                /* TODO: INVESTIGATE WHETHER THIS HACK (INVENTED BY KEES) TO PREVENT NANS MAKES SENSE
                 *
                 * muba(beta,*alpha).makePositive(1e-100);
                 *
                 */

                Factor Qa_new = OR(alpha);
                foreach( const Neighbor &gamma, nbOR(alpha) )
                    Qa_new *= muba(alpha,gamma.iter);
                Qa_new ^= (1.0 / OR(alpha).c());
                Qa_new.normalize();
                if( Qa_new.hasNaNs() ) {
                    cerr << Name << "::doGBP: Qa_new has NaNs!" << endl;
                    return 1.0;
                }
                /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
                 *
                 * _Qb[beta] = Qb_new.makeZero(1e-100);
                 */

                if( props.doubleloop || props.damping == 0.0 )
                    _Qa[alpha] = Qa_new; // no damping for double loop
                else
                    // FIXME: GEOMETRIC DAMPING IS SLOW!
                    _Qa[alpha] = (Qa_new^(1.0 - props.damping)) * (_Qa[alpha]^props.damping);
            }
        }

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); i++ ) {
            Factor new_belief = belief( var( i ) );
            diffs[i] = dist( new_belief, old_beliefs[i], Prob::DISTLINF );
            old_beliefs[i] = new_belief;
        }
        maxDiff = max( diffs );

        if( props.verbose >= 3 )
            cerr << Name << "::doGBP: maxdiff " << maxDiff << " after " << _iters+1 << " passes" << endl;
    }

    // Record the worst residual seen by this object across runs
    if( maxDiff > _maxdiff )
        _maxdiff = maxDiff;

    if( props.verbose >= 1 ) {
        if( maxDiff > props.tol ) {
            if( props.verbose == 1 )
                cerr << endl;
            cerr << Name << "::doGBP: WARNING: not converged within " << props.maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << maxDiff << endl;
        } else {
            if( props.verbose >= 2 )
                cerr << Name << "::doGBP: ";
            cerr << "converged in " << _iters << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return maxDiff;
}
374
375
/// Runs the double-loop variant of the algorithm.
/** Negative inner counting numbers are temporarily clamped to zero and the
 *  outer regions are reweighted by the current inner beliefs before each
 *  short inner doGBP() run (at most 5 passes). The outer loop repeats until
 *  the single-variable beliefs converge or props.maxiter outer iterations
 *  have been made; the original regions, counting numbers and properties
 *  are restored before returning. Returns the final maximum belief change.
 */
Real HAK::doDoubleLoop() {
    if( props.verbose >= 1 )
        cerr << "Starting " << identify() << "...";
    if( props.verbose >= 3)
        cerr << endl;

    double tic = toc();

    // Save original outer regions
    vector<FRegion> org_ORs = ORs;

    // Save original inner counting numbers and set negative counting numbers to zero
    vector<Real> org_IR_cs( nrIRs(), 0.0 );
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        org_IR_cs[beta] = IR(beta).c();
        if( IR(beta).c() < 0.0 )
            IR(beta).c() = 0.0;
    }

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    vector<Real> diffs( nrVars(), INFINITY );
    Real maxDiff = INFINITY;

    // Save the outer-loop settings; props is temporarily overwritten so that
    // doGBP() runs as the inner loop with reduced iterations/verbosity
    size_t outer_maxiter = props.maxiter;
    Real outer_tol = props.tol;
    size_t outer_verbose = props.verbose;
    Real org_maxdiff = _maxdiff;

    // Set parameters for inner loop
    props.maxiter = 5;
    props.verbose = outer_verbose ? outer_verbose - 1 : 0;

    size_t outer_iter = 0;
    size_t total_iter = 0;
    for( outer_iter = 0; outer_iter < outer_maxiter && maxDiff > outer_tol; outer_iter++ ) {
        // Calculate new outer regions: original region times inner beliefs
        // raised to the counting-number correction
        for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
            OR(alpha) = org_ORs[alpha];
            foreach( const Neighbor &beta, nbOR(alpha) )
                OR(alpha) *= _Qb[beta] ^ ((IR(beta).c() - org_IR_cs[beta]) / nbIR(beta).size());
        }

        // Inner loop
        // NOTE(review): doGBP() returns 1.0 (not NaN) when it detects NaNs
        // itself, so this isnan() check cannot catch that path — verify intent
        if( isnan( doGBP() ) )
            return 1.0;

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); ++i ) {
            Factor new_belief = belief( var( i ) );
            diffs[i] = dist( new_belief, old_beliefs[i], Prob::DISTLINF );
            old_beliefs[i] = new_belief;
        }
        maxDiff = max( diffs );

        total_iter += Iterations();

        if( props.verbose >= 3 )
            cerr << Name << "::doDoubleLoop: maxdiff " << maxDiff << " after " << total_iter << " passes" << endl;
    }

    // restore _maxiter, _verbose and _maxdiff
    props.maxiter = outer_maxiter;
    props.verbose = outer_verbose;
    _maxdiff = org_maxdiff;

    _iters = total_iter;
    if( maxDiff > _maxdiff )
        _maxdiff = maxDiff;

    // Restore original outer regions
    ORs = org_ORs;

    // Restore original inner counting numbers
    for( size_t beta = 0; beta < nrIRs(); ++beta )
        IR(beta).c() = org_IR_cs[beta];

    if( props.verbose >= 1 ) {
        if( maxDiff > props.tol ) {
            if( props.verbose == 1 )
                cerr << endl;
            cerr << Name << "::doDoubleLoop: WARNING: not converged within " << outer_maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << maxDiff << endl;
        } else {
            if( props.verbose >= 3 )
                cerr << Name << "::doDoubleLoop: ";
            cerr << "converged in " << total_iter << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return maxDiff;
}
472
473
474 Real HAK::run() {
475 if( props.doubleloop )
476 return doDoubleLoop();
477 else
478 return doGBP();
479 }
480
481
482 Factor HAK::belief( const VarSet &ns ) const {
483 vector<Factor>::const_iterator beta;
484 for( beta = _Qb.begin(); beta != _Qb.end(); beta++ )
485 if( beta->vars() >> ns )
486 break;
487 if( beta != _Qb.end() )
488 return( beta->marginal(ns) );
489 else {
490 vector<Factor>::const_iterator alpha;
491 for( alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
492 if( alpha->vars() >> ns )
493 break;
494 DAI_ASSERT( alpha != _Qa.end() );
495 return( alpha->marginal(ns) );
496 }
497 }
498
499
500 Factor HAK::belief( const Var &n ) const {
501 return belief( (VarSet)n );
502 }
503
504
505 vector<Factor> HAK::beliefs() const {
506 vector<Factor> result;
507 for( size_t beta = 0; beta < nrIRs(); beta++ )
508 result.push_back( Qb(beta) );
509 for( size_t alpha = 0; alpha < nrORs(); alpha++ )
510 result.push_back( Qa(alpha) );
511 return result;
512 }
513
514
515 Real HAK::logZ() const {
516 Real s = 0.0;
517 for( size_t beta = 0; beta < nrIRs(); beta++ )
518 s += IR(beta).c() * Qb(beta).entropy();
519 for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
520 s += OR(alpha).c() * Qa(alpha).entropy();
521 s += (OR(alpha).log(true) * Qa(alpha)).sum();
522 }
523 return s;
524 }
525
526
527 } // end of namespace dai