Various cleanups and documentation improvements for SmallSet and Prob
[libdai.git] / src / hak.cpp
1 /* This file is part of libDAI - http://www.libdai.org/
2 *
3 * libDAI is licensed under the terms of the GNU General Public License version
4 * 2, or (at your option) any later version. libDAI is distributed without any
5 * warranty. See the file COPYING for more details.
6 *
7 * Copyright (C) 2006-2009 Joris Mooij [joris dot mooij at libdai dot org]
8 * Copyright (C) 2006-2007 Radboud University Nijmegen, The Netherlands
9 */
10
11
12 #include <map>
13 #include <dai/hak.h>
14 #include <dai/util.h>
15 #include <dai/exceptions.h>
16
17
18 namespace dai {
19
20
21 using namespace std;
22
23
/// Name of this inference algorithm, used in diagnostic output and in identify()
const char *HAK::Name = "HAK";
25
26
27
28 /// Sets factor entries that lie between 0 and \a epsilon to \a epsilon
29 template <class T>
30 TFactor<T>& makePositive( TFactor<T> &f, T epsilon ) {
31 for( size_t t = 0; t < f.states(); t++ )
32 if( (0 < f[t]) && (f[t] < epsilon) )
33 f[t] = epsilon;
34 return f;
35 }
36
37 /// Sets factor entries that are smaller (in absolute value) than \a epsilon to 0
38 template <class T>
39 TFactor<T>& makeZero( TFactor<T> &f, T epsilon ) {
40 for( size_t t = 0; t < f.states(); t++ )
41 if( f[t] < epsilon && f[t] > -epsilon )
42 f[t] = 0;
43 return f;
44 }
45
46
/// Sets the algorithm parameters from \a opts.
/** The properties "tol", "maxiter", "verbose", "doubleloop" and "clusters"
 *  are mandatory.  "loopdepth" is mandatory only when clusters==LOOP;
 *  "damping" defaults to 0.0 and "init" defaults to UNIFORM.
 */
void HAK::setProperties( const PropertySet &opts ) {
    // Mandatory properties: fail fast if any is missing
    DAI_ASSERT( opts.hasKey("tol") );
    DAI_ASSERT( opts.hasKey("maxiter") );
    DAI_ASSERT( opts.hasKey("verbose") );
    DAI_ASSERT( opts.hasKey("doubleloop") );
    DAI_ASSERT( opts.hasKey("clusters") );

    props.tol = opts.getStringAs<double>("tol");
    props.maxiter = opts.getStringAs<size_t>("maxiter");
    props.verbose = opts.getStringAs<size_t>("verbose");
    props.doubleloop = opts.getStringAs<bool>("doubleloop");
    props.clusters = opts.getStringAs<Properties::ClustersType>("clusters");

    // "loopdepth" is only used by the LOOP clustering method, so it is
    // only required in that case
    if( opts.hasKey("loopdepth") )
        props.loopdepth = opts.getStringAs<size_t>("loopdepth");
    else
        DAI_ASSERT( props.clusters != Properties::ClustersType::LOOP );
    // Optional damping factor (0.0 means no damping)
    if( opts.hasKey("damping") )
        props.damping = opts.getStringAs<double>("damping");
    else
        props.damping = 0.0;
    // Optional initialization method for beliefs and messages
    if( opts.hasKey("init") )
        props.init = opts.getStringAs<Properties::InitType>("init");
    else
        props.init = Properties::InitType::UNIFORM;
}
73
74
/// Returns all current algorithm parameters as a PropertySet
/// (the inverse of setProperties())
PropertySet HAK::getProperties() const {
    PropertySet opts;
    opts.Set( "tol", props.tol );
    opts.Set( "maxiter", props.maxiter );
    opts.Set( "verbose", props.verbose );
    opts.Set( "doubleloop", props.doubleloop );
    opts.Set( "clusters", props.clusters );
    opts.Set( "init", props.init );
    opts.Set( "loopdepth", props.loopdepth );
    opts.Set( "damping", props.damping );
    return opts;
}
87
88
89 string HAK::printProperties() const {
90 stringstream s( stringstream::out );
91 s << "[";
92 s << "tol=" << props.tol << ",";
93 s << "maxiter=" << props.maxiter << ",";
94 s << "verbose=" << props.verbose << ",";
95 s << "doubleloop=" << props.doubleloop << ",";
96 s << "clusters=" << props.clusters << ",";
97 s << "init=" << props.init << ",";
98 s << "loopdepth=" << props.loopdepth << ",";
99 s << "damping=" << props.damping << "]";
100 return s.str();
101 }
102
103
/// Allocates the outer beliefs _Qa, the inner beliefs _Qb, and the message
/// factors _muab/_muba, one pair per (outer region, neighboring inner region) edge.
/** The factors are allocated with the proper variable domains here; their
 *  actual values are set later by init().  Assumes the region graph part of
 *  this object has already been constructed.
 */
void HAK::constructMessages() {
    // Create outer beliefs: one factor per outer region, on that region's variables
    _Qa.clear();
    _Qa.reserve(nrORs());
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        _Qa.push_back( Factor( OR(alpha).vars() ) );

    // Create inner beliefs: one factor per inner region
    _Qb.clear();
    _Qb.reserve(nrIRs());
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        _Qb.push_back( Factor( IR(beta) ) );

    // Create messages: _muab[alpha][i] / _muba[alpha][i] are the messages
    // between outer region alpha and its i'th neighboring inner region;
    // both live on the inner region's variables
    _muab.clear();
    _muab.reserve( nrORs() );
    _muba.clear();
    _muba.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        _muab.push_back( vector<Factor>() );
        _muba.push_back( vector<Factor>() );
        _muab[alpha].reserve( nbOR(alpha).size() );
        _muba[alpha].reserve( nbOR(alpha).size() );
        foreach( const Neighbor &beta, nbOR(alpha) ) {
            _muab[alpha].push_back( Factor( IR(beta) ) );
            _muba[alpha].push_back( Factor( IR(beta) ) );
        }
    }
}
133
134
/// Constructs a HAK instance directly from a ready-made region graph \a rg
/// and parameter set \a opts
HAK::HAK( const RegionGraph &rg, const PropertySet &opts ) : DAIAlgRG(rg), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
    setProperties( opts );

    // Allocate beliefs and messages for the given region graph
    constructMessages();
}
140
141
/// Recursive helper that collects variable clusters forming loops through \a root.
/** Depth-first extends the partial cluster \a newcl with candidate variables
 *  from \a vars.  A loop is closed (and the cluster stored in \a allcl) when
 *  the cluster already holds at least two variables and the Markov blanket of
 *  the candidate contains \a root again.  \a length bounds the remaining
 *  recursion depth, i.e. the number of additional variables that may be added.
 */
void HAK::findLoopClusters( const FactorGraph & fg, std::set<VarSet> &allcl, VarSet newcl, const Var & root, size_t length, VarSet vars ) {
    for( VarSet::const_iterator in = vars.begin(); in != vars.end(); in++ ) {
        // Markov blanket (neighboring variables) of the candidate variable *in
        VarSet ind = fg.delta( fg.findVar( *in ) );
        if( (newcl.size()) >= 2 && ind.contains( root ) ) {
            // Loop closed: record the cluster including the candidate
            allcl.insert( newcl | *in );
        }
        else if( length > 1 )
            // Recurse with the candidate added; restrict the next candidates to
            // its neighbors not already in the cluster (set difference)
            findLoopClusters( fg, allcl, newcl | *in, root, length - 1, ind / newcl );
    }
}
152
153
154 HAK::HAK(const FactorGraph & fg, const PropertySet &opts) : DAIAlgRG(), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
155 setProperties( opts );
156
157 vector<VarSet> cl;
158 if( props.clusters == Properties::ClustersType::MIN ) {
159 cl = fg.Cliques();
160 } else if( props.clusters == Properties::ClustersType::DELTA ) {
161 for( size_t i = 0; i < fg.nrVars(); i++ )
162 cl.push_back(fg.Delta(i));
163 } else if( props.clusters == Properties::ClustersType::LOOP ) {
164 cl = fg.Cliques();
165 set<VarSet> scl;
166 for( size_t i0 = 0; i0 < fg.nrVars(); i0++ ) {
167 VarSet i0d = fg.delta(i0);
168 if( props.loopdepth > 1 )
169 findLoopClusters( fg, scl, fg.var(i0), fg.var(i0), props.loopdepth - 1, fg.delta(i0) );
170 }
171 for( set<VarSet>::const_iterator c = scl.begin(); c != scl.end(); c++ )
172 cl.push_back(*c);
173 if( props.verbose >= 3 ) {
174 cerr << Name << " uses the following clusters: " << endl;
175 for( vector<VarSet>::const_iterator cli = cl.begin(); cli != cl.end(); cli++ )
176 cerr << *cli << endl;
177 }
178 } else
179 DAI_THROW(UNKNOWN_ENUM_VALUE);
180
181 RegionGraph rg(fg,cl);
182 RegionGraph::operator=(rg);
183 constructMessages();
184
185 if( props.verbose >= 3 )
186 cerr << Name << " regiongraph: " << *this << endl;
187 }
188
189
190 string HAK::identify() const {
191 return string(Name) + printProperties();
192 }
193
194
/// (Re)initializes only the beliefs and messages whose domain intersects \a ns.
/** Presumably called after factors involving \a ns have changed (e.g. after
 *  clamping) -- confirm against callers.  Initialization is uniform or random
 *  depending on props.init.
 */
void HAK::init( const VarSet &ns ) {
    // Reset outer beliefs whose domain intersects ns
    for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
        if( alpha->vars().intersects( ns ) ) {
            if( props.init == Properties::InitType::UNIFORM )
                alpha->fill( 1.0 / alpha->states() );
            else
                alpha->randomize();
        }

    // Reset inner beliefs (and their incident messages) that intersect ns
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        if( IR(beta).intersects( ns ) ) {
            // NOTE(review): here the uniform fill uses 1.0, whereas init()
            // uses 1/states; presumably equivalent after normalization, but
            // the asymmetry is worth confirming
            if( props.init == Properties::InitType::UNIFORM )
                _Qb[beta].fill( 1.0 );
            else
                _Qb[beta].randomize();
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;  // index of beta within alpha's neighbor list
                if( props.init == Properties::InitType::UNIFORM ) {
                    muab( alpha, _beta ).fill( 1.0 );
                    muba( alpha, _beta ).fill( 1.0 );
                } else {
                    muab( alpha, _beta ).randomize();
                    muba( alpha, _beta ).randomize();
                }
            }
        }
}
222
223
224 void HAK::init() {
225 for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
226 if( props.init == Properties::InitType::UNIFORM )
227 alpha->fill( 1.0 / alpha->states() );
228 else
229 alpha->randomize();
230
231 for( vector<Factor>::iterator beta = _Qb.begin(); beta != _Qb.end(); beta++ )
232 if( props.init == Properties::InitType::UNIFORM )
233 beta->fill( 1.0 / beta->states() );
234 else
235 beta->randomize();
236
237 for( size_t alpha = 0; alpha < nrORs(); alpha++ )
238 foreach( const Neighbor &beta, nbOR(alpha) ) {
239 size_t _beta = beta.iter;
240 if( props.init == Properties::InitType::UNIFORM ) {
241 muab( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
242 muba( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
243 } else {
244 muab( alpha, _beta ).randomize();
245 muba( alpha, _beta ).randomize();
246 }
247 }
248 }
249
250
/// Runs the single-loop generalized belief propagation updates.
/** Iterates until the maximum change in any single-variable belief drops
 *  below props.tol or props.maxiter passes have been done.  Returns the
 *  final maximum difference, or 1.0 if NaNs were detected in the beliefs.
 *  Side effects: updates _iters and (on worse residuals) _maxdiff.
 */
double HAK::doGBP() {
    if( props.verbose >= 1 )
        cerr << "Starting " << identify() << "...";
    if( props.verbose >= 3)
        cerr << endl;

    double tic = toc();

    // Check whether counting numbers won't lead to problems
    // (the inner-belief update below divides by nbIR(beta).size() + IR(beta).c())
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        DAI_ASSERT( nbIR(beta).size() + IR(beta).c() != 0.0 );

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    Diffs diffs(nrVars(), 1.0);

    // do several passes over the network until maximum number of iterations has
    // been reached or until the maximum belief difference is smaller than tolerance
    for( _iters = 0; _iters < props.maxiter && diffs.maxDiff() > props.tol; _iters++ ) {
        for( size_t beta = 0; beta < nrIRs(); beta++ ) {
            // Update messages from each neighboring outer region alpha into beta:
            // the marginal of Qa on beta's variables, divided by the reverse message
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muab( alpha, _beta ) = _Qa[alpha].marginal(IR(beta)) / muba(alpha,_beta);
                /* TODO: INVESTIGATE THIS PROBLEM
                 *
                 * In some cases, the muab's can have very large entries because the muba's have very
                 * small entries. This may cause NANs later on (e.g., multiplying large quantities may
                 * result in +inf; normalization then tries to calculate inf / inf which is NAN).
                 * A fix of this problem would consist in normalizing the messages muab.
                 * However, it is not obvious whether this is a real solution, because it has a
                 * negative performance impact and the NAN's seem to be a symptom of a fundamental
                 * numerical unstability.
                 */
                muab(alpha,_beta).normalize();
            }

            // New inner belief: product of incoming messages, each raised to
            // 1 / (number of neighbors + counting number)
            Factor Qb_new;
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                Qb_new *= muab(alpha,_beta) ^ (1 / (nbIR(beta).size() + IR(beta).c()));
            }

            Qb_new.normalize();
            if( Qb_new.hasNaNs() ) {
                // TODO: WHAT TO DO IN THIS CASE?
                cerr << Name << "::doGBP: Qb_new has NaNs!" << endl;
                return 1.0;
            }
            /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
             *
             * _Qb[beta] = Qb_new.makeZero(1e-100);
             */

            if( props.doubleloop || props.damping == 0.0 )
                _Qb[beta] = Qb_new; // no damping for double loop
            else
                // geometric damping: mix old and new belief in the log domain
                _Qb[beta] = (Qb_new^(1.0 - props.damping)) * (_Qb[beta]^props.damping);

            // Propagate the updated inner belief back: recompute the reverse
            // messages and then the neighboring outer beliefs
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muba(alpha,_beta) = _Qb[beta] / muab(alpha,_beta);

                /* TODO: INVESTIGATE WHETHER THIS HACK (INVENTED BY KEES) TO PREVENT NANS MAKES SENSE
                 *
                 * muba(beta,*alpha).makePositive(1e-100);
                 *
                 */

                // New outer belief: the region factor times all incoming messages,
                // raised to 1 / counting number
                Factor Qa_new = OR(alpha);
                foreach( const Neighbor &gamma, nbOR(alpha) )
                    Qa_new *= muba(alpha,gamma.iter);
                Qa_new ^= (1.0 / OR(alpha).c());
                Qa_new.normalize();
                if( Qa_new.hasNaNs() ) {
                    cerr << Name << "::doGBP: Qa_new has NaNs!" << endl;
                    return 1.0;
                }
                /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
                 *
                 * _Qb[beta] = Qb_new.makeZero(1e-100);
                 */

                if( props.doubleloop || props.damping == 0.0 )
                    _Qa[alpha] = Qa_new; // no damping for double loop
                else
                    // FIXME: GEOMETRIC DAMPING IS SLOW!
                    _Qa[alpha] = (Qa_new^(1.0 - props.damping)) * (_Qa[alpha]^props.damping);
            }
        }

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); i++ ) {
            Factor new_belief = belief( var( i ) );
            diffs.push( dist( new_belief, old_beliefs[i], Prob::DISTLINF ) );
            old_beliefs[i] = new_belief;
        }

        if( props.verbose >= 3 )
            cerr << Name << "::doGBP: maxdiff " << diffs.maxDiff() << " after " << _iters+1 << " passes" << endl;
    }

    // Remember the worst residual seen over the lifetime of this object
    if( diffs.maxDiff() > _maxdiff )
        _maxdiff = diffs.maxDiff();

    if( props.verbose >= 1 ) {
        if( diffs.maxDiff() > props.tol ) {
            if( props.verbose == 1 )
                cerr << endl;
            cerr << Name << "::doGBP: WARNING: not converged within " << props.maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
        } else {
            if( props.verbose >= 2 )
                cerr << Name << "::doGBP: ";
            cerr << "converged in " << _iters << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return diffs.maxDiff();
}
374
375
/// Runs the double-loop variant of the algorithm.
/** Outer loop: clamps negative inner-region counting numbers to zero and
 *  compensates by multiplying the outer region factors with powers of the
 *  current inner beliefs; inner loop: doGBP() capped at 5 passes.  Returns
 *  the final maximum single-variable belief difference.  The outer regions,
 *  the inner counting numbers and the overridden properties are all restored
 *  before returning.
 */
double HAK::doDoubleLoop() {
    if( props.verbose >= 1 )
        cerr << "Starting " << identify() << "...";
    if( props.verbose >= 3)
        cerr << endl;

    double tic = toc();

    // Save original outer regions
    vector<FRegion> org_ORs = ORs;

    // Save original inner counting numbers and set negative counting numbers to zero
    vector<double> org_IR_cs( nrIRs(), 0.0 );
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        org_IR_cs[beta] = IR(beta).c();
        if( IR(beta).c() < 0.0 )
            IR(beta).c() = 0.0;
    }

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    Diffs diffs(nrVars(), 1.0);

    // Save outer-loop settings before overriding them for the inner loop
    size_t outer_maxiter = props.maxiter;
    double outer_tol = props.tol;
    size_t outer_verbose = props.verbose;
    double org_maxdiff = _maxdiff;

    // Set parameters for inner loop
    props.maxiter = 5;
    props.verbose = outer_verbose ? outer_verbose - 1 : 0;

    size_t outer_iter = 0;
    size_t total_iter = 0;
    for( outer_iter = 0; outer_iter < outer_maxiter && diffs.maxDiff() > outer_tol; outer_iter++ ) {
        // Calculate new outer regions: absorb the current inner beliefs,
        // raised to a power that compensates for the clamped counting numbers
        for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
            OR(alpha) = org_ORs[alpha];
            foreach( const Neighbor &beta, nbOR(alpha) )
                OR(alpha) *= _Qb[beta] ^ ((IR(beta).c() - org_IR_cs[beta]) / nbIR(beta).size());
        }

        // Inner loop
        // NOTE(review): doGBP() returns 1.0 (not NaN) when it detects NaNs in
        // the beliefs, so this isnan() check only fires if diffs.maxDiff()
        // itself becomes NaN -- confirm whether that is the intended guard
        if( isnan( doGBP() ) )
            return 1.0;

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); ++i ) {
            Factor new_belief = belief( var( i ) );
            diffs.push( dist( new_belief, old_beliefs[i], Prob::DISTLINF ) );
            old_beliefs[i] = new_belief;
        }

        total_iter += Iterations();

        if( props.verbose >= 3 )
            cerr << Name << "::doDoubleLoop: maxdiff " << diffs.maxDiff() << " after " << total_iter << " passes" << endl;
    }

    // restore _maxiter, _verbose and _maxdiff
    props.maxiter = outer_maxiter;
    props.verbose = outer_verbose;
    _maxdiff = org_maxdiff;

    _iters = total_iter;
    if( diffs.maxDiff() > _maxdiff )
        _maxdiff = diffs.maxDiff();

    // Restore original outer regions
    ORs = org_ORs;

    // Restore original inner counting numbers
    for( size_t beta = 0; beta < nrIRs(); ++beta )
        IR(beta).c() = org_IR_cs[beta];

    if( props.verbose >= 1 ) {
        if( diffs.maxDiff() > props.tol ) {
            if( props.verbose == 1 )
                cerr << endl;
            cerr << Name << "::doDoubleLoop: WARNING: not converged within " << outer_maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
        } else {
            if( props.verbose >= 3 )
                cerr << Name << "::doDoubleLoop: ";
            cerr << "converged in " << total_iter << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return diffs.maxDiff();
}
470
471
472 double HAK::run() {
473 if( props.doubleloop )
474 return doDoubleLoop();
475 else
476 return doGBP();
477 }
478
479
480 Factor HAK::belief( const VarSet &ns ) const {
481 vector<Factor>::const_iterator beta;
482 for( beta = _Qb.begin(); beta != _Qb.end(); beta++ )
483 if( beta->vars() >> ns )
484 break;
485 if( beta != _Qb.end() )
486 return( beta->marginal(ns) );
487 else {
488 vector<Factor>::const_iterator alpha;
489 for( alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
490 if( alpha->vars() >> ns )
491 break;
492 DAI_ASSERT( alpha != _Qa.end() );
493 return( alpha->marginal(ns) );
494 }
495 }
496
497
498 Factor HAK::belief( const Var &n ) const {
499 return belief( (VarSet)n );
500 }
501
502
503 vector<Factor> HAK::beliefs() const {
504 vector<Factor> result;
505 for( size_t beta = 0; beta < nrIRs(); beta++ )
506 result.push_back( Qb(beta) );
507 for( size_t alpha = 0; alpha < nrORs(); alpha++ )
508 result.push_back( Qa(alpha) );
509 return result;
510 }
511
512
/// Returns the approximation of the logarithm of the partition sum.
/** Computed as the sum of the counting-number-weighted entropies of all
 *  region beliefs, plus the expectation of the log outer-region factors
 *  under the outer beliefs.
 */
Real HAK::logZ() const {
    Real s = 0.0;
    // Weighted entropies of the inner region beliefs
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        s += IR(beta).c() * Qb(beta).entropy();
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        // Weighted entropy of the outer region belief...
        s += OR(alpha).c() * Qa(alpha).entropy();
        // ...plus the expected log of the outer region factor
        s += (OR(alpha).log(true) * Qa(alpha)).sum();
    }
    return s;
}
523
524
525 } // end of namespace dai