Replaced the standard assert() macro by DAI_ASSERT
src/hak.cpp (libdai.git)
/* This file is part of libDAI - http://www.libdai.org/
 *
 * libDAI is licensed under the terms of the GNU General Public License version
 * 2, or (at your option) any later version. libDAI is distributed without any
 * warranty. See the file COPYING for more details.
 *
 * Copyright (C) 2006-2009 Joris Mooij [joris dot mooij at libdai dot org]
 * Copyright (C) 2006-2007 Radboud University Nijmegen, The Netherlands
 */


#include <map>
#include <dai/hak.h>
#include <dai/util.h>
#include <dai/exceptions.h>


namespace dai {


using namespace std;


const char *HAK::Name = "HAK";


void HAK::setProperties( const PropertySet &opts ) {
    DAI_ASSERT( opts.hasKey("tol") );
    DAI_ASSERT( opts.hasKey("maxiter") );
    DAI_ASSERT( opts.hasKey("verbose") );
    DAI_ASSERT( opts.hasKey("doubleloop") );
    DAI_ASSERT( opts.hasKey("clusters") );

    props.tol = opts.getStringAs<double>("tol");
    props.maxiter = opts.getStringAs<size_t>("maxiter");
    props.verbose = opts.getStringAs<size_t>("verbose");
    props.doubleloop = opts.getStringAs<bool>("doubleloop");
    props.clusters = opts.getStringAs<Properties::ClustersType>("clusters");

    if( opts.hasKey("loopdepth") )
        props.loopdepth = opts.getStringAs<size_t>("loopdepth");
    else
        DAI_ASSERT( props.clusters != Properties::ClustersType::LOOP );
    if( opts.hasKey("damping") )
        props.damping = opts.getStringAs<double>("damping");
    else
        props.damping = 0.0;
}


PropertySet HAK::getProperties() const {
    PropertySet opts;
    opts.Set( "tol", props.tol );
    opts.Set( "maxiter", props.maxiter );
    opts.Set( "verbose", props.verbose );
    opts.Set( "doubleloop", props.doubleloop );
    opts.Set( "clusters", props.clusters );
    opts.Set( "loopdepth", props.loopdepth );
    opts.Set( "damping", props.damping );
    return opts;
}


string HAK::printProperties() const {
    stringstream s( stringstream::out );
    s << "[";
    s << "tol=" << props.tol << ",";
    s << "maxiter=" << props.maxiter << ",";
    s << "verbose=" << props.verbose << ",";
    s << "doubleloop=" << props.doubleloop << ",";
    s << "clusters=" << props.clusters << ",";
    s << "loopdepth=" << props.loopdepth << ",";
    s << "damping=" << props.damping << "]";
    return s.str();
}

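// Allocates the outer-region beliefs _Qa, the inner-region beliefs _Qb and the messages
// _muab / _muba. For each edge between an outer region alpha and an inner region beta there
// is one message in either direction; both are indexed by alpha and by the position of beta
// in nbOR(alpha) (hence the use of Neighbor::dual and Neighbor::iter elsewhere in this file).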
void HAK::constructMessages() {
    // Create outer beliefs
    _Qa.clear();
    _Qa.reserve(nrORs());
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        _Qa.push_back( Factor( OR(alpha).vars() ) );

    // Create inner beliefs
    _Qb.clear();
    _Qb.reserve(nrIRs());
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        _Qb.push_back( Factor( IR(beta) ) );

    // Create messages
    _muab.clear();
    _muab.reserve( nrORs() );
    _muba.clear();
    _muba.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        _muab.push_back( vector<Factor>() );
        _muba.push_back( vector<Factor>() );
        _muab[alpha].reserve( nbOR(alpha).size() );
        _muba[alpha].reserve( nbOR(alpha).size() );
        foreach( const Neighbor &beta, nbOR(alpha) ) {
            _muab[alpha].push_back( Factor( IR(beta) ) );
            _muba[alpha].push_back( Factor( IR(beta) ) );
        }
    }
}


HAK::HAK( const RegionGraph &rg, const PropertySet &opts ) : DAIAlgRG(rg), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
    setProperties( opts );

    constructMessages();
}


void HAK::findLoopClusters( const FactorGraph & fg, std::set<VarSet> &allcl, VarSet newcl, const Var & root, size_t length, VarSet vars ) {
    for( VarSet::const_iterator in = vars.begin(); in != vars.end(); in++ ) {
        VarSet ind = fg.delta( fg.findVar( *in ) );
        if( (newcl.size()) >= 2 && ind.contains( root ) ) {
            allcl.insert( newcl | *in );
        }
        else if( length > 1 )
            findLoopClusters( fg, allcl, newcl | *in, root, length - 1, ind / newcl );
    }
}

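// Constructs a HAK object from a FactorGraph. The outer clusters are chosen according to
// props.clusters: MIN uses the factor cliques returned by fg.Cliques(); DELTA uses Delta(i)
// for every variable i (the variable together with its Markov blanket); LOOP starts from the
// factor cliques and additionally adds the clusters found by findLoopClusters(), i.e. loops
// of up to props.loopdepth variables (e.g. with loopdepth >= 3, a triangle of variables
// x0, x1, x2 contributes the cluster {x0,x1,x2}).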
HAK::HAK(const FactorGraph & fg, const PropertySet &opts) : DAIAlgRG(), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
    setProperties( opts );

    vector<VarSet> cl;
    if( props.clusters == Properties::ClustersType::MIN ) {
        cl = fg.Cliques();
    } else if( props.clusters == Properties::ClustersType::DELTA ) {
        for( size_t i = 0; i < fg.nrVars(); i++ )
            cl.push_back(fg.Delta(i));
    } else if( props.clusters == Properties::ClustersType::LOOP ) {
        cl = fg.Cliques();
        set<VarSet> scl;
        for( size_t i0 = 0; i0 < fg.nrVars(); i0++ ) {
            VarSet i0d = fg.delta(i0);
            if( props.loopdepth > 1 )
                findLoopClusters( fg, scl, fg.var(i0), fg.var(i0), props.loopdepth - 1, fg.delta(i0) );
        }
        for( set<VarSet>::const_iterator c = scl.begin(); c != scl.end(); c++ )
            cl.push_back(*c);
        if( props.verbose >= 3 ) {
            cerr << Name << " uses the following clusters: " << endl;
            for( vector<VarSet>::const_iterator cli = cl.begin(); cli != cl.end(); cli++ )
                cerr << *cli << endl;
        }
    } else
        DAI_THROW(UNKNOWN_ENUM_VALUE);

    RegionGraph rg(fg,cl);
    RegionGraph::operator=(rg);
    constructMessages();

    if( props.verbose >= 3 )
        cerr << Name << " regiongraph: " << *this << endl;
}


string HAK::identify() const {
    return string(Name) + printProperties();
}


void HAK::init( const VarSet &ns ) {
    for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
        if( alpha->vars().intersects( ns ) )
            alpha->fill( 1.0 / alpha->states() );

    for( size_t beta = 0; beta < nrIRs(); beta++ )
        if( IR(beta).intersects( ns ) ) {
            _Qb[beta].fill( 1.0 );
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muab( alpha, _beta ).fill( 1.0 );
                muba( alpha, _beta ).fill( 1.0 );
            }
        }
}


void HAK::init() {
    for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
        alpha->fill( 1.0 / alpha->states() );

    for( vector<Factor>::iterator beta = _Qb.begin(); beta != _Qb.end(); beta++ )
        beta->fill( 1.0 / beta->states() );

    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        foreach( const Neighbor &beta, nbOR(alpha) ) {
            size_t _beta = beta.iter;
            muab( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
            muba( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
        }
}

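// Single-loop message passing: alternately updates the messages muab/muba and the region
// beliefs _Qa/_Qb until the maximum change in the single-variable beliefs drops below
// props.tol or props.maxiter passes have been made. Returns the final maximum difference
// (or 1.0 if NaNs were encountered).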
double HAK::doGBP() {
    if( props.verbose >= 1 )
        cerr << "Starting " << identify() << "...";
    if( props.verbose >= 3)
        cerr << endl;

    double tic = toc();

    // Check whether counting numbers won't lead to problems
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        DAI_ASSERT( nbIR(beta).size() + IR(beta).c() != 0.0 );

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    Diffs diffs(nrVars(), 1.0);

    // do several passes over the network until the maximum number of iterations has
    // been reached or until the maximum belief difference is smaller than tolerance
    for( _iters = 0; _iters < props.maxiter && diffs.maxDiff() > props.tol; _iters++ ) {
        for( size_t beta = 0; beta < nrIRs(); beta++ ) {
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muab( alpha, _beta ) = _Qa[alpha].marginal(IR(beta)) / muba(alpha,_beta);
                /* TODO: INVESTIGATE THIS PROBLEM
                 *
                 * In some cases, the muab's can have very large entries because the muba's have very
                 * small entries. This may cause NaNs later on (e.g., multiplying large quantities may
                 * result in +inf; normalization then tries to calculate inf / inf which is NaN).
                 * A fix for this problem would be to normalize the messages muab.
                 * However, it is not obvious whether this is a real solution, because it has a
                 * negative performance impact and the NaNs seem to be a symptom of a fundamental
                 * numerical instability.
                 */
                muab(alpha,_beta).normalize();
            }

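            // Belief update for inner region beta: Q_beta is proportional to the product over
            // neighboring outer regions alpha of
            //     muab(alpha,beta) ^ ( 1 / (|nbIR(beta)| + c_beta) ),
            // where c_beta = IR(beta).c() is the counting number of beta (this exponent is the
            // reason for the assertion on the counting numbers at the start of this function).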
            Factor Qb_new;
            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                Qb_new *= muab(alpha,_beta) ^ (1 / (nbIR(beta).size() + IR(beta).c()));
            }

            Qb_new.normalize();
            if( Qb_new.hasNaNs() ) {
                // TODO: WHAT TO DO IN THIS CASE?
                cerr << Name << "::doGBP: Qb_new has NaNs!" << endl;
                return 1.0;
            }
            /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
             *
             *   _Qb[beta] = Qb_new.makeZero(1e-100);
             */

            if( props.doubleloop || props.damping == 0.0 )
                _Qb[beta] = Qb_new; // no damping for double loop
            else
                _Qb[beta] = (Qb_new^(1.0 - props.damping)) * (_Qb[beta]^props.damping);

            foreach( const Neighbor &alpha, nbIR(beta) ) {
                size_t _beta = alpha.dual;
                muba(alpha,_beta) = _Qb[beta] / muab(alpha,_beta);

                /* TODO: INVESTIGATE WHETHER THIS HACK (INVENTED BY KEES) TO PREVENT NANS MAKES SENSE
                 *
                 *   muba(beta,*alpha).makePositive(1e-100);
                 *
                 */

                Factor Qa_new = OR(alpha);
                foreach( const Neighbor &gamma, nbOR(alpha) )
                    Qa_new *= muba(alpha,gamma.iter);
                Qa_new ^= (1.0 / OR(alpha).c());
                Qa_new.normalize();
                if( Qa_new.hasNaNs() ) {
                    cerr << Name << "::doGBP: Qa_new has NaNs!" << endl;
                    return 1.0;
                }
                /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
                 *
                 *   _Qb[beta] = Qb_new.makeZero(1e-100);
                 */

                if( props.doubleloop || props.damping == 0.0 )
                    _Qa[alpha] = Qa_new; // no damping for double loop
                else
                    // FIXME: GEOMETRIC DAMPING IS SLOW!
                    _Qa[alpha] = (Qa_new^(1.0 - props.damping)) * (_Qa[alpha]^props.damping);
            }
        }

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); i++ ) {
            Factor new_belief = belief( var( i ) );
            diffs.push( dist( new_belief, old_beliefs[i], Prob::DISTLINF ) );
            old_beliefs[i] = new_belief;
        }

        if( props.verbose >= 3 )
            cerr << Name << "::doGBP: maxdiff " << diffs.maxDiff() << " after " << _iters+1 << " passes" << endl;
    }

    if( diffs.maxDiff() > _maxdiff )
        _maxdiff = diffs.maxDiff();

    if( props.verbose >= 1 ) {
        if( diffs.maxDiff() > props.tol ) {
            if( props.verbose == 1 )
                cerr << endl;
            cerr << Name << "::doGBP: WARNING: not converged within " << props.maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
        } else {
            if( props.verbose >= 2 )
                cerr << Name << "::doGBP: ";
            cerr << "converged in " << _iters << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return diffs.maxDiff();
}

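// Double-loop variant: negative inner counting numbers are clamped to zero before the outer
// loop starts; in each outer iteration the original outer regions are multiplied by
// _Qb[beta] ^ ((c_beta - c_beta_org) / |nbIR(beta)|) to compensate, after which the inner loop
// runs a few doGBP() passes (props.maxiter is temporarily set to 5). This is presumably the
// convergent double-loop construction of Heskes, Albers and Kappen, after whom the algorithm
// is named.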
double HAK::doDoubleLoop() {
    if( props.verbose >= 1 )
        cerr << "Starting " << identify() << "...";
    if( props.verbose >= 3)
        cerr << endl;

    double tic = toc();

    // Save original outer regions
    vector<FRegion> org_ORs = ORs;

    // Save original inner counting numbers and set negative counting numbers to zero
    vector<double> org_IR_cs( nrIRs(), 0.0 );
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        org_IR_cs[beta] = IR(beta).c();
        if( IR(beta).c() < 0.0 )
            IR(beta).c() = 0.0;
    }

    // Keep old beliefs to check convergence
    vector<Factor> old_beliefs;
    old_beliefs.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        old_beliefs.push_back( belief( var(i) ) );

    // Differences in single node beliefs
    Diffs diffs(nrVars(), 1.0);

    size_t outer_maxiter = props.maxiter;
    double outer_tol = props.tol;
    size_t outer_verbose = props.verbose;
    double org_maxdiff = _maxdiff;

    // Set parameters for inner loop
    props.maxiter = 5;
    props.verbose = outer_verbose ? outer_verbose - 1 : 0;

    size_t outer_iter = 0;
    size_t total_iter = 0;
    for( outer_iter = 0; outer_iter < outer_maxiter && diffs.maxDiff() > outer_tol; outer_iter++ ) {
        // Calculate new outer regions
        for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
            OR(alpha) = org_ORs[alpha];
            foreach( const Neighbor &beta, nbOR(alpha) )
                OR(alpha) *= _Qb[beta] ^ ((IR(beta).c() - org_IR_cs[beta]) / nbIR(beta).size());
        }

        // Inner loop
        if( isnan( doGBP() ) )
            return 1.0;

        // Calculate new single variable beliefs and compare with old ones
        for( size_t i = 0; i < nrVars(); ++i ) {
            Factor new_belief = belief( var( i ) );
            diffs.push( dist( new_belief, old_beliefs[i], Prob::DISTLINF ) );
            old_beliefs[i] = new_belief;
        }

        total_iter += Iterations();

        if( props.verbose >= 3 )
            cerr << Name << "::doDoubleLoop: maxdiff " << diffs.maxDiff() << " after " << total_iter << " passes" << endl;
    }

    // restore _maxiter, _verbose and _maxdiff
    props.maxiter = outer_maxiter;
    props.verbose = outer_verbose;
    _maxdiff = org_maxdiff;

    _iters = total_iter;
    if( diffs.maxDiff() > _maxdiff )
        _maxdiff = diffs.maxDiff();

    // Restore original outer regions
    ORs = org_ORs;

    // Restore original inner counting numbers
    for( size_t beta = 0; beta < nrIRs(); ++beta )
        IR(beta).c() = org_IR_cs[beta];

    if( props.verbose >= 1 ) {
        if( diffs.maxDiff() > props.tol ) {
            if( props.verbose == 1 )
                cerr << endl;
            cerr << Name << "::doDoubleLoop: WARNING: not converged within " << outer_maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
        } else {
            if( props.verbose >= 3 )
                cerr << Name << "::doDoubleLoop: ";
            cerr << "converged in " << total_iter << " passes (" << toc() - tic << " seconds)." << endl;
        }
    }

    return diffs.maxDiff();
}


double HAK::run() {
    if( props.doubleloop )
        return doDoubleLoop();
    else
        return doGBP();
}


Factor HAK::belief( const VarSet &ns ) const {
    vector<Factor>::const_iterator beta;
    for( beta = _Qb.begin(); beta != _Qb.end(); beta++ )
        if( beta->vars() >> ns )
            break;
    if( beta != _Qb.end() )
        return( beta->marginal(ns) );
    else {
        vector<Factor>::const_iterator alpha;
        for( alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
            if( alpha->vars() >> ns )
                break;
        DAI_ASSERT( alpha != _Qa.end() );
        return( alpha->marginal(ns) );
    }
}


Factor HAK::belief( const Var &n ) const {
    return belief( (VarSet)n );
}


vector<Factor> HAK::beliefs() const {
    vector<Factor> result;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        result.push_back( Qb(beta) );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        result.push_back( Qa(alpha) );
    return result;
}

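// Region-based estimate of the log partition sum: counting-number-weighted entropies of the
// inner and outer region beliefs plus the expected log outer-region factors, i.e.
//     logZ = sum_beta c_beta H(Q_beta) + sum_alpha ( c_alpha H(Q_alpha) + sum_x Q_alpha(x) log psi_alpha(x) ).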
Real HAK::logZ() const {
    Real s = 0.0;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        s += IR(beta).c() * Qb(beta).entropy();
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        s += OR(alpha).c() * Qa(alpha).entropy();
        s += (OR(alpha).log(true) * Qa(alpha)).sum();
    }
    return s;
}


} // end of namespace dai
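
/* Usage sketch (illustration only, not part of the original file; the parameter values and
 * the file name are made up):
 *
 *   FactorGraph fg;
 *   fg.ReadFromFile( "factorgraph.fg" );
 *
 *   PropertySet opts;
 *   opts.Set( "tol", 1e-9 );
 *   opts.Set( "maxiter", (size_t)100 );
 *   opts.Set( "verbose", (size_t)1 );
 *   opts.Set( "doubleloop", true );
 *   opts.Set( "clusters", string("MIN") );   // or "DELTA", or "LOOP" (then also set "loopdepth")
 *
 *   HAK hak( fg, opts );
 *   hak.init();
 *   hak.run();
 *   Factor marginal0 = hak.belief( fg.var(0) );
 *   Real logz = hak.logZ();
 */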