/* This file is part of libDAI - http://www.libdai.org/
 *
 * libDAI is licensed under the terms of the GNU General Public License version
 * 2, or (at your option) any later version. libDAI is distributed without any
 * warranty. See the file COPYING for more details.
 *
 * Copyright (C) 2006-2010 Joris Mooij [joris dot mooij at libdai dot org]
 * Copyright (C) 2006-2007 Radboud University Nijmegen, The Netherlands
 */


#include <iostream>
#include <stack>
#include <dai/jtree.h>


namespace dai {


using namespace std;

void JTree::setProperties( const PropertySet &opts ) {
    DAI_ASSERT( opts.hasKey("updates") );

    props.updates = opts.getStringAs<Properties::UpdateType>("updates");
    if( opts.hasKey("verbose") )
        props.verbose = opts.getStringAs<size_t>("verbose");
    else
        props.verbose = 0;
    if( opts.hasKey("inference") )
        props.inference = opts.getStringAs<Properties::InfType>("inference");
    else
        props.inference = Properties::InfType::SUMPROD;
    if( opts.hasKey("heuristic") )
        props.heuristic = opts.getStringAs<Properties::HeuristicType>("heuristic");
    else
        props.heuristic = Properties::HeuristicType::MINFILL;
    if( opts.hasKey("maxmem") )
        props.maxmem = opts.getStringAs<size_t>("maxmem");
    else
        props.maxmem = 0;
}


PropertySet JTree::getProperties() const {
    PropertySet opts;
    opts.set( "verbose", props.verbose );
    opts.set( "updates", props.updates );
    opts.set( "inference", props.inference );
    opts.set( "heuristic", props.heuristic );
    opts.set( "maxmem", props.maxmem );
    return opts;
}


string JTree::printProperties() const {
    stringstream s( stringstream::out );
    s << "[";
    s << "verbose=" << props.verbose << ",";
    s << "updates=" << props.updates << ",";
    s << "heuristic=" << props.heuristic << ",";
    s << "inference=" << props.inference << ",";
    s << "maxmem=" << props.maxmem << "]";
    return s.str();
}


JTree::JTree( const FactorGraph &fg, const PropertySet &opts, bool automatic ) : DAIAlgRG(), _mes(), _logZ(), RTree(), Qa(), Qb(), props() {
    setProperties( opts );

    if( automatic ) {
        // Create ClusterGraph which contains maximal factors as clusters
        ClusterGraph _cg( fg, true );
        if( props.verbose >= 3 )
            cerr << "Initial clusters: " << _cg << endl;

        // Use heuristic to guess optimal elimination sequence
        greedyVariableElimination::eliminationCostFunction ec(NULL);
        switch( (size_t)props.heuristic ) {
            case Properties::HeuristicType::MINNEIGHBORS:
                ec = eliminationCost_MinNeighbors;
                break;
            case Properties::HeuristicType::MINWEIGHT:
                ec = eliminationCost_MinWeight;
                break;
            case Properties::HeuristicType::MINFILL:
                ec = eliminationCost_MinFill;
                break;
            case Properties::HeuristicType::WEIGHTEDMINFILL:
                ec = eliminationCost_WeightedMinFill;
                break;
            default:
                DAI_THROW(UNKNOWN_ENUM_VALUE);
        }
        size_t fudge = 6; // this yields a rough estimate of the memory needed (for some reason not yet clearly understood)
        vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( ec ), props.maxmem / (sizeof(Real) * fudge) ).eraseNonMaximal().clusters();
        if( props.verbose >= 3 )
            cerr << "VarElim result: " << ElimVec << endl;

        // Estimate memory needed (rough upper bound)
        long double memneeded = 0;
        foreach( const VarSet& cl, ElimVec )
            memneeded += cl.nrStates();
        memneeded *= sizeof(Real) * fudge;
        if( props.verbose >= 1 ) {
            cerr << "Estimate of needed memory: " << memneeded / 1024 << "kB" << endl;
            cerr << "Maximum memory: ";
            if( props.maxmem )
                cerr << props.maxmem / 1024 << "kB" << endl;
            else
                cerr << "unlimited" << endl;
        }
        if( props.maxmem && memneeded > props.maxmem )
            DAI_THROW(OUT_OF_MEMORY);

        // Generate the junction tree corresponding to the elimination sequence
        GenerateJT( fg, ElimVec );
    }
}

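// Builds the junction tree for the clusters in cl: the clusters become nodes of a
// weighted graph (edge weight = size of the cluster intersection), a maximal spanning
// tree of that graph yields the junction tree RTree, and the corresponding region graph
// (outer regions = clusters, inner regions = separators) and beliefs Qa, Qb are set up.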
void JTree::construct( const FactorGraph &fg, const std::vector<VarSet> &cl, bool verify ) {
    // Copy the factor graph
    FactorGraph::operator=( fg );

    // Construct a weighted graph (each edge is weighted with the cardinality
    // of the intersection of the nodes, where the nodes are the elements of cl).
    WeightedGraph<int> JuncGraph;
    // Start by connecting all clusters with cluster zero, and weight zero,
    // in order to get a connected weighted graph
    for( size_t i = 1; i < cl.size(); i++ )
        JuncGraph[UEdge(i,0)] = 0;
    for( size_t i = 0; i < cl.size(); i++ ) {
        for( size_t j = i + 1; j < cl.size(); j++ ) {
            size_t w = (cl[i] & cl[j]).size();
            if( w )
                JuncGraph[UEdge(i,j)] = w;
        }
    }
    if( props.verbose >= 3 )
        cerr << "Weighted graph: " << JuncGraph << endl;

    // Construct maximal spanning tree using Prim's algorithm
    RTree = MaxSpanningTree( JuncGraph, true );
    if( props.verbose >= 3 )
        cerr << "Spanning tree: " << RTree << endl;
    DAI_DEBASSERT( RTree.size() == cl.size() - 1 );

    // Construct corresponding region graph

    // Create outer regions
    _ORs.clear();
    _ORs.reserve( cl.size() );
    for( size_t i = 0; i < cl.size(); i++ )
        _ORs.push_back( FRegion( Factor(cl[i], 1.0), 1.0 ) );

    // For each factor, find an outer region that subsumes that factor.
    // Then, multiply the outer region with that factor.
    _fac2OR.clear();
    _fac2OR.resize( nrFactors(), -1U );
    for( size_t I = 0; I < nrFactors(); I++ ) {
        size_t alpha;
        for( alpha = 0; alpha < nrORs(); alpha++ )
            if( OR(alpha).vars() >> factor(I).vars() ) {
                _fac2OR[I] = alpha;
                break;
            }
        if( verify )
            DAI_ASSERT( alpha != nrORs() );
    }
    recomputeORs();

    // Create inner regions and edges
    _IRs.clear();
    _IRs.reserve( RTree.size() );
    vector<Edge> edges;
    edges.reserve( 2 * RTree.size() );
    for( size_t i = 0; i < RTree.size(); i++ ) {
        edges.push_back( Edge( RTree[i].first, nrIRs() ) );
        edges.push_back( Edge( RTree[i].second, nrIRs() ) );
        // inner clusters have counting number -1, except if they are empty
        VarSet intersection = cl[RTree[i].first] & cl[RTree[i].second];
        _IRs.push_back( Region( intersection, intersection.size() ? -1.0 : 0.0 ) );
    }

    // create bipartite graph
    _G.construct( nrORs(), nrIRs(), edges.begin(), edges.end() );

    // Check counting numbers
#ifdef DAI_DEBUG
    checkCountingNumbers();
#endif

    // Create beliefs
    Qa.clear();
    Qa.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa.push_back( OR(alpha) );

    Qb.clear();
    Qb.reserve( nrIRs() );
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb.push_back( Factor( IR(beta), 1.0 ) );
}


void JTree::GenerateJT( const FactorGraph &fg, const std::vector<VarSet> &cl ) {
    construct( fg, cl, true );

    // Create messages
    _mes.clear();
    _mes.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        _mes.push_back( vector<Factor>() );
        _mes[alpha].reserve( nbOR(alpha).size() );
        foreach( const Neighbor &beta, nbOR(alpha) )
            _mes[alpha].push_back( Factor( IR(beta), 1.0 ) );
    }

    if( props.verbose >= 3 )
        cerr << "Regiongraph generated by JTree::GenerateJT: " << *this << endl;
}

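// Returns a pre-calculated belief for vs if vs is contained in some inner region
// (separator) or outer region (clique); otherwise throws BELIEF_NOT_AVAILABLE.
// Marginals over variable sets that are not contained in a single region can be
// obtained with calcMarginal() instead.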
Factor JTree::belief( const VarSet &vs ) const {
    vector<Factor>::const_iterator beta;
    for( beta = Qb.begin(); beta != Qb.end(); beta++ )
        if( beta->vars() >> vs )
            break;
    if( beta != Qb.end() ) {
        if( props.inference == Properties::InfType::SUMPROD )
            return( beta->marginal(vs) );
        else
            return( beta->maxMarginal(vs) );
    } else {
        vector<Factor>::const_iterator alpha;
        for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
            if( alpha->vars() >> vs )
                break;
        if( alpha == Qa.end() ) {
            DAI_THROW(BELIEF_NOT_AVAILABLE);
            return Factor();
        } else {
            if( props.inference == Properties::InfType::SUMPROD )
                return( alpha->marginal(vs) );
            else
                return( alpha->maxMarginal(vs) );
        }
    }
}


vector<Factor> JTree::beliefs() const {
    vector<Factor> result;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        result.push_back( Qb[beta] );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        result.push_back( Qa[alpha] );
    return result;
}

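// HUGIN-style propagation: initialize the clique beliefs Qa from the outer region
// potentials and the separator beliefs Qb to 1, then perform a CollectEvidence pass
// (from the leaves towards the root of RTree) followed by a DistributeEvidence pass
// (from the root back towards the leaves), accumulating the log partition sum in _logZ.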
void JTree::runHUGIN() {
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha] = OR(alpha);

    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb[beta].fill( 1.0 );

    // CollectEvidence
    _logZ = 0.0;
    for( size_t i = RTree.size(); (i--) != 0; ) {
        // Make outer region RTree[i].first consistent with outer region RTree[i].second
        // IR(i) = separator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].second].marginal( IR( i ), false );
        else
            new_Qb = Qa[RTree[i].second].maxMarginal( IR( i ), false );

        _logZ += log(new_Qb.normalize());
        Qa[RTree[i].first] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }
    if( RTree.empty() )
        _logZ += log(Qa[0].normalize());
    else
        _logZ += log(Qa[RTree[0].first].normalize());

    // DistributeEvidence
    for( size_t i = 0; i < RTree.size(); i++ ) {
        // Make outer region RTree[i].second consistent with outer region RTree[i].first
        // IR(i) = separator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].first].marginal( IR( i ) );
        else
            new_Qb = Qa[RTree[i].first].maxMarginal( IR( i ) );

        Qa[RTree[i].second] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }

    // Normalize
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha].normalize();
}

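// Shafer-Shenoy propagation: two passes of message updates over the separators
// (first towards the root of RTree, then back towards the leaves), after which the
// clique beliefs Qa and the separator beliefs Qb are computed from the collected
// messages; the log partition sum is accumulated in _logZ along the way.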
void JTree::runShaferShenoy() {
    // First pass
    _logZ = 0.0;
    for( size_t e = nrIRs(); (e--) != 0; ) {
        // send a message from RTree[e].second to RTree[e].first
        // or, actually, from the separator IR(e) to RTree[e].first

        size_t i = nbIR(e)[1].node; // = RTree[e].second
        size_t j = nbIR(e)[0].node; // = RTree[e].first
        size_t _e = nbIR(e)[0].dual;

        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e), false );
        else
            message( j, _e ) = msg.maxMarginal( IR(e), false );
        _logZ += log( message(j,_e).normalize() );
    }

    // Second pass
    for( size_t e = 0; e < nrIRs(); e++ ) {
        size_t i = nbIR(e)[0].node; // = RTree[e].first
        size_t j = nbIR(e)[1].node; // = RTree[e].second
        size_t _e = nbIR(e)[1].dual;

        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e) );
        else
            message( j, _e ) = msg.maxMarginal( IR(e) );
    }

    // Calculate beliefs
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        Factor piet = OR(alpha);
        foreach( const Neighbor &k, nbOR(alpha) )
            piet *= message( alpha, k.iter );
        if( nrIRs() == 0 ) {
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else if( alpha == nbIR(0)[0].node /*RTree[0].first*/ ) {
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else
            Qa[alpha] = piet.normalized();
    }

    // Only for logZ (and for belief)...
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        if( props.inference == Properties::InfType::SUMPROD )
            Qb[beta] = Qa[nbIR(beta)[0].node].marginal( IR(beta) );
        else
            Qb[beta] = Qa[nbIR(beta)[0].node].maxMarginal( IR(beta) );
    }
}

Real JTree::run() {
    if( props.updates == Properties::UpdateType::HUGIN )
        runHUGIN();
    else if( props.updates == Properties::UpdateType::SHSH )
        runShaferShenoy();
    return 0.0;
}


Real JTree::logZ() const {
/*  Real s = 0.0;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        s += IR(beta).c() * Qb[beta].entropy();
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        s += OR(alpha).c() * Qa[alpha].entropy();
        s += (OR(alpha).log(true) * Qa[alpha]).sum();
    }
    DAI_ASSERT( abs( _logZ - s ) < 1e-8 );
    return s;*/
    return _logZ;
}

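// Reorders the junction tree so that the clique with maximal state space overlap with
// vs becomes the root, collects the subtree that covers all variables of vs (plus,
// if PreviousRoot is given, the path from PreviousRoot to the new root), and stores
// the reordered edge list in Tree with the subtree edges first.
// Returns the number of edges in that subtree.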
size_t JTree::findEfficientTree( const VarSet& vs, RootedTree &Tree, size_t PreviousRoot ) const {
    // find new root clique (the one with maximal statespace overlap with vs)
    long double maxval = 0.0;
    size_t maxalpha = 0;
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        long double val = VarSet(vs & OR(alpha).vars()).nrStates();
        if( val > maxval ) {
            maxval = val;
            maxalpha = alpha;
        }
    }

    // reorder the tree edges such that maxalpha becomes the new root
    RootedTree newTree( GraphEL( RTree.begin(), RTree.end() ), maxalpha );

    // identify subtree that contains all variables of vs which are not in the new root
    set<DEdge> subTree;
    // for each variable in vs
    for( VarSet::const_iterator n = vs.begin(); n != vs.end(); n++ ) {
        for( size_t e = 0; e < newTree.size(); e++ ) {
            if( OR(newTree[e].second).vars().contains( *n ) ) {
                size_t f = e;
                subTree.insert( newTree[f] );
                size_t pos = newTree[f].first;
                for( ; f > 0; f-- )
                    if( newTree[f-1].second == pos ) {
                        subTree.insert( newTree[f-1] );
                        pos = newTree[f-1].first;
                    }
            }
        }
    }
    if( PreviousRoot != (size_t)-1 && PreviousRoot != maxalpha) {
        // find the first occurrence of PreviousRoot in the tree, which is closest to the new root
        size_t e = 0;
        for( ; e != newTree.size(); e++ ) {
            if( newTree[e].second == PreviousRoot )
                break;
        }
        DAI_ASSERT( e != newTree.size() );

        // trace back the path to the root and add its edges to subTree
        subTree.insert( newTree[e] );
        size_t pos = newTree[e].first;
        for( ; e > 0; e-- )
            if( newTree[e-1].second == pos ) {
                subTree.insert( newTree[e-1] );
                pos = newTree[e-1].first;
            }
    }

    // Resulting Tree is a reordered copy of newTree
    // First add edges in subTree to Tree
    Tree.clear();
    vector<DEdge> remTree;
    for( RootedTree::const_iterator e = newTree.begin(); e != newTree.end(); e++ )
        if( subTree.count( *e ) )
            Tree.push_back( *e );
        else
            remTree.push_back( *e );
    size_t subTreeSize = Tree.size();
    // Then add remaining edges
    copy( remTree.begin(), remTree.end(), back_inserter( Tree ) );

    return subTreeSize;
}

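// Calculates a marginal over an arbitrary variable set vs. If vs is contained in a
// single separator or clique, the pre-calculated belief is returned; otherwise
// inference is repeated on an efficient subtree for each joint state of the variables
// of vs that lie outside the subtree's root clique, clamping those variables and
// weighting the resulting conditional marginals by their (unnormalized) probabilities.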
Factor JTree::calcMarginal( const VarSet& vs ) {
    vector<Factor>::const_iterator beta;
    for( beta = Qb.begin(); beta != Qb.end(); beta++ )
        if( beta->vars() >> vs )
            break;
    if( beta != Qb.end() ) {
        if( props.inference == Properties::InfType::SUMPROD )
            return( beta->marginal(vs) );
        else
            return( beta->maxMarginal(vs) );
    } else {
        vector<Factor>::const_iterator alpha;
        for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
            if( alpha->vars() >> vs )
                break;
        if( alpha != Qa.end() ) {
            if( props.inference == Properties::InfType::SUMPROD )
                return( alpha->marginal(vs) );
            else
                return( alpha->maxMarginal(vs) );
        } else {
            // Find subtree to do efficient inference
            RootedTree T;
            size_t Tsize = findEfficientTree( vs, T );

            // Find remaining variables (which are not in the new root)
            VarSet vsrem = vs / OR(T.front().first).vars();
            Factor Pvs (vs, 0.0);

            // Save Qa and Qb on the subtree
            map<size_t,Factor> Qa_old;
            map<size_t,Factor> Qb_old;
            vector<size_t> b(Tsize, 0);
            for( size_t i = Tsize; (i--) != 0; ) {
                size_t alpha1 = T[i].first;
                size_t alpha2 = T[i].second;
                size_t beta;
                for( beta = 0; beta < nrIRs(); beta++ )
                    if( UEdge( RTree[beta].first, RTree[beta].second ) == UEdge( alpha1, alpha2 ) )
                        break;
                DAI_ASSERT( beta != nrIRs() );
                b[i] = beta;

                if( !Qa_old.count( alpha1 ) )
                    Qa_old[alpha1] = Qa[alpha1];
                if( !Qa_old.count( alpha2 ) )
                    Qa_old[alpha2] = Qa[alpha2];
                if( !Qb_old.count( beta ) )
                    Qb_old[beta] = Qb[beta];
            }

            // For all states of vsrem
            for( State s(vsrem); s.valid(); s++ ) {
                // CollectEvidence
                Real logZ = 0.0;
                for( size_t i = Tsize; (i--) != 0; ) {
                    // Make outer region T[i].first consistent with outer region T[i].second
                    // IR(b[i]) = separator OR(T[i].first) && OR(T[i].second)

                    for( VarSet::const_iterator n = vsrem.begin(); n != vsrem.end(); n++ )
                        if( Qa[T[i].second].vars() >> *n ) {
                            Factor piet( *n, 0.0 );
                            piet.set( s(*n), 1.0 );
                            Qa[T[i].second] *= piet;
                        }

                    Factor new_Qb;
                    if( props.inference == Properties::InfType::SUMPROD )
                        new_Qb = Qa[T[i].second].marginal( IR( b[i] ), false );
                    else
                        new_Qb = Qa[T[i].second].maxMarginal( IR( b[i] ), false );
                    logZ += log(new_Qb.normalize());
                    Qa[T[i].first] *= new_Qb / Qb[b[i]];
                    Qb[b[i]] = new_Qb;
                }
                logZ += log(Qa[T[0].first].normalize());

                Factor piet( vsrem, 0.0 );
                piet.set( s, exp(logZ) );
                if( props.inference == Properties::InfType::SUMPROD )
                    Pvs += piet * Qa[T[0].first].marginal( vs / vsrem, false );      // OPTIMIZE ME
                else
                    Pvs += piet * Qa[T[0].first].maxMarginal( vs / vsrem, false );   // OPTIMIZE ME

                // Restore clamped beliefs
                for( map<size_t,Factor>::const_iterator alpha = Qa_old.begin(); alpha != Qa_old.end(); alpha++ )
                    Qa[alpha->first] = alpha->second;
                for( map<size_t,Factor>::const_iterator beta = Qb_old.begin(); beta != Qb_old.end(); beta++ )
                    Qb[beta->first] = beta->second;
            }

            return( Pvs.normalized() );
        }
    }
}

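// Estimates an upper bound on the treewidth of fg (and on the number of states of the
// largest cluster) by running greedy variable elimination with cost function fn;
// the maxStates argument bounds the state space allowed during elimination.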
std::pair<size_t,long double> boundTreewidth( const FactorGraph &fg, greedyVariableElimination::eliminationCostFunction fn, size_t maxStates ) {
    // Create cluster graph from factor graph
    ClusterGraph _cg( fg, true );

    // Obtain elimination sequence
    vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( fn ), maxStates ).eraseNonMaximal().clusters();

    // Calculate treewidth
    size_t treewidth = 0;
    long double nrstates = 0.0;
    for( size_t i = 0; i < ElimVec.size(); i++ ) {
        if( ElimVec[i].size() > treewidth )
            treewidth = ElimVec[i].size();
        long double s = ElimVec[i].nrStates();
        if( s > nrstates )
            nrstates = s;
    }

    return make_pair(treewidth, nrstates);
}

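// Computes a MAP state by traversing the outer regions depth-first, picking in each
// region the most probable state that is consistent with the variable assignments made
// so far; throws a RUNTIME_ERROR if no consistent assignment exists (which can happen
// when the max-beliefs are inconsistent).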
std::vector<size_t> JTree::findMaximum() const {
    vector<size_t> maximum( nrVars() );
    vector<bool> visitedVars( nrVars(), false );
    vector<bool> visitedORs( nrORs(), false );
    stack<size_t> scheduledORs;
    scheduledORs.push( 0 );
    while( !scheduledORs.empty() ) {
        size_t alpha = scheduledORs.top();
        scheduledORs.pop();
        if( visitedORs[alpha] )
            continue;
        visitedORs[alpha] = true;

        // Get marginal of outer region alpha
        Prob probF = Qa[alpha].p();

        // The allowed configurations are restricted by the variables assigned so far:
        // pick the argmax amongst the allowed states
        Real maxProb = -numeric_limits<Real>::max();
        State maxState( OR(alpha).vars() );
        size_t maxcount = 0;
        for( State s( OR(alpha).vars() ); s.valid(); ++s ) {
            // First, calculate whether this state is consistent with variables that
            // have been assigned already
            bool allowedState = true;
            foreach( const Var& j, OR(alpha).vars() ) {
                size_t j_index = findVar(j);
                if( visitedVars[j_index] && maximum[j_index] != s(j) ) {
                    allowedState = false;
                    break;
                }
            }
            // If it is consistent, check if its probability is larger than what we have seen so far
            if( allowedState ) {
                if( probF[s] > maxProb ) {
                    maxState = s;
                    maxProb = probF[s];
                    maxcount = 1;
                } else
                    maxcount++;
            }
        }
        DAI_ASSERT( maxProb != 0.0 );
        DAI_ASSERT( Qa[alpha][maxState] != 0.0 );

        // Decode the argmax
        foreach( const Var& j, OR(alpha).vars() ) {
            size_t j_index = findVar(j);
            if( visitedVars[j_index] ) {
                // We have already visited j earlier - hopefully our state is consistent
                if( maximum[j_index] != maxState( j ) )
                    DAI_THROWE(RUNTIME_ERROR,"MAP state inconsistent due to loops");
            } else {
                // We found a consistent state for variable j
                visitedVars[j_index] = true;
                maximum[j_index] = maxState( j );
                foreach( const Neighbor &beta, nbOR(alpha) )
                    foreach( const Neighbor &alpha2, nbIR(beta) )
                        if( !visitedORs[alpha2] )
                            scheduledORs.push(alpha2);
            }
        }
    }
    return maximum;
}


} // end of namespace dai