Now uses GMP big integers to represent linear states / total number of states
[libdai.git] / src / jtree.cpp
1 /* This file is part of libDAI - http://www.libdai.org/
2 *
3 * Copyright (c) 2006-2011, The libDAI authors. All rights reserved.
4 *
5 * Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
6 */
7
8
9 #include <iostream>
10 #include <stack>
11 #include <dai/jtree.h>
12
13
14 namespace dai {
15
16
17 using namespace std;
18
19
20 void JTree::setProperties( const PropertySet &opts ) {
21 DAI_ASSERT( opts.hasKey("updates") );
22
23 props.updates = opts.getStringAs<Properties::UpdateType>("updates");
24 if( opts.hasKey("verbose") )
25 props.verbose = opts.getStringAs<size_t>("verbose");
26 else
27 props.verbose = 0;
28 if( opts.hasKey("inference") )
29 props.inference = opts.getStringAs<Properties::InfType>("inference");
30 else
31 props.inference = Properties::InfType::SUMPROD;
32 if( opts.hasKey("heuristic") )
33 props.heuristic = opts.getStringAs<Properties::HeuristicType>("heuristic");
34 else
35 props.heuristic = Properties::HeuristicType::MINFILL;
36 if( opts.hasKey("maxmem") )
37 props.maxmem = opts.getStringAs<size_t>("maxmem");
38 else
39 props.maxmem = 0;
40 }
41
42
43 PropertySet JTree::getProperties() const {
44 PropertySet opts;
45 opts.set( "verbose", props.verbose );
46 opts.set( "updates", props.updates );
47 opts.set( "inference", props.inference );
48 opts.set( "heuristic", props.heuristic );
49 opts.set( "maxmem", props.maxmem );
50 return opts;
51 }
52
53
54 string JTree::printProperties() const {
55 stringstream s( stringstream::out );
56 s << "[";
57 s << "verbose=" << props.verbose << ",";
58 s << "updates=" << props.updates << ",";
59 s << "heuristic=" << props.heuristic << ",";
60 s << "inference=" << props.inference << ",";
61 s << "maxmem=" << props.maxmem << "]";
62 return s.str();
63 }
64
65
// Constructor.
//   fg:        factor graph to perform inference on
//   opts:      parameters (see setProperties() for the recognized keys)
//   automatic: if true, a junction tree is built automatically using the
//              elimination heuristic selected by props.heuristic; otherwise
//              the caller is expected to invoke GenerateJT() itself.
// Throws OUT_OF_MEMORY if props.maxmem is nonzero and the estimated memory
// requirement of the chosen junction tree exceeds it.
JTree::JTree( const FactorGraph &fg, const PropertySet &opts, bool automatic ) : DAIAlgRG(), _mes(), _logZ(), RTree(), Qa(), Qb(), props() {
    setProperties( opts );

    if( automatic ) {
        // Create ClusterGraph which contains maximal factors as clusters
        ClusterGraph _cg( fg, true );
        if( props.verbose >= 3 )
            cerr << "Initial clusters: " << _cg << endl;

        // Use heuristic to guess optimal elimination sequence
        greedyVariableElimination::eliminationCostFunction ec(NULL);
        switch( (size_t)props.heuristic ) {
            case Properties::HeuristicType::MINNEIGHBORS:
                ec = eliminationCost_MinNeighbors;
                break;
            case Properties::HeuristicType::MINWEIGHT:
                ec = eliminationCost_MinWeight;
                break;
            case Properties::HeuristicType::MINFILL:
                ec = eliminationCost_MinFill;
                break;
            case Properties::HeuristicType::WEIGHTEDMINFILL:
                ec = eliminationCost_WeightedMinFill;
                break;
            default:
                DAI_THROW(UNKNOWN_ENUM_VALUE);
        }
        size_t fudge = 6; // this yields a rough estimate of the memory needed (for some reason not yet clearly understood)
        // If maxmem is nonzero, the derived state-count cap is passed to VarElim
        // so that elimination can be stopped early (if maxmem is 0 the cap is 0,
        // presumably meaning "unlimited" — see ClusterGraph::VarElim)
        vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( ec ), props.maxmem / (sizeof(Real) * fudge) ).eraseNonMaximal().clusters();
        if( props.verbose >= 3 )
            cerr << "VarElim result: " << ElimVec << endl;

        // Estimate memory needed (rough upper bound): sum of the state space
        // sizes of all clusters, times sizeof(Real) times the fudge factor.
        // BigInt is used because the total state count can overflow size_t.
        BigInt memneeded = 0;
        foreach( const VarSet& cl, ElimVec )
            memneeded += cl.nrStates();
        memneeded *= sizeof(Real) * fudge;
        if( props.verbose >= 1 ) {
            cerr << "Estimate of needed memory: " << memneeded / 1024 << "kB" << endl;
            cerr << "Maximum memory: ";
            if( props.maxmem )
                cerr << props.maxmem / 1024 << "kB" << endl;
            else
                cerr << "unlimited" << endl;
        }
        if( props.maxmem && memneeded > props.maxmem )
            DAI_THROW(OUT_OF_MEMORY);

        // Generate the junction tree corresponding to the elimination sequence
        GenerateJT( fg, ElimVec );
    }
}
118
119
// Builds the junction tree and the corresponding region graph from a set of
// clusters, and initializes the beliefs Qa (cliques) and Qb (separators).
//   fg:     factor graph to copy into this object
//   cl:     clusters that become the outer regions (assumed maximal)
//   verify: if true, assert that every factor is subsumed by some cluster
void JTree::construct( const FactorGraph &fg, const std::vector<VarSet> &cl, bool verify ) {
    // Copy the factor graph
    FactorGraph::operator=( fg );

    // Construct a weighted graph (each edge is weighted with the cardinality
    // of the intersection of the nodes, where the nodes are the elements of cl).
    WeightedGraph<int> JuncGraph;
    // Start by connecting all clusters with cluster zero, and weight zero,
    // in order to get a connected weighted graph
    for( size_t i = 1; i < cl.size(); i++ )
        JuncGraph[UEdge(i,0)] = 0;
    for( size_t i = 0; i < cl.size(); i++ ) {
        for( size_t j = i + 1; j < cl.size(); j++ ) {
            size_t w = (cl[i] & cl[j]).size();
            if( w )
                JuncGraph[UEdge(i,j)] = w;
        }
    }
    if( props.verbose >= 3 )
        cerr << "Weightedgraph: " << JuncGraph << endl;

    // Construct maximal spanning tree using Prim's algorithm
    // (maximizing the total separator weight yields a junction tree)
    RTree = MaxSpanningTree( JuncGraph, true );
    if( props.verbose >= 3 )
        cerr << "Spanning tree: " << RTree << endl;
    // A spanning tree over cl.size() nodes has exactly cl.size()-1 edges
    DAI_DEBASSERT( RTree.size() == cl.size() - 1 );

    // Construct corresponding region graph

    // Create outer regions: one per cluster, with counting number 1
    _ORs.clear();
    _ORs.reserve( cl.size() );
    for( size_t i = 0; i < cl.size(); i++ )
        _ORs.push_back( FRegion( Factor(cl[i], 1.0), 1.0 ) );

    // For each factor, find an outer region that subsumes that factor.
    // Then, multiply the outer region with that factor.
    _fac2OR.clear();
    _fac2OR.resize( nrFactors(), -1U );
    for( size_t I = 0; I < nrFactors(); I++ ) {
        size_t alpha;
        for( alpha = 0; alpha < nrORs(); alpha++ )
            if( OR(alpha).vars() >> factor(I).vars() ) {
                _fac2OR[I] = alpha;
                break;
            }
        // alpha == nrORs() here means no cluster subsumes factor I
        if( verify )
            DAI_ASSERT( alpha != nrORs() );
    }
    recomputeORs();

    // Create inner regions (separators) and edges: each tree edge yields one
    // inner region connected to both of its outer regions
    _IRs.clear();
    _IRs.reserve( RTree.size() );
    vector<Edge> edges;
    edges.reserve( 2 * RTree.size() );
    for( size_t i = 0; i < RTree.size(); i++ ) {
        edges.push_back( Edge( RTree[i].first, nrIRs() ) );
        edges.push_back( Edge( RTree[i].second, nrIRs() ) );
        // inner clusters have counting number -1, except if they are empty
        VarSet intersection = cl[RTree[i].first] & cl[RTree[i].second];
        _IRs.push_back( Region( intersection, intersection.size() ? -1.0 : 0.0 ) );
    }

    // create bipartite graph
    _G.construct( nrORs(), nrIRs(), edges.begin(), edges.end() );

    // Check counting numbers
#ifdef DAI_DEBUG
    checkCountingNumbers();
#endif

    // Create beliefs: clique beliefs start as the outer region factors...
    Qa.clear();
    Qa.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa.push_back( OR(alpha) );

    // ...and separator beliefs start uniform
    Qb.clear();
    Qb.reserve( nrIRs() );
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb.push_back( Factor( IR(beta), 1.0 ) );
}
203
204
205 void JTree::GenerateJT( const FactorGraph &fg, const std::vector<VarSet> &cl ) {
206 construct( fg, cl, true );
207
208 // Create messages
209 _mes.clear();
210 _mes.reserve( nrORs() );
211 for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
212 _mes.push_back( vector<Factor>() );
213 _mes[alpha].reserve( nbOR(alpha).size() );
214 foreach( const Neighbor &beta, nbOR(alpha) )
215 _mes[alpha].push_back( Factor( IR(beta), 1.0 ) );
216 }
217
218 if( props.verbose >= 3 )
219 cerr << "Regiongraph generated by JTree::GenerateJT: " << *this << endl;
220 }
221
222
223 Factor JTree::belief( const VarSet &vs ) const {
224 vector<Factor>::const_iterator beta;
225 for( beta = Qb.begin(); beta != Qb.end(); beta++ )
226 if( beta->vars() >> vs )
227 break;
228 if( beta != Qb.end() ) {
229 if( props.inference == Properties::InfType::SUMPROD )
230 return( beta->marginal(vs) );
231 else
232 return( beta->maxMarginal(vs) );
233 } else {
234 vector<Factor>::const_iterator alpha;
235 for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
236 if( alpha->vars() >> vs )
237 break;
238 if( alpha == Qa.end() ) {
239 DAI_THROW(BELIEF_NOT_AVAILABLE);
240 return Factor();
241 } else {
242 if( props.inference == Properties::InfType::SUMPROD )
243 return( alpha->marginal(vs) );
244 else
245 return( alpha->maxMarginal(vs) );
246 }
247 }
248 }
249
250
251 vector<Factor> JTree::beliefs() const {
252 vector<Factor> result;
253 for( size_t beta = 0; beta < nrIRs(); beta++ )
254 result.push_back( Qb[beta] );
255 for( size_t alpha = 0; alpha < nrORs(); alpha++ )
256 result.push_back( Qa[alpha] );
257 return result;
258 }
259
260
// Exact inference with the HUGIN algorithm: a CollectEvidence sweep towards
// the root of RTree followed by a DistributeEvidence sweep away from it.
// Updates the clique beliefs Qa and separator beliefs Qb in place and
// accumulates the log partition sum in _logZ.
void JTree::runHUGIN() {
    // Initialize clique beliefs with the outer region factors
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha] = OR(alpha);

    // Initialize separator beliefs to uniform
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb[beta].fill( 1.0 );

    // CollectEvidence: iterate over the tree edges in reverse order,
    // i.e. from the leaves towards the root
    _logZ = 0.0;
    for( size_t i = RTree.size(); (i--) != 0; ) {
        // Make outer region RTree[i].first consistent with outer region RTree[i].second
        // IR(i) = seperator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].second].marginal( IR( i ), false );
        else
            new_Qb = Qa[RTree[i].second].maxMarginal( IR( i ), false );

        // The normalization constants of the (unnormalized) separator
        // marginals accumulate into the log partition sum
        _logZ += log(new_Qb.normalize());
        Qa[RTree[i].first] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }
    // Add the normalization constant of the root clique
    if( RTree.empty() )
        _logZ += log(Qa[0].normalize() );
    else
        _logZ += log(Qa[RTree[0].first].normalize());

    // DistributeEvidence: iterate over the tree edges in forward order,
    // i.e. from the root towards the leaves
    for( size_t i = 0; i < RTree.size(); i++ ) {
        // Make outer region RTree[i].second consistent with outer region RTree[i].first
        // IR(i) = seperator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].first].marginal( IR( i ) );
        else
            new_Qb = Qa[RTree[i].first].maxMarginal( IR( i ) );

        Qa[RTree[i].second] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }

    // Normalize the clique beliefs
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha].normalize();
}
306
307
// Exact inference with the Shafer-Shenoy algorithm: two message-passing
// sweeps over the separators (towards the root, then away from it), after
// which the clique beliefs Qa and separator beliefs Qb are computed from
// the messages. Accumulates the log partition sum in _logZ.
void JTree::runShaferShenoy() {
    // First pass: messages towards the root
    _logZ = 0.0;
    for( size_t e = nrIRs(); (e--) != 0; ) {
        // send a message from RTree[e].second to RTree[e].first
        // or, actually, from the seperator IR(e) to RTree[e].first

        size_t i = nbIR(e)[1].node; // = RTree[e].second
        size_t j = nbIR(e)[0].node; // = RTree[e].first
        size_t _e = nbIR(e)[0].dual;

        // Message = region factor times all incoming messages,
        // except the one on separator e itself
        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e), false );
        else
            message( j, _e ) = msg.maxMarginal( IR(e), false );
        // Normalization constants of the unnormalized messages sum to logZ
        _logZ += log( message(j,_e).normalize() );
    }

    // Second pass: messages away from the root
    for( size_t e = 0; e < nrIRs(); e++ ) {
        size_t i = nbIR(e)[0].node; // = RTree[e].first
        size_t j = nbIR(e)[1].node; // = RTree[e].second
        size_t _e = nbIR(e)[1].dual;

        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e) );
        else
            message( j, _e ) = msg.maxMarginal( IR(e) );
    }

    // Calculate beliefs: product of the region factor and all incoming messages
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        Factor piet = OR(alpha);
        foreach( const Neighbor &k, nbOR(alpha) )
            piet *= message( alpha, k.iter );
        if( nrIRs() == 0 ) {
            // Single-clique tree: its normalization constant completes logZ
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else if( alpha == nbIR(0)[0].node /*RTree[0].first*/ ) {
            // Root clique: its normalization constant completes logZ
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else
            Qa[alpha] = piet.normalized();
    }

    // Only for logZ (and for belief)...
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        if( props.inference == Properties::InfType::SUMPROD )
            Qb[beta] = Qa[nbIR(beta)[0].node].marginal( IR(beta) );
        else
            Qb[beta] = Qa[nbIR(beta)[0].node].maxMarginal( IR(beta) );
    }
}
369
370
371 Real JTree::run() {
372 if( props.updates == Properties::UpdateType::HUGIN )
373 runHUGIN();
374 else if( props.updates == Properties::UpdateType::SHSH )
375 runShaferShenoy();
376 return 0.0;
377 }
378
379
// Returns the logarithm of the partition sum computed during the last run().
// The commented-out code below recomputes logZ from the region-graph free
// energy and asserts agreement with _logZ — kept as a consistency check.
Real JTree::logZ() const {
/*  Real s = 0.0;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        s += IR(beta).c() * Qb[beta].entropy();
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        s += OR(alpha).c() * Qa[alpha].entropy();
        s += (OR(alpha).log(true) * Qa[alpha]).sum();
    }
    DAI_ASSERT( abs( _logZ - s ) < 1e-8 );
    return s;*/
    return _logZ;
}
392
393
// Reorders the junction tree so that inference for the variables vs becomes
// efficient: the clique with maximal state-space overlap with vs becomes the
// root, and Tree receives the reordered edges with the subtree covering all
// variables of vs (and the clique PreviousRoot, if given) listed first.
//   vs:           variables of interest
//   Tree:         output: the reordered edge list
//   PreviousRoot: previous root clique, or (size_t)-1 if none
// Returns the number of edges in the leading subtree.
size_t JTree::findEfficientTree( const VarSet& vs, RootedTree &Tree, size_t PreviousRoot ) const {
    // find new root clique (the one with maximal statespace overlap with vs)
    BigInt maxval = 0;
    size_t maxalpha = 0;
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        BigInt val = VarSet(vs & OR(alpha).vars()).nrStates();
        if( val > maxval ) {
            maxval = val;
            maxalpha = alpha;
        }
    }

    // reorder the tree edges such that maxalpha becomes the new root
    RootedTree newTree( GraphEL( RTree.begin(), RTree.end() ), maxalpha );

    // identify subtree that contains all variables of vs which are not in the new root
    set<DEdge> subTree;
    // for each variable in vs
    for( VarSet::const_iterator n = vs.begin(); n != vs.end(); n++ ) {
        for( size_t e = 0; e < newTree.size(); e++ ) {
            if( OR(newTree[e].second).vars().contains( *n ) ) {
                // add this edge and walk back up to the root, adding each
                // ancestor edge along the way (edges are ordered root-first,
                // so the parent edge always precedes its children)
                size_t f = e;
                subTree.insert( newTree[f] );
                size_t pos = newTree[f].first;
                for( ; f > 0; f-- )
                    if( newTree[f-1].second == pos ) {
                        subTree.insert( newTree[f-1] );
                        pos = newTree[f-1].first;
                    }
            }
        }
    }
    if( PreviousRoot != (size_t)-1 && PreviousRoot != maxalpha) {
        // find first occurence of PreviousRoot in the tree, which is closest to the new root
        size_t e = 0;
        for( ; e != newTree.size(); e++ ) {
            if( newTree[e].second == PreviousRoot )
                break;
        }
        DAI_ASSERT( e != newTree.size() );

        // track-back path to root and add edges to subTree
        subTree.insert( newTree[e] );
        size_t pos = newTree[e].first;
        for( ; e > 0; e-- )
            if( newTree[e-1].second == pos ) {
                subTree.insert( newTree[e-1] );
                pos = newTree[e-1].first;
            }
    }

    // Resulting Tree is a reordered copy of newTree
    // First add edges in subTree to Tree
    Tree.clear();
    vector<DEdge> remTree;
    for( RootedTree::const_iterator e = newTree.begin(); e != newTree.end(); e++ )
        if( subTree.count( *e ) )
            Tree.push_back( *e );
        else
            remTree.push_back( *e );
    size_t subTreeSize = Tree.size();
    // Then add remaining edges
    copy( remTree.begin(), remTree.end(), back_inserter( Tree ) );

    return subTreeSize;
}
460
461
// Computes the (normalized) marginal on vs exactly, even if vs is not
// contained in any single separator or clique. In that case the variables
// of vs outside the root clique are clamped to each of their joint states
// in turn and a CollectEvidence pass is run on the relevant subtree for
// every such state. This temporarily modifies Qa and Qb (restored before
// returning), which is why the method is not const.
// Note: run() must have been called first so that Qa/Qb are consistent.
Factor JTree::calcMarginal( const VarSet& vs ) {
    // Easy case: some separator belief covers vs
    vector<Factor>::const_iterator beta;
    for( beta = Qb.begin(); beta != Qb.end(); beta++ )
        if( beta->vars() >> vs )
            break;
    if( beta != Qb.end() ) {
        if( props.inference == Properties::InfType::SUMPROD )
            return( beta->marginal(vs) );
        else
            return( beta->maxMarginal(vs) );
    } else {
        // Next easiest case: some clique belief covers vs
        vector<Factor>::const_iterator alpha;
        for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
            if( alpha->vars() >> vs )
                break;
        if( alpha != Qa.end() ) {
            if( props.inference == Properties::InfType::SUMPROD )
                return( alpha->marginal(vs) );
            else
                return( alpha->maxMarginal(vs) );
        } else {
            // Find subtree to do efficient inference
            RootedTree T;
            size_t Tsize = findEfficientTree( vs, T );

            // Find remaining variables (which are not in the new root)
            VarSet vsrem = vs / OR(T.front().first).vars();
            // Accumulator for the result, built up one clamped state at a time
            Factor Pvs (vs, 0.0);

            // Save Qa and Qb on the subtree, so they can be restored after clamping
            map<size_t,Factor> Qa_old;
            map<size_t,Factor> Qb_old;
            // b[i] = index of the inner region (separator) belonging to subtree edge T[i]
            vector<size_t> b(Tsize, 0);
            for( size_t i = Tsize; (i--) != 0; ) {
                size_t alpha1 = T[i].first;
                size_t alpha2 = T[i].second;
                size_t beta;
                for( beta = 0; beta < nrIRs(); beta++ )
                    if( UEdge( RTree[beta].first, RTree[beta].second ) == UEdge( alpha1, alpha2 ) )
                        break;
                DAI_ASSERT( beta != nrIRs() );
                b[i] = beta;

                if( !Qa_old.count( alpha1 ) )
                    Qa_old[alpha1] = Qa[alpha1];
                if( !Qa_old.count( alpha2 ) )
                    Qa_old[alpha2] = Qa[alpha2];
                if( !Qb_old.count( beta ) )
                    Qb_old[beta] = Qb[beta];
            }

            // For all states of vsrem
            for( State s(vsrem); s.valid(); s++ ) {
                // CollectEvidence on the subtree, with vsrem clamped to state s
                Real logZ = 0.0;
                for( size_t i = Tsize; (i--) != 0; ) {
                    // Make outer region T[i].first consistent with outer region T[i].second
                    // IR(i) = seperator OR(T[i].first) && OR(T[i].second)

                    // Clamp the variables of vsrem occurring in this clique
                    // by multiplying with the corresponding delta factor
                    for( VarSet::const_iterator n = vsrem.begin(); n != vsrem.end(); n++ )
                        if( Qa[T[i].second].vars() >> *n ) {
                            Factor piet( *n, 0.0 );
                            piet.set( s(*n), 1.0 );
                            Qa[T[i].second] *= piet;
                        }

                    Factor new_Qb;
                    if( props.inference == Properties::InfType::SUMPROD )
                        new_Qb = Qa[T[i].second].marginal( IR( b[i] ), false );
                    else
                        new_Qb = Qa[T[i].second].maxMarginal( IR( b[i] ), false );
                    logZ += log(new_Qb.normalize());
                    Qa[T[i].first] *= new_Qb / Qb[b[i]];
                    Qb[b[i]] = new_Qb;
                }
                logZ += log(Qa[T[0].first].normalize());

                // Weight the root marginal by the partition sum of this clamped state
                Factor piet( vsrem, 0.0 );
                piet.set( s, exp(logZ) );
                if( props.inference == Properties::InfType::SUMPROD )
                    Pvs += piet * Qa[T[0].first].marginal( vs / vsrem, false );      // OPTIMIZE ME
                else
                    Pvs += piet * Qa[T[0].first].maxMarginal( vs / vsrem, false );      // OPTIMIZE ME

                // Restore clamped beliefs
                for( map<size_t,Factor>::const_iterator alpha = Qa_old.begin(); alpha != Qa_old.end(); alpha++ )
                    Qa[alpha->first] = alpha->second;
                for( map<size_t,Factor>::const_iterator beta = Qb_old.begin(); beta != Qb_old.end(); beta++ )
                    Qb[beta->first] = beta->second;
            }

            return( Pvs.normalized() );
        }
    }
}
557
558
559 std::pair<size_t,BigInt> boundTreewidth( const FactorGraph &fg, greedyVariableElimination::eliminationCostFunction fn, size_t maxStates ) {
560 // Create cluster graph from factor graph
561 ClusterGraph _cg( fg, true );
562
563 // Obtain elimination sequence
564 vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( fn ), maxStates ).eraseNonMaximal().clusters();
565
566 // Calculate treewidth
567 size_t treewidth = 0;
568 BigInt nrstates = 0.0;
569 for( size_t i = 0; i < ElimVec.size(); i++ ) {
570 if( ElimVec[i].size() > treewidth )
571 treewidth = ElimVec[i].size();
572 BigInt s = ElimVec[i].nrStates();
573 if( s > nrstates )
574 nrstates = s;
575 }
576
577 return make_pair(treewidth, nrstates);
578 }
579
580
// Computes a joint MAP state (after a max-product run, i.e. with the
// "inference" property set to MAXPROD) by traversing the region graph
// starting from outer region 0 and fixing, in each clique, the locally most
// probable state consistent with the variables already assigned.
// Returns the resulting state as a vector indexed by variable index.
// Throws RUNTIME_ERROR if a consistent joint state cannot be decoded.
std::vector<size_t> JTree::findMaximum() const {
    vector<size_t> maximum( nrVars() );
    vector<bool> visitedVars( nrVars(), false );
    vector<bool> visitedORs( nrORs(), false );
    // Depth-first traversal of the outer regions, starting at region 0
    stack<size_t> scheduledORs;
    scheduledORs.push( 0 );
    while( !scheduledORs.empty() ) {
        size_t alpha = scheduledORs.top();
        scheduledORs.pop();
        if( visitedORs[alpha] )
            continue;
        visitedORs[alpha] = true;

        // Get marginal of outer region alpha
        Prob probF = Qa[alpha].p();

        // The allowed configuration is restrained according to the variables assigned so far:
        // pick the argmax amongst the allowed states
        Real maxProb = -numeric_limits<Real>::max();
        State maxState( OR(alpha).vars() );
        size_t maxcount = 0;
        for( State s( OR(alpha).vars() ); s.valid(); ++s ) {
            // First, calculate whether this state is consistent with variables that
            // have been assigned already
            bool allowedState = true;
            foreach( const Var& j, OR(alpha).vars() ) {
                size_t j_index = findVar(j);
                if( visitedVars[j_index] && maximum[j_index] != s(j) ) {
                    allowedState = false;
                    break;
                }
            }
            // If it is consistent, check if its probability is larger than what we have seen so far
            if( allowedState ) {
                if( probF[s] > maxProb ) {
                    maxState = s;
                    maxProb = probF[s];
                    maxcount = 1;
                } else
                    // NOTE(review): this counts every allowed non-improving state,
                    // not only ties with maxProb; harmless since maxcount is
                    // never read afterwards, but confirm before relying on it
                    maxcount++;
            }
        }
        // A consistent state with nonzero probability must exist
        DAI_ASSERT( maxProb != 0.0 );
        DAI_ASSERT( Qa[alpha][maxState] != 0.0 );

        // Decode the argmax
        foreach( const Var& j, OR(alpha).vars() ) {
            size_t j_index = findVar(j);
            if( visitedVars[j_index] ) {
                // We have already visited j earlier - hopefully our state is consistent
                if( maximum[j_index] != maxState( j ) )
                    DAI_THROWE(RUNTIME_ERROR,"MAP state inconsistent due to loops");
            } else {
                // We found a consistent state for variable j
                visitedVars[j_index] = true;
                maximum[j_index] = maxState( j );
                // Schedule all outer regions adjacent via a separator
                foreach( const Neighbor &beta, nbOR(alpha) )
                    foreach( const Neighbor &alpha2, nbIR(beta) )
                        if( !visitedORs[alpha2] )
                            scheduledORs.push(alpha2);
            }
        }
    }
    return maximum;
}
646
647
648 } // end of namespace dai