Fixed bug in BP (introduced in commit b8f96214...) and added regression test for...
[libdai.git] / src / jtree.cpp
1 /* This file is part of libDAI - http://www.libdai.org/
2 *
3 * libDAI is licensed under the terms of the GNU General Public License version
4 * 2, or (at your option) any later version. libDAI is distributed without any
5 * warranty. See the file COPYING for more details.
6 *
7 * Copyright (C) 2006-2010 Joris Mooij [joris dot mooij at libdai dot org]
8 * Copyright (C) 2006-2007 Radboud University Nijmegen, The Netherlands
9 */
10
11
12 #include <iostream>
13 #include <stack>
14 #include <dai/jtree.h>
15
16
17 namespace dai {
18
19
20 using namespace std;
21
22
/// Name of this inference algorithm ("JTREE"); used by identify() to label the instance
const char *JTree::Name = "JTREE";
24
25
26 void JTree::setProperties( const PropertySet &opts ) {
27 DAI_ASSERT( opts.hasKey("updates") );
28
29 props.updates = opts.getStringAs<Properties::UpdateType>("updates");
30 if( opts.hasKey("verbose") )
31 props.verbose = opts.getStringAs<size_t>("verbose");
32 else
33 props.verbose = 0;
34 if( opts.hasKey("inference") )
35 props.inference = opts.getStringAs<Properties::InfType>("inference");
36 else
37 props.inference = Properties::InfType::SUMPROD;
38 if( opts.hasKey("heuristic") )
39 props.heuristic = opts.getStringAs<Properties::HeuristicType>("heuristic");
40 else
41 props.heuristic = Properties::HeuristicType::MINFILL;
42 if( opts.hasKey("maxmem") )
43 props.maxmem = opts.getStringAs<size_t>("maxmem");
44 else
45 props.maxmem = 0;
46 }
47
48
49 PropertySet JTree::getProperties() const {
50 PropertySet opts;
51 opts.set( "verbose", props.verbose );
52 opts.set( "updates", props.updates );
53 opts.set( "inference", props.inference );
54 opts.set( "heuristic", props.heuristic );
55 opts.set( "maxmem", props.maxmem );
56 return opts;
57 }
58
59
60 string JTree::printProperties() const {
61 stringstream s( stringstream::out );
62 s << "[";
63 s << "verbose=" << props.verbose << ",";
64 s << "updates=" << props.updates << ",";
65 s << "heuristic=" << props.heuristic << ",";
66 s << "inference=" << props.inference << ",";
67 s << "maxmem=" << props.maxmem << "]";
68 return s.str();
69 }
70
71
/// Constructs a JTree object for inference on \a fg.
/** \param fg the factor graph to run inference on
 *  \param opts parameters (see setProperties() for supported keys)
 *  \param automatic if \c true, a junction tree is built here via greedy
 *         variable elimination; if \c false, the caller is expected to
 *         supply the clusters itself (via GenerateJT()/construct())
 *  \throws UNKNOWN_ENUM_VALUE if the heuristic property has no known value
 *  \throws OUT_OF_MEMORY if the estimated memory exceeds props.maxmem
 */
JTree::JTree( const FactorGraph &fg, const PropertySet &opts, bool automatic ) : DAIAlgRG(), _mes(), _logZ(), RTree(), Qa(), Qb(), props() {
    setProperties( opts );

    if( automatic ) {
        // Create ClusterGraph which contains maximal factors as clusters
        ClusterGraph _cg( fg, true );
        if( props.verbose >= 3 )
            cerr << "Initial clusters: " << _cg << endl;

        // Use heuristic to guess optimal elimination sequence
        greedyVariableElimination::eliminationCostFunction ec(NULL);
        switch( (size_t)props.heuristic ) {
            case Properties::HeuristicType::MINNEIGHBORS:
                ec = eliminationCost_MinNeighbors;
                break;
            case Properties::HeuristicType::MINWEIGHT:
                ec = eliminationCost_MinWeight;
                break;
            case Properties::HeuristicType::MINFILL:
                ec = eliminationCost_MinFill;
                break;
            case Properties::HeuristicType::WEIGHTEDMINFILL:
                ec = eliminationCost_WeightedMinFill;
                break;
            default:
                DAI_THROW(UNKNOWN_ENUM_VALUE);
        }
        size_t fudge = 6; // this yields a rough estimate of the memory needed (for some reason not yet clearly understood)
        // Run greedy variable elimination; the second argument caps the number
        // of states per cluster derived from props.maxmem (0 means no limit)
        vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( ec ), props.maxmem / (sizeof(Real) * fudge) ).eraseNonMaximal().clusters();
        if( props.verbose >= 3 )
            cerr << "VarElim result: " << ElimVec << endl;

        // Estimate memory needed (rough upper bound)
        // Accumulated in long double to postpone overflow for large clusters
        long double memneeded = 0;
        foreach( const VarSet& cl, ElimVec )
            memneeded += cl.nrStates();
        memneeded *= sizeof(Real) * fudge;
        if( props.verbose >= 1 ) {
            cerr << "Estimate of needed memory: " << memneeded / 1024 << "kB" << endl;
            cerr << "Maximum memory: ";
            if( props.maxmem )
                cerr << props.maxmem / 1024 << "kB" << endl;
            else
                cerr << "unlimited" << endl;
        }
        // Abort before allocating anything if the estimate exceeds the limit
        if( props.maxmem && memneeded > props.maxmem )
            DAI_THROW(OUT_OF_MEMORY);

        // Generate the junction tree corresponding to the elimination sequence
        GenerateJT( fg, ElimVec );
    }
}
124
125
/// Builds the junction tree (as a region graph) from the given clusters.
/** \param fg the factor graph, copied into this object
 *  \param cl the outer-region clusters (e.g. result of variable elimination)
 *  \param verify if \c true, assert that every factor of \a fg is subsumed
 *         by at least one cluster in \a cl
 */
void JTree::construct( const FactorGraph &fg, const std::vector<VarSet> &cl, bool verify ) {
    // Copy the factor graph
    FactorGraph::operator=( fg );

    // Construct a weighted graph (each edge is weighted with the cardinality
    // of the intersection of the nodes, where the nodes are the elements of cl).
    WeightedGraph<int> JuncGraph;
    // Start by connecting all clusters with cluster zero, and weight zero,
    // in order to get a connected weighted graph
    for( size_t i = 1; i < cl.size(); i++ )
        JuncGraph[UEdge(i,0)] = 0;
    for( size_t i = 0; i < cl.size(); i++ ) {
        for( size_t j = i + 1; j < cl.size(); j++ ) {
            // Weight = number of shared variables; a real edge only if nonzero
            // (this may overwrite the weight-zero edge added above, which is intended)
            size_t w = (cl[i] & cl[j]).size();
            if( w )
                JuncGraph[UEdge(i,j)] = w;
        }
    }
    if( props.verbose >= 3 )
        cerr << "Weightedgraph: " << JuncGraph << endl;

    // Construct maximal spanning tree using Prim's algorithm
    // (maximizing shared variables yields a valid junction tree, i.e. one
    // satisfying the running-intersection property)
    RTree = MaxSpanningTree( JuncGraph, true );
    if( props.verbose >= 3 )
        cerr << "Spanning tree: " << RTree << endl;
    DAI_DEBASSERT( RTree.size() == cl.size() - 1 );

    // Construct corresponding region graph

    // Create outer regions: one per cluster, initialized to the unit factor
    // with counting number 1
    _ORs.clear();
    _ORs.reserve( cl.size() );
    for( size_t i = 0; i < cl.size(); i++ )
        _ORs.push_back( FRegion( Factor(cl[i], 1.0), 1.0 ) );

    // For each factor, find an outer region that subsumes that factor.
    // Then, multiply the outer region with that factor.
    _fac2OR.clear();
    _fac2OR.resize( nrFactors(), -1U );  // -1U marks "not assigned to any outer region"
    for( size_t I = 0; I < nrFactors(); I++ ) {
        size_t alpha;
        for( alpha = 0; alpha < nrORs(); alpha++ )
            if( OR(alpha).vars() >> factor(I).vars() ) {
                _fac2OR[I] = alpha;
                break;
            }
        if( verify )
            DAI_ASSERT( alpha != nrORs() );
    }
    recomputeORs();

    // Create inner regions and edges: one inner region (separator) per tree edge,
    // connected to both of its endpoint outer regions
    _IRs.clear();
    _IRs.reserve( RTree.size() );
    vector<Edge> edges;
    edges.reserve( 2 * RTree.size() );
    for( size_t i = 0; i < RTree.size(); i++ ) {
        edges.push_back( Edge( RTree[i].first, nrIRs() ) );
        edges.push_back( Edge( RTree[i].second, nrIRs() ) );
        // inner clusters have counting number -1, except if they are empty
        VarSet intersection = cl[RTree[i].first] & cl[RTree[i].second];
        _IRs.push_back( Region( intersection, intersection.size() ? -1.0 : 0.0 ) );
    }

    // create bipartite graph
    _G.construct( nrORs(), nrIRs(), edges.begin(), edges.end() );

    // Check counting numbers
#ifdef DAI_DEBUG
    checkCountingNumbers();
#endif

    // Create beliefs: outer-region beliefs start as the outer regions themselves,
    // inner-region beliefs start as unit factors over the separators
    Qa.clear();
    Qa.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa.push_back( OR(alpha) );

    Qb.clear();
    Qb.reserve( nrIRs() );
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb.push_back( Factor( IR(beta), 1.0 ) );
}
209
210
/// Builds the junction tree for \a fg from the clusters \a cl and allocates messages.
/** Calls construct() with verification enabled, then initializes one unit
 *  message per (outer region, inner region) edge of the region graph.
 */
void JTree::GenerateJT( const FactorGraph &fg, const std::vector<VarSet> &cl ) {
    construct( fg, cl, true );

    // Create messages: _mes[alpha][_beta] lives on the edge between outer
    // region alpha and its _beta'th neighboring inner region, initialized to 1
    _mes.clear();
    _mes.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        _mes.push_back( vector<Factor>() );
        _mes[alpha].reserve( nbOR(alpha).size() );
        foreach( const Neighbor &beta, nbOR(alpha) )
            _mes[alpha].push_back( Factor( IR(beta), 1.0 ) );
    }

    if( props.verbose >= 3 )
        cerr << "Regiongraph generated by JTree::GenerateJT: " << *this << endl;
}
227
228
229 string JTree::identify() const {
230 return string(Name) + printProperties();
231 }
232
233
/// Returns the belief over the variables \a vs.
/** Depending on the "inference" property, this is a sum-product marginal or a
 *  max-product max-marginal. Only variable sets contained in some inner or
 *  outer region are supported; use calcMarginal() for arbitrary sets.
 *  \throws BELIEF_NOT_AVAILABLE if no region contains \a vs
 */
Factor JTree::belief( const VarSet &vs ) const {
    // First, look for an inner region (separator) belief that subsumes vs;
    // separators are smaller, so marginalizing them is cheaper
    vector<Factor>::const_iterator beta;
    for( beta = Qb.begin(); beta != Qb.end(); beta++ )
        if( beta->vars() >> vs )
            break;
    if( beta != Qb.end() ) {
        if( props.inference == Properties::InfType::SUMPROD )
            return( beta->marginal(vs) );
        else
            return( beta->maxMarginal(vs) );
    } else {
        // Otherwise, look for an outer region (clique) belief that subsumes vs
        vector<Factor>::const_iterator alpha;
        for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
            if( alpha->vars() >> vs )
                break;
        if( alpha == Qa.end() ) {
            DAI_THROW(BELIEF_NOT_AVAILABLE);
            return Factor();  // unreachable; silences "missing return" warnings
        } else {
            if( props.inference == Properties::InfType::SUMPROD )
                return( alpha->marginal(vs) );
            else
                return( alpha->maxMarginal(vs) );
        }
    }
}
260
261
262 vector<Factor> JTree::beliefs() const {
263 vector<Factor> result;
264 for( size_t beta = 0; beta < nrIRs(); beta++ )
265 result.push_back( Qb[beta] );
266 for( size_t alpha = 0; alpha < nrORs(); alpha++ )
267 result.push_back( Qa[alpha] );
268 return result;
269 }
270
271
/// Runs the HUGIN update scheme: collect, distribute, normalize.
/** Belief-updating variant of junction-tree propagation: outer-region beliefs
 *  Qa are updated in place while separator beliefs Qb track the messages.
 *  Also accumulates the log-partition sum in _logZ.
 */
void JTree::runHUGIN() {
    // Initialize outer-region beliefs to the products of their assigned factors
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha] = OR(alpha);

    // Initialize separator beliefs to unity
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb[beta].fill( 1.0 );

    // CollectEvidence
    // Sweep the rooted tree from the leaves towards the root (reverse edge order)
    _logZ = 0.0;
    for( size_t i = RTree.size(); (i--) != 0; ) {
        // Make outer region RTree[i].first consistent with outer region RTree[i].second
        // IR(i) = seperator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].second].marginal( IR( i ), false );
        else
            new_Qb = Qa[RTree[i].second].maxMarginal( IR( i ), false );

        // Normalize the separator belief and absorb its mass into logZ
        _logZ += log(new_Qb.normalize());
        Qa[RTree[i].first] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }
    // Account for the normalization of the root clique
    // (if the tree has no edges, Qa[0] is the only clique)
    if( RTree.empty() )
        _logZ += log(Qa[0].normalize() );
    else
        _logZ += log(Qa[RTree[0].first].normalize());

    // DistributeEvidence
    // Sweep back from the root towards the leaves (forward edge order)
    for( size_t i = 0; i < RTree.size(); i++ ) {
        // Make outer region RTree[i].second consistent with outer region RTree[i].first
        // IR(i) = seperator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].first].marginal( IR( i ) );
        else
            new_Qb = Qa[RTree[i].first].maxMarginal( IR( i ) );

        Qa[RTree[i].second] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }

    // Normalize
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha].normalize();
}
317
318
/// Runs the Shafer-Shenoy update scheme (two message-passing sweeps).
/** Message-passing variant of junction-tree propagation: explicit messages
 *  are stored per edge (in _mes, accessed via message()), and beliefs Qa/Qb
 *  are computed from them afterwards. Also accumulates _logZ.
 */
void JTree::runShaferShenoy() {
    // First pass: sweep from the leaves towards the root (reverse edge order)
    _logZ = 0.0;
    for( size_t e = nrIRs(); (e--) != 0; ) {
        // send a message from RTree[e].second to RTree[e].first
        // or, actually, from the seperator IR(e) to RTree[e].first

        size_t i = nbIR(e)[1].node; // = RTree[e].second
        size_t j = nbIR(e)[0].node; // = RTree[e].first
        size_t _e = nbIR(e)[0].dual;

        // Product of the outer region with all incoming messages except the
        // one on edge e itself (standard Shafer-Shenoy exclusion)
        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e), false );
        else
            message( j, _e ) = msg.maxMarginal( IR(e), false );
        // Normalize the message and absorb its mass into logZ
        _logZ += log( message(j,_e).normalize() );
    }

    // Second pass: sweep from the root towards the leaves (forward edge order)
    for( size_t e = 0; e < nrIRs(); e++ ) {
        size_t i = nbIR(e)[0].node; // = RTree[e].first
        size_t j = nbIR(e)[1].node; // = RTree[e].second
        size_t _e = nbIR(e)[1].dual;

        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e) );
        else
            message( j, _e ) = msg.maxMarginal( IR(e) );
    }

    // Calculate beliefs: each outer-region belief is the region times all
    // incoming messages; the root clique's normalization also enters logZ
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        Factor piet = OR(alpha);
        foreach( const Neighbor &k, nbOR(alpha) )
            piet *= message( alpha, k.iter );
        if( nrIRs() == 0 ) {
            // Single-clique tree: this clique carries the whole partition sum
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else if( alpha == nbIR(0)[0].node /*RTree[0].first*/ ) {
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else
            Qa[alpha] = piet.normalized();
    }

    // Only for logZ (and for belief)...
    // Separator beliefs are obtained by marginalizing an adjacent clique belief
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        if( props.inference == Properties::InfType::SUMPROD )
            Qb[beta] = Qa[nbIR(beta)[0].node].marginal( IR(beta) );
        else
            Qb[beta] = Qa[nbIR(beta)[0].node].maxMarginal( IR(beta) );
    }
}
380
381
382 Real JTree::run() {
383 if( props.updates == Properties::UpdateType::HUGIN )
384 runHUGIN();
385 else if( props.updates == Properties::UpdateType::SHSH )
386 runShaferShenoy();
387 return 0.0;
388 }
389
390
/// Returns the logarithm of the partition sum, as accumulated by the last
/// runHUGIN() / runShaferShenoy() call.
/** The commented-out code below recomputes logZ from the region-graph
 *  free energy as a consistency check; it is kept for debugging purposes.
 */
Real JTree::logZ() const {
/*  Real s = 0.0;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        s += IR(beta).c() * Qb[beta].entropy();
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        s += OR(alpha).c() * Qa[alpha].entropy();
        s += (OR(alpha).log(true) * Qa[alpha]).sum();
    }
    DAI_ASSERT( abs( _logZ - s ) < 1e-8 );
    return s;*/
    return _logZ;
}
403
404
/// Reorders the junction tree so that inference for the variables \a vs is cheap.
/** Picks as new root the clique with maximal state-space overlap with \a vs,
 *  writes a reordered copy of the tree into \a Tree such that the edges of the
 *  subtree relevant for \a vs come first, and returns the size of that subtree.
 *  \param vs the variables of interest
 *  \param Tree output: reordered rooted tree
 *  \param PreviousRoot if not (size_t)-1, the path from this clique to the new
 *         root is also included in the subtree
 *  \return number of edges at the front of \a Tree that form the relevant subtree
 */
size_t JTree::findEfficientTree( const VarSet& vs, RootedTree &Tree, size_t PreviousRoot ) const {
    // find new root clique (the one with maximal statespace overlap with vs)
    size_t maxval = 0, maxalpha = 0;
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        size_t val = VarSet(vs & OR(alpha).vars()).nrStates();
        if( val > maxval ) {
            maxval = val;
            maxalpha = alpha;
        }
    }

    // reorder the tree edges such that maxalpha becomes the new root
    RootedTree newTree( GraphEL( RTree.begin(), RTree.end() ), maxalpha );

    // identify subtree that contains all variables of vs which are not in the new root
    set<DEdge> subTree;
    // for each variable in vs
    for( VarSet::const_iterator n = vs.begin(); n != vs.end(); n++ ) {
        for( size_t e = 0; e < newTree.size(); e++ ) {
            if( OR(newTree[e].second).vars().contains( *n ) ) {
                // Add the edge reaching this clique, then walk back towards the
                // root adding every ancestor edge (in a rooted tree, a parent
                // edge always precedes its children in the edge list)
                size_t f = e;
                subTree.insert( newTree[f] );
                size_t pos = newTree[f].first;
                for( ; f > 0; f-- )
                    if( newTree[f-1].second == pos ) {
                        subTree.insert( newTree[f-1] );
                        pos = newTree[f-1].first;
                    }
            }
        }
    }
    if( PreviousRoot != (size_t)-1 && PreviousRoot != maxalpha) {
        // find first occurence of PreviousRoot in the tree, which is closest to the new root
        size_t e = 0;
        for( ; e != newTree.size(); e++ ) {
            if( newTree[e].second == PreviousRoot )
                break;
        }
        DAI_ASSERT( e != newTree.size() );

        // track-back path to root and add edges to subTree
        subTree.insert( newTree[e] );
        size_t pos = newTree[e].first;
        for( ; e > 0; e-- )
            if( newTree[e-1].second == pos ) {
                subTree.insert( newTree[e-1] );
                pos = newTree[e-1].first;
            }
    }

    // Resulting Tree is a reordered copy of newTree
    // First add edges in subTree to Tree
    Tree.clear();
    vector<DEdge> remTree;
    for( RootedTree::const_iterator e = newTree.begin(); e != newTree.end(); e++ )
        if( subTree.count( *e ) )
            Tree.push_back( *e );
        else
            remTree.push_back( *e );
    size_t subTreeSize = Tree.size();
    // Then add remaining edges
    copy( remTree.begin(), remTree.end(), back_inserter( Tree ) );

    return subTreeSize;
}
470
471
/// Calculates the marginal of an arbitrary variable set \a vs.
/** If \a vs is contained in some region, the corresponding belief is simply
 *  marginalized. Otherwise, the marginal is computed by clamping the variables
 *  of \a vs that lie outside the root clique to each joint state in turn and
 *  re-propagating on the relevant subtree (which can be exponentially costly
 *  in the number of clamped variables).
 *  \note Unlike belief(), this method never throws BELIEF_NOT_AVAILABLE;
 *  it assumes run() was called beforehand so that Qa/Qb are consistent.
 */
Factor JTree::calcMarginal( const VarSet& vs ) {
    // Cheap path: vs is contained in some separator belief...
    vector<Factor>::const_iterator beta;
    for( beta = Qb.begin(); beta != Qb.end(); beta++ )
        if( beta->vars() >> vs )
            break;
    if( beta != Qb.end() ) {
        if( props.inference == Properties::InfType::SUMPROD )
            return( beta->marginal(vs) );
        else
            return( beta->maxMarginal(vs) );
    } else {
        // ...or in some clique belief
        vector<Factor>::const_iterator alpha;
        for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
            if( alpha->vars() >> vs )
                break;
        if( alpha != Qa.end() ) {
            if( props.inference == Properties::InfType::SUMPROD )
                return( alpha->marginal(vs) );
            else
                return( alpha->maxMarginal(vs) );
        } else {
            // Find subtree to do efficient inference
            RootedTree T;
            size_t Tsize = findEfficientTree( vs, T );
            // NOTE(review): T.front() below assumes the tree has at least one
            // edge, i.e. vs spans more than a single clique — which holds here
            // because vs was not subsumed by any single region

            // Find remaining variables (which are not in the new root)
            VarSet vsrem = vs / OR(T.front().first).vars();
            Factor Pvs (vs, 0.0);

            // Save Qa and Qb on the subtree, so they can be restored after clamping
            map<size_t,Factor> Qa_old;
            map<size_t,Factor> Qb_old;
            vector<size_t> b(Tsize, 0);
            for( size_t i = Tsize; (i--) != 0; ) {
                size_t alpha1 = T[i].first;
                size_t alpha2 = T[i].second;
                // Find the inner region (separator) corresponding to this tree
                // edge; note that this 'beta' shadows the iterator declared above
                size_t beta;
                for( beta = 0; beta < nrIRs(); beta++ )
                    if( UEdge( RTree[beta].first, RTree[beta].second ) == UEdge( alpha1, alpha2 ) )
                        break;
                DAI_ASSERT( beta != nrIRs() );
                b[i] = beta;

                if( !Qa_old.count( alpha1 ) )
                    Qa_old[alpha1] = Qa[alpha1];
                if( !Qa_old.count( alpha2 ) )
                    Qa_old[alpha2] = Qa[alpha2];
                if( !Qb_old.count( beta ) )
                    Qb_old[beta] = Qb[beta];
            }

            // For all states of vsrem
            for( State s(vsrem); s.valid(); s++ ) {
                // CollectEvidence
                Real logZ = 0.0;
                for( size_t i = Tsize; (i--) != 0; ) {
                    // Make outer region T[i].first consistent with outer region T[i].second
                    // IR(i) = seperator OR(T[i].first) && OR(T[i].second)

                    // Clamp each remaining variable present in this clique to
                    // its value in the current joint state s
                    for( VarSet::const_iterator n = vsrem.begin(); n != vsrem.end(); n++ )
                        if( Qa[T[i].second].vars() >> *n ) {
                            Factor piet( *n, 0.0 );
                            piet.set( s(*n), 1.0 );
                            Qa[T[i].second] *= piet;
                        }

                    Factor new_Qb;
                    if( props.inference == Properties::InfType::SUMPROD )
                        new_Qb = Qa[T[i].second].marginal( IR( b[i] ), false );
                    else
                        new_Qb = Qa[T[i].second].maxMarginal( IR( b[i] ), false );
                    logZ += log(new_Qb.normalize());
                    Qa[T[i].first] *= new_Qb / Qb[b[i]];
                    Qb[b[i]] = new_Qb;
                }
                logZ += log(Qa[T[0].first].normalize());

                // Weight the root-clique marginal by the (unnormalized)
                // probability mass exp(logZ) of this clamped state
                Factor piet( vsrem, 0.0 );
                piet.set( s, exp(logZ) );
                if( props.inference == Properties::InfType::SUMPROD )
                    Pvs += piet * Qa[T[0].first].marginal( vs / vsrem, false ); // OPTIMIZE ME
                else
                    Pvs += piet * Qa[T[0].first].maxMarginal( vs / vsrem, false ); // OPTIMIZE ME

                // Restore clamped beliefs
                for( map<size_t,Factor>::const_iterator alpha = Qa_old.begin(); alpha != Qa_old.end(); alpha++ )
                    Qa[alpha->first] = alpha->second;
                for( map<size_t,Factor>::const_iterator beta = Qb_old.begin(); beta != Qb_old.end(); beta++ )
                    Qb[beta->first] = beta->second;
            }

            return( Pvs.normalized() );
        }
    }
}
567
568
/// Computes a treewidth bound of a factor graph via greedy variable elimination.
/** \param fg the factor graph
 *  \param fn the elimination cost heuristic to use
 *  \param maxStates abort elimination when a cluster would exceed this many
 *         states (0 presumably means unlimited — TODO confirm in VarElim)
 *  \return pair of (largest cluster size, i.e. treewidth bound + 1 convention
 *          used by the clusters themselves; largest cluster state count)
 */
std::pair<size_t,double> boundTreewidth( const FactorGraph &fg, greedyVariableElimination::eliminationCostFunction fn, size_t maxStates ) {
    // Create cluster graph from factor graph
    ClusterGraph _cg( fg, true );

    // Obtain elimination sequence
    vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( fn ), maxStates ).eraseNonMaximal().clusters();

    // Calculate treewidth: track the largest cluster (variable count) and the
    // largest cluster state space over the elimination sequence
    size_t treewidth = 0;
    double nrstates = 0.0;
    for( size_t i = 0; i < ElimVec.size(); i++ ) {
        if( ElimVec[i].size() > treewidth )
            treewidth = ElimVec[i].size();
        // NOTE(review): storing nrStates() in a size_t may overflow for very
        // large clusters even though the accumulator is a double — TODO confirm
        // the return type of VarSet::nrStates() in this libDAI version
        size_t s = ElimVec[i].nrStates();
        if( s > nrstates )
            nrstates = s;
    }

    return make_pair(treewidth, nrstates);
}
589
590
/// Returns a joint MAP state by taking the argmax of each single-variable belief.
/** Assumes run() was called with inference==MAXPROD so that the beliefs are
 *  max-marginals. In debug builds, first verifies that the per-variable,
 *  per-factor and per-region MAP states are mutually consistent.
 *  \note Per-variable argmax is only guaranteed to yield a consistent joint
 *  MAP state when the max-marginals have unique maximizers (ties could break
 *  differently per variable); the DAI_DEBUG checks below would detect such an
 *  inconsistency at runtime.
 */
std::vector<size_t> JTree::findMaximum() const {
#ifdef DAI_DEBUG
    // check consistency of variables and factors
    for( size_t I = 0; I < nrFactors(); I++ ) {
        size_t linearState = beliefF(I).p().argmax().first;
        map<Var,size_t> state = calcState( factor(I).vars(), linearState );
        foreach( const Neighbor& i, nbF(I) ) {
            if( state[var(i)] != beliefV(i).p().argmax().first ) {
                cerr << "ERROR: inconsistency between MAP beliefs: factor belief " << beliefF(I) << ": " << state << " is inconsistent with " << beliefV(i) << endl;
                DAI_THROW(RUNTIME_ERROR);
            }
        }
    }
    // check consistency of outer-region and inner-region MAP states
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        size_t linearStateQa = Qa[alpha].p().argmax().first;
        map<Var,size_t> stateQa = calcState( Qa[alpha].vars(), linearStateQa );
        foreach( const Neighbor& beta, nbOR(alpha) ) {
            size_t linearStateQb = Qb[beta].p().argmax().first;
            map<Var,size_t> stateQb = calcState( Qb[beta].vars(), linearStateQb );
            for( map<Var,size_t>::const_iterator it = stateQb.begin(); it != stateQb.end(); it++ )
                if( stateQa[it->first] != it->second ) {
                    cerr << "ERROR: inconsistency between MAP beliefs: OR belief " << Qa[alpha] << " (with MAP " << stateQa << ") is inconsistent with IR belief " << Qb[beta] << " (with MAP " << stateQb << ")" << endl;
                    DAI_THROW(RUNTIME_ERROR);
                }
        }
    }
#endif

    // Extract the MAP state variable by variable from the single-variable beliefs
    vector<size_t> maximum;
    maximum.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        maximum.push_back( beliefV(i).p().argmax().first );
    return maximum;
}
626
627
628 } // end of namespace dai