Improved ClusterGraph and JTree (added 'maxmem' property)
[libdai.git] / src / jtree.cpp
1 /* This file is part of libDAI - http://www.libdai.org/
2 *
3 * libDAI is licensed under the terms of the GNU General Public License version
4 * 2, or (at your option) any later version. libDAI is distributed without any
5 * warranty. See the file COPYING for more details.
6 *
7 * Copyright (C) 2006-2010 Joris Mooij [joris dot mooij at libdai dot org]
8 * Copyright (C) 2006-2007 Radboud University Nijmegen, The Netherlands
9 */
10
11
12 #include <iostream>
13 #include <stack>
14 #include <dai/jtree.h>
15
16
17 namespace dai {
18
19
20 using namespace std;
21
22
/// Textual name identifying this inference algorithm (returned as part of identify())
const char *JTree::Name = "JTREE";
24
25
26 void JTree::setProperties( const PropertySet &opts ) {
27 DAI_ASSERT( opts.hasKey("updates") );
28
29 props.updates = opts.getStringAs<Properties::UpdateType>("updates");
30 if( opts.hasKey("verbose") )
31 props.verbose = opts.getStringAs<size_t>("verbose");
32 else
33 props.verbose = 0;
34 if( opts.hasKey("inference") )
35 props.inference = opts.getStringAs<Properties::InfType>("inference");
36 else
37 props.inference = Properties::InfType::SUMPROD;
38 if( opts.hasKey("heuristic") )
39 props.heuristic = opts.getStringAs<Properties::HeuristicType>("heuristic");
40 else
41 props.heuristic = Properties::HeuristicType::MINFILL;
42 if( opts.hasKey("maxmem") )
43 props.maxmem = opts.getStringAs<size_t>("maxmem");
44 else
45 props.maxmem = 0;
46 }
47
48
49 PropertySet JTree::getProperties() const {
50 PropertySet opts;
51 opts.set( "verbose", props.verbose );
52 opts.set( "updates", props.updates );
53 opts.set( "inference", props.inference );
54 opts.set( "heuristic", props.heuristic );
55 opts.set( "maxmem", props.maxmem );
56 return opts;
57 }
58
59
60 string JTree::printProperties() const {
61 stringstream s( stringstream::out );
62 s << "[";
63 s << "verbose=" << props.verbose << ",";
64 s << "updates=" << props.updates << ",";
65 s << "heuristic=" << props.heuristic << ",";
66 s << "inference=" << props.inference << ",";
67 s << "maxmem=" << props.maxmem << "]";
68 return s.str();
69 }
70
71
/// Constructs a JTree object from a factor graph and a property set.
/** If \a automatic is \c true, an elimination sequence is chosen with the
 *  configured cost heuristic and the junction tree is built immediately
 *  (throwing OUT_OF_MEMORY if the estimated memory exceeds props.maxmem);
 *  otherwise the caller is responsible for calling GenerateJT() itself.
 */
JTree::JTree( const FactorGraph &fg, const PropertySet &opts, bool automatic ) : DAIAlgRG(), _mes(), _logZ(), RTree(), Qa(), Qb(), props() {
    setProperties( opts );

    if( automatic ) {
        // Create ClusterGraph which contains maximal factors as clusters
        ClusterGraph _cg( fg, true );
        if( props.verbose >= 3 )
            cerr << "Initial clusters: " << _cg << endl;

        // Use heuristic to guess optimal elimination sequence
        greedyVariableElimination::eliminationCostFunction ec(NULL);
        switch( (size_t)props.heuristic ) {
            case Properties::HeuristicType::MINNEIGHBORS:
                ec = eliminationCost_MinNeighbors;
                break;
            case Properties::HeuristicType::MINWEIGHT:
                ec = eliminationCost_MinWeight;
                break;
            case Properties::HeuristicType::MINFILL:
                ec = eliminationCost_MinFill;
                break;
            case Properties::HeuristicType::WEIGHTEDMINFILL:
                ec = eliminationCost_WeightedMinFill;
                break;
            default:
                DAI_THROW(UNKNOWN_ENUM_VALUE);
        }
        size_t fudge = 6; // this yields a rough estimate of the memory needed (for some reason not yet clearly understood)
        // Pass a state-count budget derived from maxmem so that VarElim can abort early;
        // a maxmem of 0 yields a budget of 0 (NOTE(review): presumably interpreted by
        // VarElim as "no limit" — confirm against ClusterGraph::VarElim)
        vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( ec ), props.maxmem / (sizeof(Real) * fudge) ).eraseNonMaximal().clusters();
        if( props.verbose >= 3 )
            cerr << "VarElim result: " << ElimVec << endl;

        // Estimate memory needed (rough upper bound)
        long double memneeded = 0;
        foreach( const VarSet& cl, ElimVec )
            memneeded += cl.nrStates();
        memneeded *= sizeof(Real) * fudge;
        if( props.verbose >= 1 ) {
            cerr << "Estimate of needed memory: " << memneeded / 1024 << "kB" << endl;
            cerr << "Maximum memory: " << props.maxmem / 1024 << "kB" << endl;
        }
        // Refuse to build a tree that would exceed the user-imposed memory limit
        if( props.maxmem && memneeded > props.maxmem )
            DAI_THROW(OUT_OF_MEMORY);

        // Generate the junction tree corresponding to the elimination sequence
        GenerateJT( fg, ElimVec );
    }
}
120
121
/// Builds the junction tree and the corresponding region graph from clusters \a cl.
/** Steps: (1) build a weighted cluster graph whose edge weights are the sizes of
 *  the pairwise cluster intersections, (2) take a maximum spanning tree of it,
 *  (3) turn tree nodes into outer regions and tree edges (separators) into inner
 *  regions, and (4) initialize the beliefs Qa/Qb from the outer/inner regions.
 *  If \a verify is \c true, asserts that every factor fits in some cluster.
 */
void JTree::construct( const FactorGraph &fg, const std::vector<VarSet> &cl, bool verify ) {
    // Copy the factor graph
    FactorGraph::operator=( fg );

    // Construct a weighted graph (each edge is weighted with the cardinality
    // of the intersection of the nodes, where the nodes are the elements of cl).
    WeightedGraph<int> JuncGraph;
    // Start by connecting all clusters with cluster zero, and weight zero,
    // in order to get a connected weighted graph
    for( size_t i = 1; i < cl.size(); i++ )
        JuncGraph[UEdge(i,0)] = 0;
    for( size_t i = 0; i < cl.size(); i++ ) {
        for( size_t j = i + 1; j < cl.size(); j++ ) {
            size_t w = (cl[i] & cl[j]).size();
            // only intersecting clusters get a (positive-weight) edge here
            if( w )
                JuncGraph[UEdge(i,j)] = w;
        }
    }
    if( props.verbose >= 3 )
        cerr << "Weightedgraph: " << JuncGraph << endl;

    // Construct maximal spanning tree using Prim's algorithm
    RTree = MaxSpanningTree( JuncGraph, true );
    if( props.verbose >= 3 )
        cerr << "Spanning tree: " << RTree << endl;
    // a spanning tree over cl.size() nodes has exactly cl.size()-1 edges
    DAI_DEBASSERT( RTree.size() == cl.size() - 1 );

    // Construct corresponding region graph

    // Create outer regions (one per cluster, counting number 1, initially uniform)
    _ORs.clear();
    _ORs.reserve( cl.size() );
    for( size_t i = 0; i < cl.size(); i++ )
        _ORs.push_back( FRegion( Factor(cl[i], 1.0), 1.0 ) );

    // For each factor, find an outer region that subsumes that factor.
    // Then, multiply the outer region with that factor.
    _fac2OR.clear();
    _fac2OR.resize( nrFactors(), -1U );  // -1U is the "not assigned" sentinel
    for( size_t I = 0; I < nrFactors(); I++ ) {
        size_t alpha;
        for( alpha = 0; alpha < nrORs(); alpha++ )
            if( OR(alpha).vars() >> factor(I).vars() ) {
                _fac2OR[I] = alpha;
                break;
            }
        // alpha == nrORs() means no cluster subsumes factor I
        if( verify )
            DAI_ASSERT( alpha != nrORs() );
    }
    recomputeORs();

    // Create inner regions and edges (one inner region per spanning-tree edge)
    _IRs.clear();
    _IRs.reserve( RTree.size() );
    vector<Edge> edges;
    edges.reserve( 2 * RTree.size() );
    for( size_t i = 0; i < RTree.size(); i++ ) {
        edges.push_back( Edge( RTree[i].first, nrIRs() ) );
        edges.push_back( Edge( RTree[i].second, nrIRs() ) );
        // inner clusters have counting number -1, except if they are empty
        VarSet intersection = cl[RTree[i].first] & cl[RTree[i].second];
        _IRs.push_back( Region( intersection, intersection.size() ? -1.0 : 0.0 ) );
    }

    // create bipartite graph between outer and inner regions
    _G.construct( nrORs(), nrIRs(), edges.begin(), edges.end() );

    // Check counting numbers
#ifdef DAI_DEBUG
    checkCountingNumbers();
#endif

    // Create beliefs: Qa from the outer regions, Qb initially uniform on each separator
    Qa.clear();
    Qa.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa.push_back( OR(alpha) );

    Qb.clear();
    Qb.reserve( nrIRs() );
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb.push_back( Factor( IR(beta), 1.0 ) );
}
205
206
207 void JTree::GenerateJT( const FactorGraph &fg, const std::vector<VarSet> &cl ) {
208 construct( fg, cl, true );
209
210 // Create messages
211 _mes.clear();
212 _mes.reserve( nrORs() );
213 for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
214 _mes.push_back( vector<Factor>() );
215 _mes[alpha].reserve( nbOR(alpha).size() );
216 foreach( const Neighbor &beta, nbOR(alpha) )
217 _mes[alpha].push_back( Factor( IR(beta), 1.0 ) );
218 }
219
220 if( props.verbose >= 3 )
221 cerr << "Regiongraph generated by JTree::GenerateJT: " << *this << endl;
222 }
223
224
225 string JTree::identify() const {
226 return string(Name) + printProperties();
227 }
228
229
230 Factor JTree::belief( const VarSet &vs ) const {
231 vector<Factor>::const_iterator beta;
232 for( beta = Qb.begin(); beta != Qb.end(); beta++ )
233 if( beta->vars() >> vs )
234 break;
235 if( beta != Qb.end() ) {
236 if( props.inference == Properties::InfType::SUMPROD )
237 return( beta->marginal(vs) );
238 else
239 return( beta->maxMarginal(vs) );
240 } else {
241 vector<Factor>::const_iterator alpha;
242 for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
243 if( alpha->vars() >> vs )
244 break;
245 if( alpha == Qa.end() ) {
246 DAI_THROW(BELIEF_NOT_AVAILABLE);
247 return Factor();
248 } else {
249 if( props.inference == Properties::InfType::SUMPROD )
250 return( alpha->marginal(vs) );
251 else
252 return( alpha->maxMarginal(vs) );
253 }
254 }
255 }
256
257
258 vector<Factor> JTree::beliefs() const {
259 vector<Factor> result;
260 for( size_t beta = 0; beta < nrIRs(); beta++ )
261 result.push_back( Qb[beta] );
262 for( size_t alpha = 0; alpha < nrORs(); alpha++ )
263 result.push_back( Qa[alpha] );
264 return result;
265 }
266
267
/// Runs the HUGIN (belief-updating) variant of junction tree inference.
/** Performs one CollectEvidence sweep (leaves to root) followed by one
 *  DistributeEvidence sweep (root to leaves) over the rooted tree RTree,
 *  accumulating the log-partition sum into _logZ along the way. Afterwards,
 *  Qa and Qb contain the (normalized) exact clique and separator beliefs.
 */
void JTree::runHUGIN() {
    // Reset clique beliefs to the original outer-region factors
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha] = OR(alpha);

    // Reset separator beliefs to uniform
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb[beta].fill( 1.0 );

    // CollectEvidence
    _logZ = 0.0;
    // iterate edges in reverse order, i.e. from the leaves towards the root
    for( size_t i = RTree.size(); (i--) != 0; ) {
        // Make outer region RTree[i].first consistent with outer region RTree[i].second
        // IR(i) = seperator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].second].marginal( IR( i ), false );
        else
            new_Qb = Qa[RTree[i].second].maxMarginal( IR( i ), false );

        // collect the normalization constant into _logZ
        _logZ += log(new_Qb.normalize());
        // absorb the updated separator; dividing by the old Qb[i] avoids double-counting
        Qa[RTree[i].first] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }
    // Final normalization of the root clique contributes the last piece of logZ
    if( RTree.empty() )
        _logZ += log(Qa[0].normalize() );
    else
        _logZ += log(Qa[RTree[0].first].normalize());

    // DistributeEvidence
    for( size_t i = 0; i < RTree.size(); i++ ) {
        // Make outer region RTree[i].second consistent with outer region RTree[i].first
        // IR(i) = seperator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].first].marginal( IR( i ) );
        else
            new_Qb = Qa[RTree[i].first].maxMarginal( IR( i ) );

        Qa[RTree[i].second] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }

    // Normalize all clique beliefs
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha].normalize();
}
313
314
/// Runs the Shafer-Shenoy variant of junction tree inference.
/** Performs two message-passing sweeps over the separators (one towards the
 *  root, one back), then computes clique beliefs Qa as the product of each
 *  outer region with all incoming messages, and separator beliefs Qb by
 *  marginalizing a neighboring clique belief. _logZ is accumulated from the
 *  message and root-clique normalization constants.
 */
void JTree::runShaferShenoy() {
    // First pass
    _logZ = 0.0;
    for( size_t e = nrIRs(); (e--) != 0; ) {
        // send a message from RTree[e].second to RTree[e].first
        // or, actually, from the seperator IR(e) to RTree[e].first

        size_t i = nbIR(e)[1].node; // = RTree[e].second
        size_t j = nbIR(e)[0].node; // = RTree[e].first
        size_t _e = nbIR(e)[0].dual;

        // product of OR(i) with all incoming messages except the one over edge e
        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e), false );
        else
            message( j, _e ) = msg.maxMarginal( IR(e), false );
        // normalization constants of the messages add up to logZ
        _logZ += log( message(j,_e).normalize() );
    }

    // Second pass (messages in the opposite direction)
    for( size_t e = 0; e < nrIRs(); e++ ) {
        size_t i = nbIR(e)[0].node; // = RTree[e].first
        size_t j = nbIR(e)[1].node; // = RTree[e].second
        size_t _e = nbIR(e)[1].dual;

        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e) );
        else
            message( j, _e ) = msg.maxMarginal( IR(e) );
    }

    // Calculate beliefs: each clique belief is its region times all incoming messages
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        Factor piet = OR(alpha);
        foreach( const Neighbor &k, nbOR(alpha) )
            piet *= message( alpha, k.iter );
        if( nrIRs() == 0 ) {
            // no separators at all: the single normalization constant is logZ
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else if( alpha == nbIR(0)[0].node /*RTree[0].first*/ ) {
            // the root clique contributes its normalization constant to logZ
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else
            Qa[alpha] = piet.normalized();
    }

    // Only for logZ (and for belief)...
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        if( props.inference == Properties::InfType::SUMPROD )
            Qb[beta] = Qa[nbIR(beta)[0].node].marginal( IR(beta) );
        else
            Qb[beta] = Qa[nbIR(beta)[0].node].maxMarginal( IR(beta) );
    }
}
376
377
378 Real JTree::run() {
379 if( props.updates == Properties::UpdateType::HUGIN )
380 runHUGIN();
381 else if( props.updates == Properties::UpdateType::SHSH )
382 runShaferShenoy();
383 return 0.0;
384 }
385
386
/// Returns the logarithm of the partition sum computed by the last run().
/** The commented-out code below recomputes logZ from the region-graph free
 *  energy (entropy plus average energy terms) as a consistency check against
 *  the value accumulated during message passing; it is disabled by default.
 */
Real JTree::logZ() const {
/*  Real s = 0.0;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        s += IR(beta).c() * Qb[beta].entropy();
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        s += OR(alpha).c() * Qa[alpha].entropy();
        s += (OR(alpha).log(true) * Qa[alpha]).sum();
    }
    DAI_ASSERT( abs( _logZ - s ) < 1e-8 );
    return s;*/
    return _logZ;
}
399
400
/// Finds a rooted subtree suitable for computing a marginal over \a vs.
/** Picks as root the clique with maximal state-space overlap with \a vs,
 *  reorders the junction tree edges accordingly, and collects the subtree
 *  spanning all cliques that contain variables of \a vs (plus, if given,
 *  the path to \a PreviousRoot). On return, \a Tree holds the subtree edges
 *  first, followed by the remaining edges; the returned value is the number
 *  of subtree edges.
 */
size_t JTree::findEfficientTree( const VarSet& vs, RootedTree &Tree, size_t PreviousRoot ) const {
    // find new root clique (the one with maximal statespace overlap with vs)
    size_t maxval = 0, maxalpha = 0;
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        size_t val = VarSet(vs & OR(alpha).vars()).nrStates();
        if( val > maxval ) {
            maxval = val;
            maxalpha = alpha;
        }
    }

    // reorder the tree edges such that maxalpha becomes the new root
    RootedTree newTree( GraphEL( RTree.begin(), RTree.end() ), maxalpha );

    // identify subtree that contains all variables of vs which are not in the new root
    set<DEdge> subTree;
    // for each variable in vs
    for( VarSet::const_iterator n = vs.begin(); n != vs.end(); n++ ) {
        for( size_t e = 0; e < newTree.size(); e++ ) {
            if( OR(newTree[e].second).vars().contains( *n ) ) {
                // add edge e and track back from it to the root, adding every edge on the path
                // (edges in a RootedTree are ordered so that a parent edge precedes its children)
                size_t f = e;
                subTree.insert( newTree[f] );
                size_t pos = newTree[f].first;
                for( ; f > 0; f-- )
                    if( newTree[f-1].second == pos ) {
                        subTree.insert( newTree[f-1] );
                        pos = newTree[f-1].first;
                    }
            }
        }
    }
    if( PreviousRoot != (size_t)-1 && PreviousRoot != maxalpha) {
        // find first occurence of PreviousRoot in the tree, which is closest to the new root
        size_t e = 0;
        for( ; e != newTree.size(); e++ ) {
            if( newTree[e].second == PreviousRoot )
                break;
        }
        DAI_ASSERT( e != newTree.size() );

        // track-back path to root and add edges to subTree
        subTree.insert( newTree[e] );
        size_t pos = newTree[e].first;
        for( ; e > 0; e-- )
            if( newTree[e-1].second == pos ) {
                subTree.insert( newTree[e-1] );
                pos = newTree[e-1].first;
            }
    }

    // Resulting Tree is a reordered copy of newTree
    // First add edges in subTree to Tree
    Tree.clear();
    vector<DEdge> remTree;
    for( RootedTree::const_iterator e = newTree.begin(); e != newTree.end(); e++ )
        if( subTree.count( *e ) )
            Tree.push_back( *e );
        else
            remTree.push_back( *e );
    size_t subTreeSize = Tree.size();
    // Then add remaining edges
    copy( remTree.begin(), remTree.end(), back_inserter( Tree ) );

    return subTreeSize;
}
466
467
/// Calculates the marginal over \a vs, even if \a vs spans multiple cliques.
/** If \a vs fits inside a single inner or outer region, the corresponding
 *  belief is marginalized directly. Otherwise, an efficient subtree is found,
 *  the variables of \a vs outside the subtree root are clamped to each of
 *  their joint states in turn, and a CollectEvidence sweep over the subtree
 *  yields the (unnormalized) probability of that state; the results are
 *  accumulated into Pvs and normalized at the end. Qa/Qb on the subtree are
 *  saved beforehand and restored after every clamping, so the object's
 *  beliefs are unchanged on return.
 */
Factor JTree::calcMarginal( const VarSet& vs ) {
    // Fast path: vs is contained in a single separator belief
    vector<Factor>::const_iterator beta;
    for( beta = Qb.begin(); beta != Qb.end(); beta++ )
        if( beta->vars() >> vs )
            break;
    if( beta != Qb.end() ) {
        if( props.inference == Properties::InfType::SUMPROD )
            return( beta->marginal(vs) );
        else
            return( beta->maxMarginal(vs) );
    } else {
        // Fast path: vs is contained in a single clique belief
        vector<Factor>::const_iterator alpha;
        for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
            if( alpha->vars() >> vs )
                break;
        if( alpha != Qa.end() ) {
            if( props.inference == Properties::InfType::SUMPROD )
                return( alpha->marginal(vs) );
            else
                return( alpha->maxMarginal(vs) );
        } else {
            // Find subtree to do efficient inference
            RootedTree T;
            size_t Tsize = findEfficientTree( vs, T );

            // Find remaining variables (which are not in the new root)
            VarSet vsrem = vs / OR(T.front().first).vars();
            Factor Pvs (vs, 0.0);

            // Save Qa and Qb on the subtree, and map each subtree edge i to its
            // separator index b[i] in RTree
            map<size_t,Factor> Qa_old;
            map<size_t,Factor> Qb_old;
            vector<size_t> b(Tsize, 0);
            for( size_t i = Tsize; (i--) != 0; ) {
                size_t alpha1 = T[i].first;
                size_t alpha2 = T[i].second;
                size_t beta;  // note: shadows the iterator above
                for( beta = 0; beta < nrIRs(); beta++ )
                    if( UEdge( RTree[beta].first, RTree[beta].second ) == UEdge( alpha1, alpha2 ) )
                        break;
                DAI_ASSERT( beta != nrIRs() );
                b[i] = beta;

                if( !Qa_old.count( alpha1 ) )
                    Qa_old[alpha1] = Qa[alpha1];
                if( !Qa_old.count( alpha2 ) )
                    Qa_old[alpha2] = Qa[alpha2];
                if( !Qb_old.count( beta ) )
                    Qb_old[beta] = Qb[beta];
            }

            // For all states of vsrem
            for( State s(vsrem); s.valid(); s++ ) {
                // CollectEvidence
                Real logZ = 0.0;
                for( size_t i = Tsize; (i--) != 0; ) {
                    // Make outer region T[i].first consistent with outer region T[i].second
                    // IR(i) = seperator OR(T[i].first) && OR(T[i].second)

                    // Clamp the variables of vsrem occurring in this clique to state s
                    for( VarSet::const_iterator n = vsrem.begin(); n != vsrem.end(); n++ )
                        if( Qa[T[i].second].vars() >> *n ) {
                            Factor piet( *n, 0.0 );
                            piet.set( s(*n), 1.0 );
                            Qa[T[i].second] *= piet;
                        }

                    Factor new_Qb;
                    if( props.inference == Properties::InfType::SUMPROD )
                        new_Qb = Qa[T[i].second].marginal( IR( b[i] ), false );
                    else
                        new_Qb = Qa[T[i].second].maxMarginal( IR( b[i] ), false );
                    logZ += log(new_Qb.normalize());
                    Qa[T[i].first] *= new_Qb / Qb[b[i]];
                    Qb[b[i]] = new_Qb;
                }
                logZ += log(Qa[T[0].first].normalize());

                // Weight the root marginal by the (unnormalized) probability of state s
                Factor piet( vsrem, 0.0 );
                piet.set( s, exp(logZ) );
                if( props.inference == Properties::InfType::SUMPROD )
                    Pvs += piet * Qa[T[0].first].marginal( vs / vsrem, false );      // OPTIMIZE ME
                else
                    Pvs += piet * Qa[T[0].first].maxMarginal( vs / vsrem, false );      // OPTIMIZE ME

                // Restore clamped beliefs
                for( map<size_t,Factor>::const_iterator alpha = Qa_old.begin(); alpha != Qa_old.end(); alpha++ )
                    Qa[alpha->first] = alpha->second;
                for( map<size_t,Factor>::const_iterator beta = Qb_old.begin(); beta != Qb_old.end(); beta++ )
                    Qb[beta->first] = beta->second;
            }

            return( Pvs.normalized() );
        }
    }
}
563
564
565 std::pair<size_t,double> boundTreewidth( const FactorGraph &fg, greedyVariableElimination::eliminationCostFunction fn, size_t maxStates ) {
566 // Create cluster graph from factor graph
567 ClusterGraph _cg( fg, true );
568
569 // Obtain elimination sequence
570 vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( fn ), maxStates ).eraseNonMaximal().clusters();
571
572 // Calculate treewidth
573 size_t treewidth = 0;
574 double nrstates = 0.0;
575 for( size_t i = 0; i < ElimVec.size(); i++ ) {
576 if( ElimVec[i].size() > treewidth )
577 treewidth = ElimVec[i].size();
578 size_t s = ElimVec[i].nrStates();
579 if( s > nrstates )
580 nrstates = s;
581 }
582
583 return make_pair(treewidth, nrstates);
584 }
585
586
/// Returns a joint MAP state as the per-variable argmax of the single-variable beliefs.
/** Assumes run() was called with inference==MAXPROD so that the beliefs are
 *  max-marginals. In debug builds, first verifies that the per-factor and
 *  per-region MAP states are mutually consistent, throwing RUNTIME_ERROR on
 *  any inconsistency (which can occur in case of ties).
 */
std::vector<size_t> JTree::findMaximum() const {
#ifdef DAI_DEBUG
    // check consistency of variables and factors
    for( size_t I = 0; I < nrFactors(); I++ ) {
        // MAP state of factor I must agree with the MAP state of each of its variables
        size_t linearState = beliefF(I).p().argmax().first;
        map<Var,size_t> state = calcState( factor(I).vars(), linearState );
        foreach( const Neighbor& i, nbF(I) ) {
            if( state[var(i)] != beliefV(i).p().argmax().first ) {
                cerr << "ERROR: inconsistency between MAP beliefs: factor belief " << beliefF(I) << ": " << state << " is inconsistent with " << beliefV(i) << endl;
                DAI_THROW(RUNTIME_ERROR);
            }
        }
    }
    // check consistency of outer-region (Qa) and inner-region (Qb) MAP states
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        size_t linearStateQa = Qa[alpha].p().argmax().first;
        map<Var,size_t> stateQa = calcState( Qa[alpha].vars(), linearStateQa );
        foreach( const Neighbor& beta, nbOR(alpha) ) {
            size_t linearStateQb = Qb[beta].p().argmax().first;
            map<Var,size_t> stateQb = calcState( Qb[beta].vars(), linearStateQb );
            for( map<Var,size_t>::const_iterator it = stateQb.begin(); it != stateQb.end(); it++ )
                if( stateQa[it->first] != it->second ) {
                    cerr << "ERROR: inconsistency between MAP beliefs: OR belief " << Qa[alpha] << " (with MAP " << stateQa << ") is inconsistent with IR belief " << Qb[beta] << " (with MAP " << stateQb << ")" << endl;
                    DAI_THROW(RUNTIME_ERROR);
                }
        }
    }
#endif

    // Assemble the MAP state variable by variable
    vector<size_t> maximum;
    maximum.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        maximum.push_back( beliefV(i).p().argmax().first );
    return maximum;
}
622
623
624 } // end of namespace dai