// Commit: Added example_imagesegmentation, BipartiteGraph::nb1Set() and nb2Set()
// File: src/jtree.cpp (libdai.git)
1 /* This file is part of libDAI - http://www.libdai.org/
2 *
3 * libDAI is licensed under the terms of the GNU General Public License version
4 * 2, or (at your option) any later version. libDAI is distributed without any
5 * warranty. See the file COPYING for more details.
6 *
7 * Copyright (C) 2006-2010 Joris Mooij [joris dot mooij at libdai dot org]
8 * Copyright (C) 2006-2007 Radboud University Nijmegen, The Netherlands
9 */
10
11
12 #include <iostream>
13 #include <stack>
14 #include <dai/jtree.h>
15
16
17 namespace dai {
18
19
20 using namespace std;
21
22
/// Textual identifier of this algorithm (used by identify() to label the instance).
const char *JTree::Name = "JTREE";
24
25
26 void JTree::setProperties( const PropertySet &opts ) {
27 DAI_ASSERT( opts.hasKey("updates") );
28
29 props.updates = opts.getStringAs<Properties::UpdateType>("updates");
30 if( opts.hasKey("verbose") )
31 props.verbose = opts.getStringAs<size_t>("verbose");
32 else
33 props.verbose = 0;
34 if( opts.hasKey("inference") )
35 props.inference = opts.getStringAs<Properties::InfType>("inference");
36 else
37 props.inference = Properties::InfType::SUMPROD;
38 if( opts.hasKey("heuristic") )
39 props.heuristic = opts.getStringAs<Properties::HeuristicType>("heuristic");
40 else
41 props.heuristic = Properties::HeuristicType::MINFILL;
42 }
43
44
45 PropertySet JTree::getProperties() const {
46 PropertySet opts;
47 opts.set( "verbose", props.verbose );
48 opts.set( "updates", props.updates );
49 opts.set( "inference", props.inference );
50 opts.set( "heuristic", props.heuristic );
51 return opts;
52 }
53
54
55 string JTree::printProperties() const {
56 stringstream s( stringstream::out );
57 s << "[";
58 s << "verbose=" << props.verbose << ",";
59 s << "updates=" << props.updates << ",";
60 s << "heuristic=" << props.heuristic << ",";
61 s << "inference=" << props.inference << "]";
62 return s.str();
63 }
64
65
/// Constructs a JTree object for the factor graph \a fg.
/** \param fg factor graph to perform inference on
 *  \param opts algorithm parameters; the "updates" key is mandatory (see setProperties())
 *  \param automatic if \c true, a junction tree is built immediately from the
 *         factor clusters using the elimination heuristic selected in \a opts;
 *         if \c false, no junction tree is generated here (the caller must do it)
 */
JTree::JTree( const FactorGraph &fg, const PropertySet &opts, bool automatic ) : DAIAlgRG(), _mes(), _logZ(), RTree(), Qa(), Qb(), props() {
    setProperties( opts );

    if( automatic ) {
        // Create ClusterGraph which contains factors as clusters
        vector<VarSet> cl;
        cl.reserve( fg.nrFactors() );
        for( size_t I = 0; I < fg.nrFactors(); I++ )
            cl.push_back( fg.factor(I).vars() );
        ClusterGraph _cg( cl );

        if( props.verbose >= 3 )
            cerr << "Initial clusters: " << _cg << endl;

        // Retain only maximal clusters (non-maximal ones are subsumed anyway)
        _cg.eraseNonMaximal();
        if( props.verbose >= 3 )
            cerr << "Maximal clusters: " << _cg << endl;

        // Use heuristic to guess optimal elimination sequence
        // (cast to size_t so the enum wrapper can be used in a switch)
        greedyVariableElimination::eliminationCostFunction ec(NULL);
        switch( (size_t)props.heuristic ) {
            case Properties::HeuristicType::MINNEIGHBORS:
                ec = eliminationCost_MinNeighbors;
                break;
            case Properties::HeuristicType::MINWEIGHT:
                ec = eliminationCost_MinWeight;
                break;
            case Properties::HeuristicType::MINFILL:
                ec = eliminationCost_MinFill;
                break;
            case Properties::HeuristicType::WEIGHTEDMINFILL:
                ec = eliminationCost_WeightedMinFill;
                break;
            default:
                DAI_THROW(UNKNOWN_ENUM_VALUE);
        }
        // Run greedy variable elimination; the resulting (maximal) clusters
        // are the cliques of the triangulated graph
        vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( ec ) ).eraseNonMaximal().clusters();
        if( props.verbose >= 3 )
            cerr << "VarElim result: " << ElimVec << endl;

        // Generate the junction tree corresponding to the elimination sequence
        GenerateJT( fg, ElimVec );
    }
}
111
112
/// Builds the junction tree region graph from the clusters \a cl.
/** \param fg factor graph; copied into this object
 *  \param cl outer clusters (cliques); each factor of \a fg should be subsumed
 *         by at least one cluster
 *  \param verify if \c true, asserts that every factor is indeed subsumed by
 *         some outer region
 *  Side effects: sets RTree, the outer/inner regions, _fac2OR, the bipartite
 *  region structure _G, and initializes the beliefs Qa and Qb.
 */
void JTree::construct( const FactorGraph &fg, const std::vector<VarSet> &cl, bool verify ) {
    // Copy the factor graph
    FactorGraph::operator=( fg );

    // Construct a weighted graph (each edge is weighted with the cardinality
    // of the intersection of the nodes, where the nodes are the elements of cl).
    WeightedGraph<int> JuncGraph;
    // Start by connecting all clusters with cluster zero, and weight zero,
    // in order to get a connected weighted graph
    for( size_t i = 1; i < cl.size(); i++ )
        JuncGraph[UEdge(i,0)] = 0;
    for( size_t i = 0; i < cl.size(); i++ ) {
        for( size_t j = i + 1; j < cl.size(); j++ ) {
            size_t w = (cl[i] & cl[j]).size();
            if( w )
                JuncGraph[UEdge(i,j)] = w;
        }
    }
    if( props.verbose >= 3 )
        cerr << "Weightedgraph: " << JuncGraph << endl;

    // Construct maximal spanning tree using Prim's algorithm
    // (maximizing separator sizes yields a valid junction tree by the
    // maximum-weight spanning tree characterization)
    RTree = MaxSpanningTree( JuncGraph, true );
    if( props.verbose >= 3 )
        cerr << "Spanning tree: " << RTree << endl;
    DAI_DEBASSERT( RTree.size() == cl.size() - 1 );

    // Construct corresponding region graph

    // Create outer regions (one per cluster, counting number 1)
    _ORs.clear();
    _ORs.reserve( cl.size() );
    for( size_t i = 0; i < cl.size(); i++ )
        _ORs.push_back( FRegion( Factor(cl[i], 1.0), 1.0 ) );

    // For each factor, find an outer region that subsumes that factor.
    // Then, multiply the outer region with that factor.
    _fac2OR.clear();
    _fac2OR.resize( nrFactors(), -1U );  // -1U marks "not assigned"
    for( size_t I = 0; I < nrFactors(); I++ ) {
        size_t alpha;
        for( alpha = 0; alpha < nrORs(); alpha++ )
            if( OR(alpha).vars() >> factor(I).vars() ) {
                _fac2OR[I] = alpha;
                break;
            }
        if( verify )
            DAI_ASSERT( alpha != nrORs() );  // every factor must fit in some clique
    }
    recomputeORs();

    // Create inner regions (separators) and edges, one per spanning tree edge
    _IRs.clear();
    _IRs.reserve( RTree.size() );
    vector<Edge> edges;
    edges.reserve( 2 * RTree.size() );
    for( size_t i = 0; i < RTree.size(); i++ ) {
        edges.push_back( Edge( RTree[i].first, nrIRs() ) );
        edges.push_back( Edge( RTree[i].second, nrIRs() ) );
        // inner clusters have counting number -1, except if they are empty
        VarSet intersection = cl[RTree[i].first] & cl[RTree[i].second];
        _IRs.push_back( Region( intersection, intersection.size() ? -1.0 : 0.0 ) );
    }

    // create bipartite graph between outer and inner regions
    _G.construct( nrORs(), nrIRs(), edges.begin(), edges.end() );

    // Check counting numbers
#ifdef DAI_DEBUG
    checkCountingNumbers();
#endif

    // Create beliefs: cliques start at their outer region factors,
    // separators start uniform
    Qa.clear();
    Qa.reserve( nrORs() );
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa.push_back( OR(alpha) );

    Qb.clear();
    Qb.reserve( nrIRs() );
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb.push_back( Factor( IR(beta), 1.0 ) );
}
196
197
198 void JTree::GenerateJT( const FactorGraph &fg, const std::vector<VarSet> &cl ) {
199 construct( fg, cl, true );
200
201 // Create messages
202 _mes.clear();
203 _mes.reserve( nrORs() );
204 for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
205 _mes.push_back( vector<Factor>() );
206 _mes[alpha].reserve( nbOR(alpha).size() );
207 foreach( const Neighbor &beta, nbOR(alpha) )
208 _mes[alpha].push_back( Factor( IR(beta), 1.0 ) );
209 }
210
211 if( props.verbose >= 3 )
212 cerr << "Regiongraph generated by JTree::GenerateJT: " << *this << endl;
213 }
214
215
216 string JTree::identify() const {
217 return string(Name) + printProperties();
218 }
219
220
221 Factor JTree::belief( const VarSet &vs ) const {
222 vector<Factor>::const_iterator beta;
223 for( beta = Qb.begin(); beta != Qb.end(); beta++ )
224 if( beta->vars() >> vs )
225 break;
226 if( beta != Qb.end() ) {
227 if( props.inference == Properties::InfType::SUMPROD )
228 return( beta->marginal(vs) );
229 else
230 return( beta->maxMarginal(vs) );
231 } else {
232 vector<Factor>::const_iterator alpha;
233 for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
234 if( alpha->vars() >> vs )
235 break;
236 if( alpha == Qa.end() ) {
237 DAI_THROW(BELIEF_NOT_AVAILABLE);
238 return Factor();
239 } else {
240 if( props.inference == Properties::InfType::SUMPROD )
241 return( alpha->marginal(vs) );
242 else
243 return( alpha->maxMarginal(vs) );
244 }
245 }
246 }
247
248
249 vector<Factor> JTree::beliefs() const {
250 vector<Factor> result;
251 for( size_t beta = 0; beta < nrIRs(); beta++ )
252 result.push_back( Qb[beta] );
253 for( size_t alpha = 0; alpha < nrORs(); alpha++ )
254 result.push_back( Qa[alpha] );
255 return result;
256 }
257
258
/// Exact inference with the HUGIN update scheme.
/** Performs a CollectEvidence sweep over the rooted tree edges (leaves towards
 *  root), then a DistributeEvidence sweep (root towards leaves), updating the
 *  clique beliefs Qa and separator beliefs Qb in place. Normalization constants
 *  encountered along the way are accumulated into _logZ.
 */
void JTree::runHUGIN() {
    // Reset clique beliefs to the outer region factors
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha] = OR(alpha);

    // Reset separator beliefs to uniform
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        Qb[beta].fill( 1.0 );

    // CollectEvidence: traverse tree edges in reverse order (leaves first)
    _logZ = 0.0;
    for( size_t i = RTree.size(); (i--) != 0; ) {
        // Make outer region RTree[i].first consistent with outer region RTree[i].second
        // IR(i) = seperator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].second].marginal( IR( i ), false );
        else
            new_Qb = Qa[RTree[i].second].maxMarginal( IR( i ), false );

        // collect the normalization constant, then pass the (ratio) message
        _logZ += log(new_Qb.normalize());
        Qa[RTree[i].first] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }
    // The root clique contributes the final normalization constant
    if( RTree.empty() )
        _logZ += log(Qa[0].normalize() );
    else
        _logZ += log(Qa[RTree[0].first].normalize());

    // DistributeEvidence: traverse tree edges in forward order (root first)
    for( size_t i = 0; i < RTree.size(); i++ ) {
        // Make outer region RTree[i].second consistent with outer region RTree[i].first
        // IR(i) = seperator OR(RTree[i].first) && OR(RTree[i].second)
        Factor new_Qb;
        if( props.inference == Properties::InfType::SUMPROD )
            new_Qb = Qa[RTree[i].first].marginal( IR( i ) );
        else
            new_Qb = Qa[RTree[i].first].maxMarginal( IR( i ) );

        Qa[RTree[i].second] *= new_Qb / Qb[i];
        Qb[i] = new_Qb;
    }

    // Normalize all clique beliefs
    for( size_t alpha = 0; alpha < nrORs(); alpha++ )
        Qa[alpha].normalize();
}
304
305
/// Exact inference with the Shafer-Shenoy update scheme.
/** Sends messages along each separator in both directions (two passes over the
 *  inner regions), then computes the clique beliefs Qa as the product of the
 *  outer region factor and all incoming messages, and the separator beliefs Qb
 *  as marginals of an adjacent clique belief. Accumulates _logZ from the
 *  message and belief normalization constants.
 */
void JTree::runShaferShenoy() {
    // First pass: messages towards the root (reverse edge order)
    _logZ = 0.0;
    for( size_t e = nrIRs(); (e--) != 0; ) {
        // send a message from RTree[e].second to RTree[e].first
        // or, actually, from the seperator IR(e) to RTree[e].first

        // nbIR(e)[0]/[1] are the two outer regions adjacent to separator e;
        // .dual is the index of e within that outer region's neighbor list
        size_t i = nbIR(e)[1].node; // = RTree[e].second
        size_t j = nbIR(e)[0].node; // = RTree[e].first
        size_t _e = nbIR(e)[0].dual;

        // product of the outer region factor and all incoming messages,
        // except the one coming through separator e itself
        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e), false );
        else
            message( j, _e ) = msg.maxMarginal( IR(e), false );
        _logZ += log( message(j,_e).normalize() );
    }

    // Second pass: messages away from the root (forward edge order)
    for( size_t e = 0; e < nrIRs(); e++ ) {
        size_t i = nbIR(e)[0].node; // = RTree[e].first
        size_t j = nbIR(e)[1].node; // = RTree[e].second
        size_t _e = nbIR(e)[1].dual;

        Factor msg = OR(i);
        foreach( const Neighbor &k, nbOR(i) )
            if( k != e )
                msg *= message( i, k.iter );
        if( props.inference == Properties::InfType::SUMPROD )
            message( j, _e ) = msg.marginal( IR(e) );
        else
            message( j, _e ) = msg.maxMarginal( IR(e) );
    }

    // Calculate beliefs: outer region factor times all incoming messages;
    // only the root clique (or the single clique if there are no separators)
    // contributes its normalization constant to _logZ
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        Factor piet = OR(alpha);
        foreach( const Neighbor &k, nbOR(alpha) )
            piet *= message( alpha, k.iter );
        if( nrIRs() == 0 ) {
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else if( alpha == nbIR(0)[0].node /*RTree[0].first*/ ) {
            _logZ += log( piet.normalize() );
            Qa[alpha] = piet;
        } else
            Qa[alpha] = piet.normalized();
    }

    // Only for logZ (and for belief)...
    // separator beliefs are marginals of an adjacent clique belief
    for( size_t beta = 0; beta < nrIRs(); beta++ ) {
        if( props.inference == Properties::InfType::SUMPROD )
            Qb[beta] = Qa[nbIR(beta)[0].node].marginal( IR(beta) );
        else
            Qb[beta] = Qa[nbIR(beta)[0].node].maxMarginal( IR(beta) );
    }
}
367
368
369 Real JTree::run() {
370 if( props.updates == Properties::UpdateType::HUGIN )
371 runHUGIN();
372 else if( props.updates == Properties::UpdateType::SHSH )
373 runShaferShenoy();
374 return 0.0;
375 }
376
377
/// Returns the log partition sum, as cached by the last run().
Real JTree::logZ() const {
    // Disabled debug check: recomputes logZ from the region-based free energy
    // (counting-number-weighted entropies plus average energy) and asserts it
    // agrees with the cached value.
    /* Real s = 0.0;
    for( size_t beta = 0; beta < nrIRs(); beta++ )
        s += IR(beta).c() * Qb[beta].entropy();
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        s += OR(alpha).c() * Qa[alpha].entropy();
        s += (OR(alpha).log(true) * Qa[alpha]).sum();
    }
    DAI_ASSERT( abs( _logZ - s ) < 1e-8 );
    return s;*/
    return _logZ;
}
390
391
/// Reorders the junction tree so that a small subtree suffices for inference on \a vs.
/** \param vs variables of interest
 *  \param Tree output: a reordered copy of the junction tree; its first edges
 *         form the subtree relevant for \a vs
 *  \param PreviousRoot if not (size_t)-1, the path from this clique towards the
 *         new root is added to the relevant subtree
 *  \return the number of edges of the relevant subtree (prefix length of \a Tree)
 */
size_t JTree::findEfficientTree( const VarSet& vs, RootedTree &Tree, size_t PreviousRoot ) const {
    // find new root clique (the one with maximal statespace overlap with vs)
    size_t maxval = 0, maxalpha = 0;
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        size_t val = VarSet(vs & OR(alpha).vars()).nrStates();
        if( val > maxval ) {
            maxval = val;
            maxalpha = alpha;
        }
    }

    // reorder the tree edges such that maxalpha becomes the new root
    RootedTree newTree( GraphEL( RTree.begin(), RTree.end() ), maxalpha );

    // identify subtree that contains all variables of vs which are not in the new root
    set<DEdge> subTree;
    // for each variable in vs
    for( VarSet::const_iterator n = vs.begin(); n != vs.end(); n++ ) {
        for( size_t e = 0; e < newTree.size(); e++ ) {
            if( OR(newTree[e].second).vars().contains( *n ) ) {
                // add this edge plus the path back to the root;
                // edges in newTree are ordered root-first, so parents of
                // newTree[f] only occur at smaller indices
                size_t f = e;
                subTree.insert( newTree[f] );
                size_t pos = newTree[f].first;
                for( ; f > 0; f-- )
                    if( newTree[f-1].second == pos ) {
                        subTree.insert( newTree[f-1] );
                        pos = newTree[f-1].first;
                    }
            }
        }
    }
    if( PreviousRoot != (size_t)-1 && PreviousRoot != maxalpha) {
        // find first occurence of PreviousRoot in the tree, which is closest to the new root
        size_t e = 0;
        for( ; e != newTree.size(); e++ ) {
            if( newTree[e].second == PreviousRoot )
                break;
        }
        DAI_ASSERT( e != newTree.size() );

        // track-back path to root and add edges to subTree
        subTree.insert( newTree[e] );
        size_t pos = newTree[e].first;
        for( ; e > 0; e-- )
            if( newTree[e-1].second == pos ) {
                subTree.insert( newTree[e-1] );
                pos = newTree[e-1].first;
            }
    }

    // Resulting Tree is a reordered copy of newTree
    // First add edges in subTree to Tree
    Tree.clear();
    vector<DEdge> remTree;
    for( RootedTree::const_iterator e = newTree.begin(); e != newTree.end(); e++ )
        if( subTree.count( *e ) )
            Tree.push_back( *e );
        else
            remTree.push_back( *e );
    size_t subTreeSize = Tree.size();
    // Then add remaining edges
    copy( remTree.begin(), remTree.end(), back_inserter( Tree ) );

    return subTreeSize;
}
457
458
/// Calculates the marginal on \a vs, even if \a vs spans multiple cliques.
/** First tries to answer from an existing separator or clique belief. If none
 *  contains \a vs, it selects an efficient subtree rooted near \a vs, then for
 *  every joint state of the variables of \a vs outside the root clique it
 *  clamps those variables, runs a CollectEvidence pass on the subtree, and
 *  accumulates the (weighted) partial marginals. Cost is exponential in the
 *  number of states of the variables outside the root clique.
 *  Temporarily modifies Qa/Qb on the subtree but restores them afterwards.
 */
Factor JTree::calcMarginal( const VarSet& vs ) {
    vector<Factor>::const_iterator beta;
    for( beta = Qb.begin(); beta != Qb.end(); beta++ )
        if( beta->vars() >> vs )
            break;
    if( beta != Qb.end() ) {
        // vs fits inside a separator: marginalize its belief directly
        if( props.inference == Properties::InfType::SUMPROD )
            return( beta->marginal(vs) );
        else
            return( beta->maxMarginal(vs) );
    } else {
        vector<Factor>::const_iterator alpha;
        for( alpha = Qa.begin(); alpha != Qa.end(); alpha++ )
            if( alpha->vars() >> vs )
                break;
        if( alpha != Qa.end() ) {
            // vs fits inside a clique: marginalize its belief directly
            if( props.inference == Properties::InfType::SUMPROD )
                return( alpha->marginal(vs) );
            else
                return( alpha->maxMarginal(vs) );
        } else {
            // Find subtree to do efficient inference
            RootedTree T;
            size_t Tsize = findEfficientTree( vs, T );

            // Find remaining variables (which are not in the new root)
            VarSet vsrem = vs / OR(T.front().first).vars();
            Factor Pvs (vs, 0.0);

            // Save Qa and Qb on the subtree, and map each subtree edge to its
            // separator index b[i]
            map<size_t,Factor> Qa_old;
            map<size_t,Factor> Qb_old;
            vector<size_t> b(Tsize, 0);
            for( size_t i = Tsize; (i--) != 0; ) {
                size_t alpha1 = T[i].first;
                size_t alpha2 = T[i].second;
                size_t beta;  // NOTE: shadows the iterator 'beta' above (intentionally local)
                for( beta = 0; beta < nrIRs(); beta++ )
                    if( UEdge( RTree[beta].first, RTree[beta].second ) == UEdge( alpha1, alpha2 ) )
                        break;
                DAI_ASSERT( beta != nrIRs() );
                b[i] = beta;

                if( !Qa_old.count( alpha1 ) )
                    Qa_old[alpha1] = Qa[alpha1];
                if( !Qa_old.count( alpha2 ) )
                    Qa_old[alpha2] = Qa[alpha2];
                if( !Qb_old.count( beta ) )
                    Qb_old[beta] = Qb[beta];
            }

            // For all states of vsrem
            for( State s(vsrem); s.valid(); s++ ) {
                // CollectEvidence
                Real logZ = 0.0;
                for( size_t i = Tsize; (i--) != 0; ) {
                    // Make outer region T[i].first consistent with outer region T[i].second
                    // IR(i) = seperator OR(T[i].first) && OR(T[i].second)

                    // Clamp the variables of vsrem occurring in this clique
                    // to their current joint state s
                    for( VarSet::const_iterator n = vsrem.begin(); n != vsrem.end(); n++ )
                        if( Qa[T[i].second].vars() >> *n ) {
                            Factor piet( *n, 0.0 );
                            piet.set( s(*n), 1.0 );
                            Qa[T[i].second] *= piet;
                        }

                    Factor new_Qb;
                    if( props.inference == Properties::InfType::SUMPROD )
                        new_Qb = Qa[T[i].second].marginal( IR( b[i] ), false );
                    else
                        new_Qb = Qa[T[i].second].maxMarginal( IR( b[i] ), false );
                    logZ += log(new_Qb.normalize());
                    Qa[T[i].first] *= new_Qb / Qb[b[i]];
                    Qb[b[i]] = new_Qb;
                }
                logZ += log(Qa[T[0].first].normalize());

                // Weight the root marginal by the probability mass of state s
                Factor piet( vsrem, 0.0 );
                piet.set( s, exp(logZ) );
                if( props.inference == Properties::InfType::SUMPROD )
                    Pvs += piet * Qa[T[0].first].marginal( vs / vsrem, false );      // OPTIMIZE ME
                else
                    Pvs += piet * Qa[T[0].first].maxMarginal( vs / vsrem, false );      // OPTIMIZE ME

                // Restore clamped beliefs
                for( map<size_t,Factor>::const_iterator alpha = Qa_old.begin(); alpha != Qa_old.end(); alpha++ )
                    Qa[alpha->first] = alpha->second;
                for( map<size_t,Factor>::const_iterator beta = Qb_old.begin(); beta != Qb_old.end(); beta++ )
                    Qb[beta->first] = beta->second;
            }

            return( Pvs.normalized() );
        }
    }
}
554
555
556 std::pair<size_t,double> boundTreewidth( const FactorGraph &fg, greedyVariableElimination::eliminationCostFunction fn ) {
557 ClusterGraph _cg;
558
559 // Copy factors
560 for( size_t I = 0; I < fg.nrFactors(); I++ )
561 _cg.insert( fg.factor(I).vars() );
562
563 // Retain only maximal clusters
564 _cg.eraseNonMaximal();
565
566 // Obtain elimination sequence
567 vector<VarSet> ElimVec = _cg.VarElim( greedyVariableElimination( fn ) ).eraseNonMaximal().clusters();
568
569 // Calculate treewidth
570 size_t treewidth = 0;
571 double nrstates = 0.0;
572 for( size_t i = 0; i < ElimVec.size(); i++ ) {
573 if( ElimVec[i].size() > treewidth )
574 treewidth = ElimVec[i].size();
575 size_t s = ElimVec[i].nrStates();
576 if( s > nrstates )
577 nrstates = s;
578 }
579
580 return make_pair(treewidth, nrstates);
581 }
582
583
/// Returns a joint MAP state estimate (one state index per variable).
/** Reads off the argmax of each single-variable belief. In debug builds it
 *  first checks that the MAP states implied by the factor, clique and
 *  separator beliefs are mutually consistent, and throws RUNTIME_ERROR if not.
 *  NOTE(review): taking per-variable argmaxes independently presumably assumes
 *  a prior max-product run (inference==MAXPROD) with a unique maximizer —
 *  confirm against callers.
 */
std::vector<size_t> JTree::findMaximum() const {
#ifdef DAI_DEBUG
    // check consistency of variables and factors
    for( size_t I = 0; I < nrFactors(); I++ ) {
        size_t linearState = beliefF(I).p().argmax().first;
        map<Var,size_t> state = calcState( factor(I).vars(), linearState );
        foreach( const Neighbor& i, nbF(I) ) {
            if( state[var(i)] != beliefV(i).p().argmax().first ) {
                cerr << "ERROR: inconsistency between MAP beliefs: factor belief " << beliefF(I) << ": " << state << " is inconsistent with " << beliefV(i) << endl;
                DAI_THROW(RUNTIME_ERROR);
            }
        }
    }
    // check consistency of clique beliefs with adjacent separator beliefs
    for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
        size_t linearStateQa = Qa[alpha].p().argmax().first;
        map<Var,size_t> stateQa = calcState( Qa[alpha].vars(), linearStateQa );
        foreach( const Neighbor& beta, nbOR(alpha) ) {
            size_t linearStateQb = Qb[beta].p().argmax().first;
            map<Var,size_t> stateQb = calcState( Qb[beta].vars(), linearStateQb );
            for( map<Var,size_t>::const_iterator it = stateQb.begin(); it != stateQb.end(); it++ )
                if( stateQa[it->first] != it->second ) {
                    cerr << "ERROR: inconsistency between MAP beliefs: OR belief " << Qa[alpha] << " (with MAP " << stateQa << ") is inconsistent with IR belief " << Qb[beta] << " (with MAP " << stateQb << ")" << endl;
                    DAI_THROW(RUNTIME_ERROR);
                }
        }
    }
#endif

    // Collect the argmax of each single-variable belief
    vector<size_t> maximum;
    maximum.reserve( nrVars() );
    for( size_t i = 0; i < nrVars(); i++ )
        maximum.push_back( beliefV(i).p().argmax().first );
    return maximum;
}
619
620
621 } // end of namespace dai