-/* Copyright (C) 2006-2008 Joris Mooij [j dot mooij at science dot ru dot nl]
- Radboud University Nijmegen, The Netherlands
-
+/* Copyright (C) 2006-2008 Joris Mooij [joris dot mooij at tuebingen dot mpg dot de]
+ Radboud University Nijmegen, The Netherlands /
+ Max Planck Institute for Biological Cybernetics, Germany
+
This file is part of libDAI.
libDAI is free software; you can redistribute it and/or modify
#include <map>
#include <dai/hak.h>
#include <dai/util.h>
-#include <dai/diffs.h>
+#include <dai/exceptions.h>
namespace dai {
const char *HAK::Name = "HAK";
-bool HAK::checkProperties() {
- if( !HasProperty("tol") )
- return false;
- if (!HasProperty("maxiter") )
- return false;
- if (!HasProperty("verbose") )
- return false;
- if( !HasProperty("doubleloop") )
- return false;
- if( !HasProperty("clusters") )
- return false;
+void HAK::setProperties( const PropertySet &opts ) {
+ assert( opts.hasKey("tol") );
+ assert( opts.hasKey("maxiter") );
+ assert( opts.hasKey("verbose") );
+ assert( opts.hasKey("doubleloop") );
+ assert( opts.hasKey("clusters") );
- ConvertPropertyTo<double>("tol");
- ConvertPropertyTo<size_t>("maxiter");
- ConvertPropertyTo<size_t>("verbose");
- ConvertPropertyTo<bool>("doubleloop");
- ConvertPropertyTo<ClustersType>("clusters");
-
- if( HasProperty("loopdepth") )
- ConvertPropertyTo<size_t>("loopdepth");
- else if( Clusters() == ClustersType::LOOP )
- return false;
-
- return true;
+ props.tol = opts.getStringAs<double>("tol");
+ props.maxiter = opts.getStringAs<size_t>("maxiter");
+ props.verbose = opts.getStringAs<size_t>("verbose");
+ props.doubleloop = opts.getStringAs<bool>("doubleloop");
+ props.clusters = opts.getStringAs<Properties::ClustersType>("clusters");
+
+ if( opts.hasKey("loopdepth") )
+ props.loopdepth = opts.getStringAs<size_t>("loopdepth");
+ else
+ assert( props.clusters != Properties::ClustersType::LOOP );
+ if( opts.hasKey("damping") )
+ props.damping = opts.getStringAs<double>("damping");
+ else
+ props.damping = 0.0;
+}
+
+
+PropertySet HAK::getProperties() const {
+ PropertySet opts;
+ opts.Set( "tol", props.tol );
+ opts.Set( "maxiter", props.maxiter );
+ opts.Set( "verbose", props.verbose );
+ opts.Set( "doubleloop", props.doubleloop );
+ opts.Set( "clusters", props.clusters );
+ opts.Set( "loopdepth", props.loopdepth );
+ opts.Set( "damping", props.damping );
+ return opts;
+}
+
+
+string HAK::printProperties() const {
+ stringstream s( stringstream::out );
+ s << "[";
+ s << "tol=" << props.tol << ",";
+ s << "maxiter=" << props.maxiter << ",";
+ s << "verbose=" << props.verbose << ",";
+ s << "doubleloop=" << props.doubleloop << ",";
+ s << "clusters=" << props.clusters << ",";
+ s << "loopdepth=" << props.loopdepth << ",";
+ s << "damping=" << props.damping << "]";
+ return s.str();
}
void HAK::constructMessages() {
// Create outer beliefs
_Qa.clear();
- _Qa.reserve(nr_ORs());
- for( size_t alpha = 0; alpha < nr_ORs(); alpha++ )
+ _Qa.reserve(nrORs());
+ for( size_t alpha = 0; alpha < nrORs(); alpha++ )
_Qa.push_back( Factor( OR(alpha).vars() ) );
// Create inner beliefs
_Qb.clear();
- _Qb.reserve(nr_IRs());
- for( size_t beta = 0; beta < nr_IRs(); beta++ )
+ _Qb.reserve(nrIRs());
+ for( size_t beta = 0; beta < nrIRs(); beta++ )
_Qb.push_back( Factor( IR(beta) ) );
// Create messages
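+ // Messages are stored per outer region: _muab[alpha][_beta] is the message from outer
+ // region alpha to its _beta'th neighboring inner region, and _muba[alpha][_beta] is the
+ // message in the opposite direction.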
_muab.clear();
- _muab.reserve(nr_Redges());
+ _muab.reserve( nrORs() );
_muba.clear();
- _muba.reserve(nr_Redges());
- for( vector<R_edge_t>::const_iterator ab = Redges().begin(); ab != Redges().end(); ab++ ) {
- _muab.push_back( Factor( IR(ab->second) ) );
- _muba.push_back( Factor( IR(ab->second) ) );
+ _muba.reserve( nrORs() );
+ for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
+ _muab.push_back( vector<Factor>() );
+ _muba.push_back( vector<Factor>() );
+ _muab[alpha].reserve( nbOR(alpha).size() );
+ _muba[alpha].reserve( nbOR(alpha).size() );
+ foreach( const Neighbor &beta, nbOR(alpha) ) {
+ _muab[alpha].push_back( Factor( IR(beta) ) );
+ _muba[alpha].push_back( Factor( IR(beta) ) );
+ }
}
}
-HAK::HAK(const RegionGraph & rg, const Properties &opts) : DAIAlgRG(rg, opts) {
- assert( checkProperties() );
+HAK::HAK( const RegionGraph &rg, const PropertySet &opts ) : DAIAlgRG(rg), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
+ setProperties( opts );
constructMessages();
}
-void HAK::findLoopClusters( const FactorGraph & fg, set<VarSet> &allcl, VarSet newcl, const Var & root, size_t length, VarSet vars ) {
+void HAK::findLoopClusters( const FactorGraph & fg, std::set<VarSet> &allcl, VarSet newcl, const Var & root, size_t length, VarSet vars ) {
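+ // Recursively grows the candidate cluster newcl with variables from vars; when the Markov
+ // blanket of a newly added variable contains root (and newcl already holds at least two
+ // variables), a loop through root has been closed and the cluster is stored in allcl.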
for( VarSet::const_iterator in = vars.begin(); in != vars.end(); in++ ) {
- VarSet ind = fg.delta( *in );
- if( (newcl.size()) >= 2 && (ind >> root) ) {
+ VarSet ind = fg.delta( fg.findVar( *in ) );
+ if( (newcl.size()) >= 2 && ind.contains( root ) ) {
allcl.insert( newcl | *in );
}
else if( length > 1 )
}
-HAK::HAK(const FactorGraph & fg, const Properties &opts) : DAIAlgRG(opts) {
- assert( checkProperties() );
+HAK::HAK(const FactorGraph & fg, const PropertySet &opts) : DAIAlgRG(), _Qa(), _Qb(), _muab(), _muba(), _maxdiff(0.0), _iters(0U), props() {
+ setProperties( opts );
vector<VarSet> cl;
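+ // Choose the outer clusters according to the "clusters" property: MIN uses the factor
+ // cliques, DELTA uses the Delta (variable plus Markov blanket) of each variable, and LOOP
+ // adds clusters formed by short loops (up to length "loopdepth") to the factor cliques.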
- if( Clusters() == ClustersType::MIN ) {
+ if( props.clusters == Properties::ClustersType::MIN ) {
cl = fg.Cliques();
- } else if( Clusters() == ClustersType::DELTA ) {
+ } else if( props.clusters == Properties::ClustersType::DELTA ) {
for( size_t i = 0; i < fg.nrVars(); i++ )
- cl.push_back(fg.Delta(fg.var(i)));
- } else if( Clusters() == ClustersType::LOOP ) {
+ cl.push_back(fg.Delta(i));
+ } else if( props.clusters == Properties::ClustersType::LOOP ) {
cl = fg.Cliques();
set<VarSet> scl;
- for( vector<Var>::const_iterator i0 = fg.vars().begin(); i0 != fg.vars().end(); i0++ ) {
- VarSet i0d = fg.delta(*i0);
- if( LoopDepth() > 1 )
- findLoopClusters( fg, scl, *i0, *i0, LoopDepth() - 1, fg.delta(*i0) );
+ for( size_t i0 = 0; i0 < fg.nrVars(); i0++ ) {
+ VarSet i0d = fg.delta(i0);
+ if( props.loopdepth > 1 )
+ findLoopClusters( fg, scl, fg.var(i0), fg.var(i0), props.loopdepth - 1, fg.delta(i0) );
}
for( set<VarSet>::const_iterator c = scl.begin(); c != scl.end(); c++ )
cl.push_back(*c);
- if( Verbose() >= 3 ) {
- cout << "HAK uses the following clusters: " << endl;
+ if( props.verbose >= 3 ) {
+ cout << Name << " uses the following clusters: " << endl;
for( vector<VarSet>::const_iterator cli = cl.begin(); cli != cl.end(); cli++ )
cout << *cli << endl;
}
} else
- throw "Invalid Clusters type";
+ DAI_THROW(INTERNAL_ERROR);
RegionGraph rg(fg,cl);
RegionGraph::operator=(rg);
constructMessages();
- if( Verbose() >= 3 )
- cout << "HAK regiongraph: " << *this << endl;
+ if( props.verbose >= 3 )
+ cout << Name << " regiongraph: " << *this << endl;
}
string HAK::identify() const {
- stringstream result (stringstream::out);
- result << Name << GetProperties();
- return result.str();
+ return string(Name) + printProperties();
}
void HAK::init( const VarSet &ns ) {
for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
- if( alpha->vars() && ns )
- alpha->fill( 1.0 / alpha->stateSpace() );
-
- for( size_t beta = 0; beta < nr_IRs(); beta++ )
- if( IR(beta) && ns ) {
- _Qb[beta].fill( 1.0 / IR(beta).stateSpace() );
- for( R_nb_cit alpha = nbIR(beta).begin(); alpha != nbIR(beta).end(); alpha++ ) {
- muab(*alpha,beta).fill( 1.0 / IR(beta).stateSpace() );
- muba(beta,*alpha).fill( 1.0 / IR(beta).stateSpace() );
+ if( alpha->vars().intersects( ns ) )
+ alpha->fill( 1.0 / alpha->states() );
+
+ for( size_t beta = 0; beta < nrIRs(); beta++ )
+ if( IR(beta).intersects( ns ) ) {
+ _Qb[beta].fill( 1.0 );
+ foreach( const Neighbor &alpha, nbIR(beta) ) {
+ size_t _beta = alpha.dual;
+ muab( alpha, _beta ).fill( 1.0 );
+ muba( alpha, _beta ).fill( 1.0 );
}
}
}
void HAK::init() {
- assert( checkProperties() );
-
for( vector<Factor>::iterator alpha = _Qa.begin(); alpha != _Qa.end(); alpha++ )
- alpha->fill( 1.0 / alpha->stateSpace() );
+ alpha->fill( 1.0 / alpha->states() );
for( vector<Factor>::iterator beta = _Qb.begin(); beta != _Qb.end(); beta++ )
- beta->fill( 1.0 / beta->stateSpace() );
+ beta->fill( 1.0 / beta->states() );
- for( size_t ab = 0; ab < nr_Redges(); ab++ ) {
- _muab[ab].fill( 1.0 / _muab[ab].stateSpace() );
- _muba[ab].fill( 1.0 / _muba[ab].stateSpace() );
- }
+ for( size_t alpha = 0; alpha < nrORs(); alpha++ )
+ foreach( const Neighbor &beta, nbOR(alpha) ) {
+ size_t _beta = beta.iter;
+ muab( alpha, _beta ).fill( 1.0 / muab( alpha, _beta ).states() );
+ muba( alpha, _beta ).fill( 1.0 / muba( alpha, _beta ).states() );
+ }
}
double HAK::doGBP() {
- if( Verbose() >= 1 )
+ if( props.verbose >= 1 )
cout << "Starting " << identify() << "...";
- if( Verbose() >= 3)
+ if( props.verbose >= 3)
cout << endl;
- clock_t tic = toc();
+ double tic = toc();
// Check whether counting numbers won't lead to problems
- for( size_t beta = 0; beta < nr_IRs(); beta++ )
+ for( size_t beta = 0; beta < nrIRs(); beta++ )
assert( nbIR(beta).size() + IR(beta).c() != 0.0 );
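+ // (this sum appears as a denominator in the exponent of the inner belief update below)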
// Keep old beliefs to check convergence
// Differences in single node beliefs
Diffs diffs(nrVars(), 1.0);
- size_t iter = 0;
// do several passes over the network until maximum number of iterations has
// been reached or until the maximum belief difference is smaller than tolerance
- for( iter = 0; iter < MaxIter() && diffs.max() > Tol(); iter++ ) {
- for( size_t beta = 0; beta < nr_IRs(); beta++ ) {
- for( R_nb_cit alpha = nbIR(beta).begin(); alpha != nbIR(beta).end(); alpha++ )
- muab(*alpha,beta) = _Qa[*alpha].marginal(IR(beta)).divided_by( muba(beta,*alpha) );
+ for( _iters = 0; _iters < props.maxiter && diffs.maxDiff() > props.tol; _iters++ ) {
+ for( size_t beta = 0; beta < nrIRs(); beta++ ) {
+ foreach( const Neighbor &alpha, nbIR(beta) ) {
+ size_t _beta = alpha.dual;
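+ // Message from outer region alpha to inner region beta: the marginal of the outer belief
+ // on the inner region's variables, divided by the message in the opposite direction.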
+ muab( alpha, _beta ) = _Qa[alpha].marginal(IR(beta)).divided_by( muba(alpha,_beta) );
+ /* TODO: INVESTIGATE THIS PROBLEM
+ *
+ * In some cases, the muab's can have very large entries because the muba's have very
+ * small entries. This may cause NaNs later on (e.g., multiplying large quantities may
+ * result in +inf; normalization then tries to calculate inf / inf, which is NaN).
+ * A fix for this problem would be to normalize the messages muab.
+ * However, it is not obvious whether this is a real solution, because it has a
+ * negative performance impact and the NaNs seem to be a symptom of a fundamental
+ * numerical instability.
+ */
+ muab(alpha,_beta).normalize();
+ }
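+ // Combine the incoming messages into the new inner belief; the exponent
+ // 1 / (|nbIR(beta)| + c_beta) is where the counting number check above is needed.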
Factor Qb_new;
- for( R_nb_cit alpha = nbIR(beta).begin(); alpha != nbIR(beta).end(); alpha++ )
- Qb_new *= muab(*alpha,beta) ^ (1 / (nbIR(beta).size() + IR(beta).c()));
- Qb_new.normalize( _normtype );
- if( Qb_new.hasNaNs() ) {
- cout << "HAK::doGBP: Qb_new has NaNs!" << endl;
- return NAN;
+ foreach( const Neighbor &alpha, nbIR(beta) ) {
+ size_t _beta = alpha.dual;
+ Qb_new *= muab(alpha,_beta) ^ (1 / (nbIR(beta).size() + IR(beta).c()));
}
-// _Qb[beta] = Qb_new.makeZero(1e-100); // damping?
- _Qb[beta] = Qb_new;
-
- for( R_nb_cit alpha = nbIR(beta).begin(); alpha != nbIR(beta).end(); alpha++ ) {
- muba(beta,*alpha) = _Qb[beta].divided_by( muab(*alpha,beta) );
- Factor Qa_new = OR(*alpha);
- for( R_nb_cit gamma = nbOR(*alpha).begin(); gamma != nbOR(*alpha).end(); gamma++ )
- Qa_new *= muba(*gamma,*alpha);
- Qa_new ^= (1.0 / OR(*alpha).c());
- Qa_new.normalize( _normtype );
+ Qb_new.normalize();
+ if( Qb_new.hasNaNs() ) {
+ // TODO: WHAT TO DO IN THIS CASE?
+ cout << Name << "::doGBP: Qb_new has NaNs!" << endl;
+ return 1.0;
+ }
+ /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
+ *
+ * _Qb[beta] = Qb_new.makeZero(1e-100);
+ */
+
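+ // Optionally damp the update with a geometric mixture of the new and old inner belief;
+ // damping is only used in the single loop variant.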
+ if( props.doubleloop || props.damping == 0.0 )
+ _Qb[beta] = Qb_new; // no damping for double loop
+ else
+ _Qb[beta] = (Qb_new^(1.0 - props.damping)) * (_Qb[beta]^props.damping);
+
+ foreach( const Neighbor &alpha, nbIR(beta) ) {
+ size_t _beta = alpha.dual;
+ muba(alpha,_beta) = _Qb[beta].divided_by( muab(alpha,_beta) );
+
+ /* TODO: INVESTIGATE WHETHER THIS HACK (INVENTED BY KEES) TO PREVENT NANS MAKES SENSE
+ *
+ * muba(alpha,_beta).makePositive(1e-100);
+ *
+ */
+
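+ // Update the outer belief: the original outer factor times all incoming messages,
+ // raised to the power 1 / c_alpha and normalized.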
+ Factor Qa_new = OR(alpha);
+ foreach( const Neighbor &gamma, nbOR(alpha) )
+ Qa_new *= muba(alpha,gamma.iter);
+ Qa_new ^= (1.0 / OR(alpha).c());
+ Qa_new.normalize();
if( Qa_new.hasNaNs() ) {
- cout << "HAK::doGBP: Qa_new has NaNs!" << endl;
- return NAN;
+ cout << Name << "::doGBP: Qa_new has NaNs!" << endl;
+ return 1.0;
}
-// _Qa[*alpha] = Qa_new.makeZero(1e-100); // damping?
- _Qa[*alpha] = Qa_new;
+ /* TODO: WHAT IS THE PURPOSE OF THE FOLLOWING CODE?
+ *
+ * _Qa[alpha] = Qa_new.makeZero(1e-100);
+ */
+
+ if( props.doubleloop || props.damping == 0.0 )
+ _Qa[alpha] = Qa_new; // no damping for double loop
+ else
+ // FIXME: GEOMETRIC DAMPING IS SLOW!
+ _Qa[alpha] = (Qa_new^(1.0 - props.damping)) * (_Qa[alpha]^props.damping);
}
}
old_beliefs[i] = new_belief;
}
- if( Verbose() >= 3 )
- cout << "HAK::doGBP: maxdiff " << diffs.max() << " after " << iter+1 << " passes" << endl;
+ if( props.verbose >= 3 )
+ cout << Name << "::doGBP: maxdiff " << diffs.maxDiff() << " after " << _iters+1 << " passes" << endl;
}
- updateMaxDiff( diffs.max() );
+ if( diffs.maxDiff() > _maxdiff )
+ _maxdiff = diffs.maxDiff();
- if( Verbose() >= 1 ) {
- if( diffs.max() > Tol() ) {
- if( Verbose() == 1 )
+ if( props.verbose >= 1 ) {
+ if( diffs.maxDiff() > props.tol ) {
+ if( props.verbose == 1 )
cout << endl;
- cout << "HAK::doGBP: WARNING: not converged within " << MaxIter() << " passes (" << toc() - tic << " clocks)...final maxdiff:" << diffs.max() << endl;
+ cout << Name << "::doGBP: WARNING: not converged within " << props.maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
} else {
- if( Verbose() >= 2 )
- cout << "HAK::doGBP: ";
- cout << "converged in " << iter << " passes (" << toc() - tic << " clocks)." << endl;
+ if( props.verbose >= 2 )
+ cout << Name << "::doGBP: ";
+ cout << "converged in " << _iters << " passes (" << toc() - tic << " seconds)." << endl;
}
}
- return diffs.max();
+ return diffs.maxDiff();
}
double HAK::doDoubleLoop() {
- if( Verbose() >= 1 )
+ if( props.verbose >= 1 )
cout << "Starting " << identify() << "...";
- if( Verbose() >= 3)
+ if( props.verbose >= 3)
cout << endl;
- clock_t tic = toc();
+ double tic = toc();
// Save original outer regions
- vector<FRegion> org_ORs = ORs();
+ vector<FRegion> org_ORs = ORs;
// Save original inner counting numbers and set negative counting numbers to zero
- vector<double> org_IR_cs( nr_IRs(), 0.0 );
- for( size_t beta = 0; beta < nr_IRs(); beta++ ) {
+ vector<double> org_IR_cs( nrIRs(), 0.0 );
+ for( size_t beta = 0; beta < nrIRs(); beta++ ) {
org_IR_cs[beta] = IR(beta).c();
if( IR(beta).c() < 0.0 )
IR(beta).c() = 0.0;
// Differences in single node beliefs
Diffs diffs(nrVars(), 1.0);
- size_t outer_maxiter = MaxIter();
- double outer_tol = Tol();
- size_t outer_verbose = Verbose();
- double org_maxdiff = MaxDiff();
+ size_t outer_maxiter = props.maxiter;
+ double outer_tol = props.tol;
+ size_t outer_verbose = props.verbose;
+ double org_maxdiff = _maxdiff;
// Set parameters for inner loop
- MaxIter( 5 );
- Verbose( outer_verbose ? outer_verbose - 1 : 0 );
+ props.maxiter = 5;
+ props.verbose = outer_verbose ? outer_verbose - 1 : 0;
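+ // Each inner loop now runs at most 5 GBP passes (with reduced verbosity) before the
+ // outer regions are recalculated.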
size_t outer_iter = 0;
- for( outer_iter = 0; outer_iter < outer_maxiter && diffs.max() > outer_tol; outer_iter++ ) {
+ size_t total_iter = 0;
+ for( outer_iter = 0; outer_iter < outer_maxiter && diffs.maxDiff() > outer_tol; outer_iter++ ) {
// Calculate new outer regions
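+ // Multiply the original outer factors by the current inner beliefs, raised to the
+ // difference between the clamped and the original inner counting numbers (spread over the
+ // neighboring outer regions); this way each inner doGBP() run minimizes a convex bound on
+ // the free energy, which is the usual double-loop construction.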
- for( size_t alpha = 0; alpha < nr_ORs(); alpha++ ) {
+ for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
OR(alpha) = org_ORs[alpha];
- for( R_nb_cit beta = nbOR(alpha).begin(); beta != nbOR(alpha).end(); beta++ )
- OR(alpha) *= _Qb[*beta] ^ ((IR(*beta).c() - org_IR_cs[*beta]) / nbIR(*beta).size());
+ foreach( const Neighbor &beta, nbOR(alpha) )
+ OR(alpha) *= _Qb[beta] ^ ((IR(beta).c() - org_IR_cs[beta]) / nbIR(beta).size());
}
// Inner loop
if( isnan( doGBP() ) )
- return NAN;
+ return 1.0;
// Calculate new single variable beliefs and compare with old ones
for( size_t i = 0; i < nrVars(); ++i ) {
old_beliefs[i] = new_belief;
}
- if( Verbose() >= 3 )
- cout << "HAK::doDoubleLoop: maxdiff " << diffs.max() << " after " << outer_iter+1 << " passes" << endl;
+ total_iter += Iterations();
+
+ if( props.verbose >= 3 )
+ cout << Name << "::doDoubleLoop: maxdiff " << diffs.maxDiff() << " after " << total_iter << " passes" << endl;
}
- // restore _maxiter, _verbose and _maxdiff
+ // Restore the original maxiter and verbose properties and the original maxdiff
- MaxIter( outer_maxiter );
- Verbose( outer_verbose );
- MaxDiff( org_maxdiff );
+ props.maxiter = outer_maxiter;
+ props.verbose = outer_verbose;
+ _maxdiff = org_maxdiff;
- updateMaxDiff( diffs.max() );
+ _iters = total_iter;
+ if( diffs.maxDiff() > _maxdiff )
+ _maxdiff = diffs.maxDiff();
// Restore original outer regions
- ORs() = org_ORs;
+ ORs = org_ORs;
// Restore original inner counting numbers
- for( size_t beta = 0; beta < nr_IRs(); ++beta )
+ for( size_t beta = 0; beta < nrIRs(); ++beta )
IR(beta).c() = org_IR_cs[beta];
- if( Verbose() >= 1 ) {
- if( diffs.max() > Tol() ) {
- if( Verbose() == 1 )
+ if( props.verbose >= 1 ) {
+ if( diffs.maxDiff() > props.tol ) {
+ if( props.verbose == 1 )
cout << endl;
- cout << "HAK::doDoubleLoop: WARNING: not converged within " << outer_maxiter << " passes (" << toc() - tic << " clocks)...final maxdiff:" << diffs.max() << endl;
+ cout << Name << "::doDoubleLoop: WARNING: not converged within " << outer_maxiter << " passes (" << toc() - tic << " seconds)...final maxdiff:" << diffs.maxDiff() << endl;
} else {
- if( Verbose() >= 3 )
- cout << "HAK::doDoubleLoop: ";
- cout << "converged in " << outer_iter << " passes (" << toc() - tic << " clocks)." << endl;
+ if( props.verbose >= 3 )
+ cout << Name << "::doDoubleLoop: ";
+ cout << "converged in " << total_iter << " passes (" << toc() - tic << " seconds)." << endl;
}
}
- return diffs.max();
+ return diffs.maxDiff();
}
double HAK::run() {
- if( DoubleLoop() )
+ if( props.doubleloop )
return doDoubleLoop();
else
return doGBP();
vector<Factor> HAK::beliefs() const {
vector<Factor> result;
- for( size_t beta = 0; beta < nr_IRs(); beta++ )
+ for( size_t beta = 0; beta < nrIRs(); beta++ )
result.push_back( Qb(beta) );
- for( size_t alpha = 0; alpha < nr_ORs(); alpha++ )
+ for( size_t alpha = 0; alpha < nrORs(); alpha++ )
result.push_back( Qa(alpha) );
return result;
}
-Complex HAK::logZ() const {
- Complex sum = 0.0;
- for( size_t beta = 0; beta < nr_IRs(); beta++ )
- sum += Complex(IR(beta).c()) * Qb(beta).entropy();
- for( size_t alpha = 0; alpha < nr_ORs(); alpha++ ) {
- sum += Complex(OR(alpha).c()) * Qa(alpha).entropy();
+Real HAK::logZ() const {
+ Real sum = 0.0;
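+ // logZ is approximated by the negative region-based free energy: the counting-number
+ // weighted entropies of the outer and inner beliefs plus the expected log outer factors.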
+ for( size_t beta = 0; beta < nrIRs(); beta++ )
+ sum += IR(beta).c() * Qb(beta).entropy();
+ for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
+ sum += OR(alpha).c() * Qa(alpha).entropy();
sum += (OR(alpha).log0() * Qa(alpha)).totalSum();
}
return sum;