+* [Frederik Eaton] Improved parsing code in tests/testdai to allow recursive
+ alias substitutions
+* Interface changes:
+ TProb<T>::minVal() -> TProb<T>::min()
+ TFactor<T>::minVal() -> TFactor<T>::min()
+ TProb<T>::maxVal() -> TProb<T>::max()
+ TFactor<T>::maxVal() -> TFactor<T>::max()
+ TProb<T>::totalSum() -> TProb<T>::sum()
+ TFactor<T>::totalSum() -> TFactor<T>::sum()
+* [Frederik Eaton] Added methods setUniform(), sumAbs(), argmax() to TProb<T>
+* [Frederik Eaton] Added TAGS, lib targets to Makefile
+* [Frederik Eaton] Added useful aliases to aliases.conf
* [Frederik Eaton] Removed unnecessary copy constructors and assignment
operators
* [Frederik Eaton] Change cout to cerr in warnings and error messages
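
For reference, a minimal usage sketch (not part of the patch) of the renamed and newly added TProb<T> accessors; it assumes the Prob typedef (TProb<Real>) and the operator[] accessor from dai/prob.h at this revision:

    #include <iostream>
    #include <utility>
    #include <dai/prob.h>

    int main() {
        dai::Prob p( 4 );                 // four entries, initialized uniformly
        p.setUniform();                   // explicitly reset all entries to 1.0/size()
        p[2] = 0.7;
        std::cout << p.min() << " " << p.max() << " " << p.sum() << std::endl;
        std::cout << p.sumAbs() << std::endl;
        std::pair<size_t,dai::Real> am = p.argmax();   // index and value of the largest entry
        std::cout << am.first << " " << am.second << std::endl;
        return 0;
    }
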
BOOSTLIBS=-lboost_program_options-mt
endif
ifeq ($(OS),LINUX)
- BOOSTLIBS=-lboost_program_options
+ BOOSTLIBS=-lboost_program_options-mt
endif
ifeq ($(OS),WINDOWS)
BOOSTLIBS=/LIBPATH:C:\boost_1_36_0\stage\lib
endif
# Build targets
-TARGETS=tests utils $(LIB)/libdai$(LE) examples testregression
+TARGETS=tests utils lib examples testregression
ifneq ($(OS),WINDOWS)
TARGETS:=$(TARGETS) doc
endif
utils : utils/createfg$(EE) utils/fg2dot$(EE) utils/fginfo$(EE)
+lib: $(LIB)/libdai$(LE)
+
# OBJECTS
##########
doc : $(INC)/*.h $(SRC)/*.cpp examples/*.cpp doxygen.conf
doxygen doxygen.conf
+TAGS:
+ etags src/*.cpp include/dai/*.h tests/*.cpp utils/*.cpp
+ ctags src/*.cpp include/dai/*.h tests/*.cpp utils/*.cpp
+
# CLEAN
########
bool hasNegatives() const { return _p.hasNegatives(); }
/// Returns total sum of values
- T totalSum() const { return _p.totalSum(); }
+ T sum() const { return _p.sum(); }
/// Returns maximum absolute value
T maxAbs() const { return _p.maxAbs(); }
/// Returns maximum value
- T maxVal() const { return _p.maxVal(); }
+ T max() const { return _p.max(); }
/// Returns minimum value
- T minVal() const { return _p.minVal(); }
+ T min() const { return _p.min(); }
/// Returns entropy of *this TFactor
Real entropy() const { return _p.entropy(); }
bs = i.states();
else
as = j.states();
- T f1 = slice( ij, alpha1 * as + beta1 * bs ).p().divide( slice( ij, alpha2 * as + beta1 * bs ).p() ).maxVal();
- T f2 = slice( ij, alpha2 * as + beta2 * bs ).p().divide( slice( ij, alpha1 * as + beta2 * bs ).p() ).maxVal();
+ T f1 = slice( ij, alpha1 * as + beta1 * bs ).p().divide( slice( ij, alpha2 * as + beta1 * bs ).p() ).max();
+ T f2 = slice( ij, alpha2 * as + beta2 * bs ).p().divide( slice( ij, alpha1 * as + beta2 * bs ).p() ).max();
T f = f1 * f2;
if( f > max )
max = f;
_p[i] = 0;
return *this;
}
+
+ /// Sets all entries to 1.0/size()
+ TProb<T>& setUniform() {
+ fill(1.0/size());
+ return *this;
+ }
/// Sets entries that are smaller than epsilon to epsilon
TProb<T>& makePositive( Real epsilon ) {
}
/// Returns sum of all entries
- T totalSum() const {
+ T sum() const {
T Z = std::accumulate( _p.begin(), _p.end(), (T)0 );
return Z;
}
+ /// Returns sum of absolute values of all entries
+ T sumAbs() const {
+ T s = 0;
+ for( size_t i = 0; i < size(); i++ )
+ s += fabs( (Real) _p[i] );
+ return s;
+ }
+
/// Returns maximum absolute value of all entries
T maxAbs() const {
T Z = 0;
}
/// Returns maximum value of all entries
- T maxVal() const {
+ T max() const {
T Z = *std::max_element( _p.begin(), _p.end() );
return Z;
}
/// Returns minimum value of all entries
- T minVal() const {
+ T min() const {
T Z = *std::min_element( _p.begin(), _p.end() );
return Z;
}
+ /// Returns a pair consisting of the index of the maximum value and the maximum value itself
+ std::pair<size_t,T> argmax() const {
+ T max = _p[0];
+ size_t arg = 0;
+ for( size_t i = 1; i < size(); i++ ) {
+ if( _p[i] > max ) {
+ max = _p[i];
+ arg = i;
+ }
+ }
+ return std::make_pair( arg, max );
+ }
+
/// Normalizes vector using the specified norm
T normalize( NormType norm=NORMPROB ) {
T Z = 0.0;
if( norm == NORMPROB )
- Z = totalSum();
+ Z = sum();
else if( norm == NORMLINF )
Z = maxAbs();
if( Z == 0.0 )
/// Returns a random index, according to the (normalized) distribution described by *this
size_t draw() {
- double x = rnd_uniform() * totalSum();
+ double x = rnd_uniform() * sum();
T s = 0;
for( size_t i = 0; i < size(); i++ ) {
s += _p[i];
#include <cassert>
#include <typeinfo>
#include <dai/exceptions.h>
+#include <dai/util.h>
namespace dai {
/// Sets a property
PropertySet & Set(const PropertyKey &key, const PropertyValue &val) { this->operator[](key) = val; return *this; }
+ /// Sets properties according to those in newProps, overriding values of properties that already exist
+ PropertySet & Set( const PropertySet &newProps ) {
+ const std::map<PropertyKey, PropertyValue> *m = &newProps;
+ foreach(value_type i, *m) {
+ Set(i.first, i.second);
+ }
+ return *this;
+ }
+
/// Gets a property, casted as ValueType
template<typename ValueType>
ValueType GetAs(const PropertyKey &key) const {
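
As an aside, a small usage sketch (not part of the patch) for the new PropertySet::Set( const PropertySet & ) overload added above; it assumes string-valued properties and the GetAs<std::string>() accessor shown in this header:

    #include <iostream>
    #include <string>
    #include <dai/properties.h>

    int main() {
        dai::PropertySet base;
        base.Set( "tol", std::string("1e-9") ).Set( "maxiter", std::string("10000") );
        dai::PropertySet overrides;
        overrides.Set( "maxiter", std::string("100") );
        base.Set( overrides );   // existing "maxiter" is overwritten, "tol" is kept
        std::cout << base.GetAs<std::string>("maxiter") << std::endl;   // 100
        std::cout << base.GetAs<std::string>("tol") << std::endl;       // 1e-9
        return 0;
    }
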
}
}
if( props.logdomain ) {
- prod -= prod.maxVal();
+ prod -= prod.max();
prod.takeExp();
}
calcBeliefV( i, p );
if( props.logdomain ) {
- p -= p.maxVal();
+ p -= p.max();
p.takeExp();
}
calcBeliefF( I, prod );
if( props.logdomain ) {
- prod -= prod.maxVal();
+ prod -= prod.max();
prod.takeExp();
}
prod.normalize();
for( size_t I = 0; I < nrFactors(); I++ )
P *= factor(I);
- Real Z = P.totalSum();
+ Real Z = P.sum();
_logZ = std::log(Z);
for( size_t i = 0; i < nrVars(); i++ )
_beliefsV[i] = P.marginal(var(i));
Real HAK::logZ() const {
- Real sum = 0.0;
+ Real s = 0.0;
for( size_t beta = 0; beta < nrIRs(); beta++ )
- sum += IR(beta).c() * Qb(beta).entropy();
+ s += IR(beta).c() * Qb(beta).entropy();
for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
- sum += OR(alpha).c() * Qa(alpha).entropy();
- sum += (OR(alpha).log(true) * Qa(alpha)).totalSum();
+ s += OR(alpha).c() * Qa(alpha).entropy();
+ s += (OR(alpha).log(true) * Qa(alpha)).sum();
}
- return sum;
+ return s;
}
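
For orientation (an editorial note, not part of the patch): the quantity HAK::logZ accumulates above, and JTree::logZ mirrors below, is the region-graph free energy estimate, with Q_alpha the outer-region beliefs, Q_beta the inner-region beliefs, c the counting numbers and psi_alpha the outer-region factor:

    \log Z \approx \sum_\beta c_\beta H(Q_\beta) + \sum_\alpha \Big( c_\alpha H(Q_\alpha) + \sum_x Q_\alpha(x) \log \psi_\alpha(x) \Big)

The (OR(alpha).log(true) * Qa(alpha)).sum() term is the expected log-factor \sum_x Q_\alpha(x) \log \psi_\alpha(x), with the log(true) call mapping zero entries to zero so that 0 * log 0 contributes nothing.
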
Real JTree::logZ() const {
- Real sum = 0.0;
+ Real s = 0.0;
for( size_t beta = 0; beta < nrIRs(); beta++ )
- sum += IR(beta).c() * Qb[beta].entropy();
+ s += IR(beta).c() * Qb[beta].entropy();
for( size_t alpha = 0; alpha < nrORs(); alpha++ ) {
- sum += OR(alpha).c() * Qa[alpha].entropy();
- sum += (OR(alpha).log(true) * Qa[alpha]).totalSum();
+ s += OR(alpha).c() * Qa[alpha].entropy();
+ s += (OR(alpha).log(true) * Qa[alpha]).sum();
}
- return sum;
+ return s;
}
Real MF::logZ() const {
- Real sum = 0.0;
+ Real s = 0.0;
for(size_t i=0; i < nrVars(); i++ )
- sum -= beliefV(i).entropy();
+ s -= beliefV(i).entropy();
for(size_t I=0; I < nrFactors(); I++ ) {
Factor henk;
foreach( const Neighbor &j, nbF(I) ) // for all j in I
Factor piet;
piet = factor(I).log(true);
piet *= henk;
- sum -= piet.totalSum();
+ s -= piet.sum();
}
- return -sum;
+ return -s;
}
// Normalize Qa and Qb
_logZ = 0.0;
for( size_t alpha = 0; alpha < _Qa.size(); alpha++ ) {
- _logZ += log(Qa[_a[alpha]].totalSum());
+ _logZ += log(Qa[_a[alpha]].sum());
Qa[_a[alpha]].normalize();
}
for( size_t beta = 0; beta < _Qb.size(); beta++ ) {
- _logZ -= log(Qb[_b[beta]].totalSum());
+ _logZ -= log(Qb[_b[beta]].sum());
Qb[_b[beta]].normalize();
}
}
double TreeEP::TreeEPSubTree::logZ( const std::vector<Factor> &Qa, const std::vector<Factor> &Qb ) const {
- double sum = 0.0;
+ double s = 0.0;
for( size_t alpha = 0; alpha < _Qa.size(); alpha++ )
- sum += (Qa[_a[alpha]] * _Qa[alpha].log(true)).totalSum();
+ s += (Qa[_a[alpha]] * _Qa[alpha].log(true)).sum();
for( size_t beta = 0; beta < _Qb.size(); beta++ )
- sum -= (Qb[_b[beta]] * _Qb[beta].log(true)).totalSum();
- return sum + _logZ;
+ s -= (Qb[_b[beta]] * _Qb[beta].log(true)).sum();
+ return s + _logZ;
}
Real TreeEP::logZ() const {
- double sum = 0.0;
+ double s = 0.0;
// entropy of the tree
for( size_t beta = 0; beta < nrIRs(); beta++ )
- sum -= Qb[beta].entropy();
+ s -= Qb[beta].entropy();
for( size_t alpha = 0; alpha < nrORs(); alpha++ )
- sum += Qa[alpha].entropy();
+ s += Qa[alpha].entropy();
// energy of the on-tree factors
for( size_t alpha = 0; alpha < nrORs(); alpha++ )
- sum += (OR(alpha).log(true) * Qa[alpha]).totalSum();
+ s += (OR(alpha).log(true) * Qa[alpha]).sum();
// energy of the off-tree factors
for( size_t I = 0; I < nrFactors(); I++ )
if( offtree(I) )
- sum += (_Q.find(I))->second.logZ( Qa, Qb );
+ s += (_Q.find(I))->second.logZ( Qa, Qb );
- return sum;
+ return s;
}
LCBP_PAIR2CAV_NONE: LC[cavity=PAIR2,reinit=0,updates=SEQFIX,maxiter=0,cavainame=BP,cavaiopts=[updates=SEQMAX,tol=1e-9,maxiter=10000,logdomain=0],tol=1e-9,verbose=0]
LCBP_UNICAV_SEQFIX: LC[cavity=UNIFORM,updates=SEQFIX,maxiter=10000,tol=1e-9,verbose=0,cavaiopts=[],cavainame=NONE]
LCBP_UNICAV_SEQRND: LC[cavity=UNIFORM,updates=SEQRND,maxiter=10000,tol=1e-9,verbose=0,cavaiopts=[],cavainame=NONE]
+
LCTREEEP: LC[cavity=FULL,reinit=1,updates=SEQFIX,maxiter=10000,cavainame=TREEEP,cavaiopts=[type=ORG,tol=1e-9,maxiter=10000,verbose=0],tol=1e-9,verbose=0]
LCMF: LC[cavity=FULL,reinit=1,updates=SEQFIX,maxiter=10000,cavainame=MF,cavaiopts=[tol=1e-9,maxiter=10000,verbose=0],tol=1e-9,verbose=0]
+LCBP: LCBP_FULLCAVin_SEQRND
+
+# --- GIBBS -------------------
+
+GIBBS: GIBBS[iters=1000,verbose=0]
+GIBBS_1e1: GIBBS[iters=10]
+GIBBS_1e2: GIBBS[iters=100]
+GIBBS_1e3: GIBBS[iters=1000]
+GIBBS_1e4: GIBBS[iters=10000]
+GIBBS_1e5: GIBBS[iters=100000]
+GIBBS_1e6: GIBBS[iters=1000000]
+GIBBS_1e7: GIBBS[iters=10000000]
+GIBBS_1e8: GIBBS[iters=100000000]
+GIBBS_1e9: GIBBS[iters=1000000000]
delete obj;
}
- string identify() {
+ string identify() const {
if( obj != NULL )
return obj->identify();
else
};
-pair<string, PropertySet> parseMethod( const string &_s, const map<string,string> & aliases ) {
- // s = first part of _s, until '['
- string::size_type pos = _s.find_first_of('[');
- string s;
- if( pos == string::npos )
- s = _s;
- else
- s = _s.substr(0,pos);
-
- // if the first part is an alias, substitute
- if( aliases.find(s) != aliases.end() )
- s = aliases.find(s)->second;
+pair<string, PropertySet> parseMethodRaw( const string &s ) {
+ string::size_type pos = s.find_first_of('[');
+ string name;
+ PropertySet opts;
+ if( pos == string::npos ) {
+ name = s;
+ } else {
+ name = s.substr(0,pos);
- // attach second part, merging properties if necessary
- if( pos != string::npos ) {
- if( s.at(s.length()-1) == ']' ) {
- s = s.erase(s.length()-1,1) + ',' + _s.substr(pos+1);
- } else
- s = s + _s.substr(pos);
+ stringstream ss;
+ ss << s.substr(pos,s.length());
+ ss >> opts;
}
+ return make_pair(name,opts);
+}
- pair<string, PropertySet> result;
- string & name = result.first;
- PropertySet & opts = result.second;
- pos = s.find_first_of('[');
- if( pos == string::npos )
- throw "Malformed method";
- name = s.substr( 0, pos );
+pair<string, PropertySet> parseMethod( const string &_s, const map<string,string> & aliases ) {
+ // break string into method[properties]
+ pair<string,PropertySet> ps = parseMethodRaw(_s);
+ bool looped = false;
+
+ // as long as 'method' is an alias, update:
+ while( aliases.find(ps.first) != aliases.end() && !looped ) {
+ string astr = aliases.find(ps.first)->second;
+ pair<string,PropertySet> aps = parseMethodRaw(astr);
+ if( aps.first == ps.first )
+ looped = true;
+ // override aps properties by ps properties
+ aps.second.Set( ps.second );
+ // replace ps by aps
+ ps = aps;
+ // repeat until method name == alias name ('looped'), or
+ // there is no longer an alias 'method'
+ }
+
+ // check whether name is valid
size_t n = 0;
for( ; strlen( DAINames[n] ) != 0; n++ )
- if( name == DAINames[n] )
+ if( ps.first == DAINames[n] )
break;
- if( strlen( DAINames[n] ) == 0 && (name != "LDPC") )
- DAI_THROW(UNKNOWN_DAI_ALGORITHM);
+ if( strlen( DAINames[n] ) == 0 && (ps.first != "LDPC") )
+ throw string("Unknown DAI algorithm \"") + ps.first + string("\" in \"") + _s + string("\"");
- stringstream ss;
- ss << s.substr(pos,s.length());
- ss >> opts;
-
- return result;
+ return ps;
}
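
To illustrate the recursive alias substitution (an editorial sketch, not part of the patch): assuming the GIBBS entries from aliases.conf above, and that this helper is appended to the same translation unit so that parseMethod is visible, resolving an aliased method with an explicit property override would behave as follows:

    void exampleAliasResolution() {
        std::map<std::string, std::string> aliases;
        aliases["GIBBS"]     = "GIBBS[iters=1000,verbose=0]";   // self-referencing entry ends the loop
        aliases["GIBBS_1e4"] = "GIBBS[iters=10000]";

        std::pair<std::string, dai::PropertySet> m = parseMethod( "GIBBS_1e4[verbose=1]", aliases );
        // m.first is "GIBBS"; m.second holds iters=10000 (inherited from the GIBBS_1e4 alias)
        // and verbose=1 (the explicitly given property overrides the alias defaults).
        std::cout << m.first << ": iters=" << m.second.GetAs<std::string>("iters")
                  << " verbose=" << m.second.GetAs<std::string>("verbose") << std::endl;
    }
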