src/trwbp.cpp
/* This file is part of libDAI - http://www.libdai.org/
 *
 * libDAI is licensed under the terms of the GNU General Public License version
 * 2, or (at your option) any later version. libDAI is distributed without any
 * warranty. See the file COPYING for more details.
 *
 * Copyright (C) 2010 Joris Mooij [joris dot mooij at libdai dot org]
 */


#include <dai/trwbp.h>


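// If DAI_TRWBP_FAST is nonzero, calcIncomingMessageProduct() multiplies the incoming
// messages directly into the factor table using precalculated index tables; if it is
// zero, the simpler (but slower) Factor-based product below is used instead.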
#define DAI_TRWBP_FAST 1


namespace dai {


using namespace std;


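// The "nrtrees" property sets how many random spanning trees sampleWeights() draws
// in order to estimate the factor weights c_I; if it is 0, all weights keep their
// default value 1 and TRWBP reduces to ordinary BP.
//
// Minimal usage sketch (the FactorGraph `fg` and the exact set of BP options required
// by your libDAI version are assumptions here, not prescribed by this file):
//
//   PropertySet opts;
//   opts.set( "updates", string("SEQRND") );
//   opts.set( "tol", Real(1e-9) );
//   opts.set( "maxiter", size_t(10000) );
//   opts.set( "logdomain", false );
//   opts.set( "nrtrees", size_t(100) );
//   TRWBP trwbp( fg, opts );
//   trwbp.init();
//   trwbp.run();
//   cout << "TRWBP logZ estimate: " << trwbp.logZ() << endl;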
void TRWBP::setProperties( const PropertySet &opts ) {
    BP::setProperties( opts );

    if( opts.hasKey("nrtrees") )
        nrtrees = opts.getStringAs<size_t>("nrtrees");
    else
        nrtrees = 0;
}


PropertySet TRWBP::getProperties() const {
    PropertySet opts = BP::getProperties();
    opts.set( "nrtrees", nrtrees );
    return opts;
}


string TRWBP::printProperties() const {
    stringstream s( stringstream::out );
    string sbp = BP::printProperties();
    s << sbp.substr( 0, sbp.size() - 1 );
    s << ",";
    s << "nrtrees=" << nrtrees << "]";
    return s.str();
}


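// TRWBP approximates the log partition sum by the tree-reweighted free energy
//
//   logZ ~= sum_I sum_{x_I} b_I(x_I) log f_I(x_I)
//           + sum_I c_I H(b_I)
//           + sum_i (1 - sum_{I in nb(i)} c_I) H(b_i),
//
// where c_I = Weight(I) and H(.) denotes entropy. When the weights are valid edge
// appearance probabilities of a distribution over spanning trees, this quantity is
// known to upper-bound the true logZ (Wainwright, Jaakkola & Willsky).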
// This code has been copied from bp.cpp, except where comments indicate TRWBP-specific behaviour
Real TRWBP::logZ() const {
    Real sum = 0.0;
    for( size_t I = 0; I < nrFactors(); I++ ) {
        sum += (beliefF(I) * factor(I).log(true)).sum();  // TRWBP/FBP
        sum += Weight(I) * beliefF(I).entropy();  // TRWBP/FBP
    }
    for( size_t i = 0; i < nrVars(); ++i ) {
        Real c_i = 0.0;
        foreach( const Neighbor &I, nbV(i) )
            c_i += Weight(I);
        sum += (1.0 - c_i) * beliefV(i).entropy();  // TRWBP/FBP
    }
    return sum;
}


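// TRWBP replaces the plain product of factor and incoming messages used by BP with
// the reweighted product
//
//   f_I(x_I)^(1/c_I) * prod_{j in nb(I)} [ prod_{J in nb(j)\I} m_{J->j}(x_j)^(c_J) ]
//                                        * m_{I->j}(x_j)^(c_I - 1),
//
// which reduces to the BP product when all weights c_I equal 1.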
// This code has been copied from bp.cpp, except where comments indicate TRWBP-specific behaviour
Prob TRWBP::calcIncomingMessageProduct( size_t I, bool without_i, size_t i ) const {
    Real c_I = Weight(I); // TRWBP: c_I

    Factor Fprod( factor(I) );
    Prob &prod = Fprod.p();
    if( props.logdomain ) {
        prod.takeLog();
        prod /= c_I; // TRWBP
    } else
        prod ^= (1.0 / c_I); // TRWBP

    // Calculate product of incoming messages and factor I
    foreach( const Neighbor &j, nbF(I) )
        if( !(without_i && (j == i)) ) {
            const Var &v_j = var(j);
            // prod_j will be the product of messages coming into j
            // TRWBP: corresponds to messages n_jI
            Prob prod_j( v_j.states(), props.logdomain ? 0.0 : 1.0 );
            foreach( const Neighbor &J, nbV(j) ) {
                Real c_J = Weight(J); // TRWBP
                if( J != I ) { // for all J in nb(j) \ I
                    if( props.logdomain )
                        prod_j += message( j, J.iter ) * c_J;
                    else
                        prod_j *= message( j, J.iter ) ^ c_J;
                } else { // TRWBP: multiply by m_Ij^(c_I-1)
                    if( props.logdomain )
                        prod_j += message( j, J.iter ) * (c_J - 1.0);
                    else
                        prod_j *= message( j, J.iter ) ^ (c_J - 1.0);
                }
            }

            // multiply prod with prod_j
            if( !DAI_TRWBP_FAST ) {
                // UNOPTIMIZED (SIMPLE TO READ, BUT SLOW) VERSION
                if( props.logdomain )
                    Fprod += Factor( v_j, prod_j );
                else
                    Fprod *= Factor( v_j, prod_j );
            } else {
                // OPTIMIZED VERSION
                size_t _I = j.dual;
                // ind is the precalculated IndexFor(j,I) i.e. to x_I == k corresponds x_j == ind[k]
                const ind_t &ind = index(j, _I);

                for( size_t r = 0; r < prod.size(); ++r )
                    if( props.logdomain )
                        prod.set( r, prod[r] + prod_j[ind[r]] );
                    else
                        prod.set( r, prod[r] * prod_j[ind[r]] );
            }
        }

    return prod;
}


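// TRWBP single-variable beliefs: b_i is proportional to the product of the incoming
// messages, each raised to the weight of its factor, prod_{I in nb(i)} m_{I->i}^(c_I).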
// This code has been copied from bp.cpp, except where comments indicate TRWBP-specific behaviour
void TRWBP::calcBeliefV( size_t i, Prob &p ) const {
    p = Prob( var(i).states(), props.logdomain ? 0.0 : 1.0 );
    foreach( const Neighbor &I, nbV(i) ) {
        Real c_I = Weight(I);
        if( props.logdomain )
            p += newMessage( i, I.iter ) * c_I;
        else
            p *= newMessage( i, I.iter ) ^ c_I;
    }
}


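// All factor weights are initialized to 1 (with which TRWBP coincides with ordinary BP);
// sampleWeights() then overwrites the pairwise weights if nrtrees is nonzero.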
void TRWBP::construct() {
    BP::construct();
    _weight.resize( nrFactors(), 1.0 );
    sampleWeights( nrtrees );
    if( props.verbose >= 2 )
        cerr << "Weights: " << _weight << endl;
}


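// Adds a sampled spanning tree to the weight statistics: for every edge {i,j} of the
// tree, the weight of the pairwise factor defined on {x_i, x_j} is incremented by 1.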
void TRWBP::addTreeToWeights( const RootedTree &tree ) {
    for( RootedTree::const_iterator e = tree.begin(); e != tree.end(); e++ ) {
        VarSet ij( var(e->first), var(e->second) );
        size_t I = findFactor( ij );
        _weight[I] += 1.0;
    }
}


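// Estimates the factor weights c_I as empirical edge appearance probabilities: nrTrees
// spanning trees are sampled by assigning i.i.d. uniform random weights to the edges of
// the Markov graph and taking a minimum spanning tree each time; c_I then becomes the
// fraction of sampled trees that contain the edge corresponding to pairwise factor I.
// Single-variable factors get weight 1; factors over more than two variables are not supported.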
void TRWBP::sampleWeights( size_t nrTrees ) {
    if( !nrTrees )
        return;

    // initialize weights to zero
    fill( _weight.begin(), _weight.end(), 0.0 );

    // construct Markov adjacency graph, with edges weighted with
    // random weights drawn from the uniform distribution on the interval [0,1]
    WeightedGraph<Real> wg;
    for( size_t i = 0; i < nrVars(); ++i ) {
        const Var &v_i = var(i);
        VarSet di = delta(i);
        for( VarSet::const_iterator j = di.begin(); j != di.end(); j++ )
            if( v_i < *j )
                wg[UEdge(i,findVar(*j))] = rnd_uniform();
    }

    // now repeatedly change the random weights, find the minimal spanning tree, and add it to the weights
    for( size_t nr = 0; nr < nrTrees; nr++ ) {
        // find minimal spanning tree
        RootedTree randTree = MinSpanningTree( wg, true );
        // add it to the weights
        addTreeToWeights( randTree );
        // resample weights of the graph
        for( WeightedGraph<Real>::iterator e = wg.begin(); e != wg.end(); e++ )
            e->second = rnd_uniform();
    }

    // normalize the weights and set the single-variable weights to 1.0
    for( size_t I = 0; I < nrFactors(); I++ ) {
        size_t sizeI = factor(I).vars().size();
        if( sizeI == 1 )
            _weight[I] = 1.0;
        else if( sizeI == 2 )
            _weight[I] /= nrTrees;
        else
            DAI_THROW(NOT_IMPLEMENTED);
    }
}


} // end of namespace dai