make doc
* to build the documentation. If the documentation is not clear enough, feel free * to send me an email (or even better, to improve the documentation and send a patch!). + * The documentation can also be browsed online at http://www.libdai.org. */ @@ -277,6 +317,140 @@ */ +/** \page inference Graphical models and approximate inference + * + * \section inference-graphicalmodels Graphical models + * + * Commonly used graphical models are Bayesian networks and Markov random fields. + * In libDAI, both types of graphical models are represented by a slightly more + * general type of graphical model: a factor graph [\ref KFL01]. + * + * \dot + * digraph bayesnet { + * size="1,1"; + * x0 [label="0"]; + * x1 [label="1"]; + * x2 [label="2"]; + * x3 [label="3"]; + * x4 [label="4"]; + * x0 -> x1; + * x0 -> x2; + * x1 -> x3; + * x1 -> x4; + * x2 -> x4; + * } + * \enddot + * + * \f[ P(\mathbf{x}) = \prod_{i\in\mathcal{V}} P(x_i \,|\, x_{\mathrm{pa}(i)}) \f] + * where \f$\mathrm{pa}(i)\f$ are the parents of node \a i in a DAG. + * + * \dot + * graph mrf { + * size="1.5,1.5"; + * x0 [label="0"]; + * x1 [label="1"]; + * x2 [label="2"]; + * x3 [label="3"]; + * x4 [label="4"]; + * x0 -- x1; + * x0 -- x2; + * x1 -- x2; + * x1 -- x3; + * x1 -- x4; + * x2 -- x4; + * } + * \enddot + * + * \f[ P(\mathbf{x}) = \frac{1}{Z} \prod_{C\in\mathcal{C}} \psi_C(x_C) \f] + * where \f$\mathcal{C} \f$ are the cliques of an undirected graph, + * \f$\psi_C(x_C) \f$ are "potentials" or "compatibility functions", and + * \f$Z \f$ is the partition sum which properly normalizes the probability + * distribution. 
+ * + * \dot + * graph factorgraph { + * size="1.8,1"; + * x0 [label="0"]; + * x1 [label="1"]; + * x2 [label="2"]; + * x3 [label="3"]; + * x4 [label="4"]; + * f01 [shape="box",label=""]; + * f02 [shape="box",label=""]; + * f13 [shape="box",label=""]; + * f124 [shape="box",label=""]; + * x0 -- f01; + * x1 -- f01; + * x0 -- f02; + * x2 -- f02; + * x1 -- f13; + * x3 -- f13; + * x1 -- f124; + * x2 -- f124; + * x4 -- f124; + * } + * \enddot + * + * \f[ P(\mathbf{x}) = \frac{1}{Z} \prod_{I\in \mathcal{F}} f_I(x_I) \f] + * where \f$\mathcal{F} \f$ are the factor nodes of a factor graph (a + * bipartite graph consisting of variable nodes and factor nodes), + * \f$f_I(x_I) \f$ are the factors, and \f$Z \f$ is the partition sum + * which properly normalizes the probability distribution. + * + * Looking at the expressions for the joint probability distributions, + * it is obvious that Bayesian networks and Markov random fields can + * both be easily represented as factor graphs. Factor graphs most + * naturally express the factorization structure of a probability + * distribution, and hence are a convenient representation for approximate + * inference algorithms, which all try to exploit this factorization. + * This is why libDAI uses a factor graph as representation of a + * graphical model, implemented in the dai::FactorGraph class. 
+ * + * \section inference-inference Inference tasks + * + * Given a factor graph, specified by the variable nodes \f$\{x_i\}_{i\in\mathcal{V}}\f$, + * the factor nodes \f$\mathcal{F} \f$, the graph structure, and the factors + * \f$\{f_I(x_I)\}_{I\in\mathcal{F}}\f$, the following tasks are important: + * + * - Calculating the partition sum: + * \f[ Z = \sum_{\mathbf{x}_{\mathcal{V}}} \prod_{I \in \mathcal{F}} f_I(x_I) \f] + * - Calculating the marginal distribution of a subset of variables + * \f$\{x_i\}_{i\in A}\f$: + * \f[ P(\mathbf{x}_{A}) = \frac{1}{Z} \sum_{\mathbf{x}_{\mathcal{V}\setminus A}} \prod_{I \in \mathcal{F}} f_I(x_I) \f] + * - Calculating the MAP state, i.e., the joint assignment which has the maximum probability mass: + * \f[ \mathrm{argmax}_{\mathbf{x}}\,\prod_{I\in\mathcal{F}} f_I(x_I) \f] + * + * libDAI offers several inference algorithms, which solve (a subset of) these tasks either + * approximately or exactly, for factor graphs with discrete variables. The following + * algorithms are implemented: + * + * Exact inference: + * - Brute force enumeration: dai::ExactInf + * - Junction-tree method: dai::JTree + * + * Approximate inference: + * - Mean Field: dai::MF + * - (Loopy) Belief Propagation: dai::BP [\ref KFL01] + * - Tree Expectation Propagation: dai::TreeEP [\ref MiQ04] + * - Generalized Belief Propagation: dai::HAK [\ref YFW05] + * - Double-loop GBP: dai::HAK [\ref HAK03] + * - Loop Corrected Belief Propagation: dai::MR [\ref MoR05] and dai::LC [\ref MoK07] + * - Gibbs sampling: dai::Gibbs + * - Conditioned BP: dai::CBP [\ref EaG09] + * + * Not all inference tasks are implemented by each method: calculating MAP states + * is only possible with dai::JTree and dai::BP; calculating partition sums is + * not possible with dai::MR, dai::LC and dai::Gibbs. 
+ * + * \section inference-learning Parameter learning + * + * In addition, libDAI supports parameter learning of conditional probability + * tables by Expectation Maximization (or Maximum Likelihood, if there is no + * missing data). This is implemented in dai::EMAlg. + * + */ + + /** \page fileformats libDAI file formats * * \section fileformats-factorgraph Factor graph (.fg) file format @@ -582,5 +756,3 @@ * this is compile-time polymorphism, whereas tests/testdai needs runtime polymorphism. * Therefore this is probably a bad idea. */ - - diff --git a/include/dai/index.h b/include/dai/index.h index 2283747..2569e54 100644 --- a/include/dai/index.h +++ b/include/dai/index.h @@ -326,6 +326,11 @@ class multifor { * \note A State is very similar to a \link multifor \endlink, but tailored for Var 's and VarSet 's. * * \see VarSet::calcState(), VarSet::calcStates() + * + * \idea Make the State class a more prominent part of libDAI + * (and document it clearly, explaining the concept of state); + * add more optimized variants of the State class like IndexFor + * (e.g. for TFactor<>::slice()). */ class State { private: -- 2.20.1