Safe Haskell | Safe-Inferred |
---|

Algorithms for factor elimination

- moralGraph :: (NamedGraph g, FoldableWithVertex g, DirectedGraph g) => g () b -> UndirectedSG () b
- nodeComparisonForTriangulation :: (UndirectedGraph g, Factor f) => g a f -> Vertex -> Vertex -> Ordering
- numberOfAddedEdges :: UndirectedGraph g => g a b -> Vertex -> Integer
- weight :: (UndirectedGraph g, Factor f) => g a f -> Vertex -> Integer
- weightedEdges :: (UndirectedGraph g, Factor f) => g a f -> Vertex -> Integer
- triangulate :: Graph g => (Vertex -> Vertex -> Ordering) -> g () b -> [VertexCluster]
- createClusterGraph :: (UndirectedGraph g, Factor f, Graph g') => g' e f -> [VertexCluster] -> g Int Cluster
- data Cluster
- createJunctionTree :: (DirectedGraph g, FoldableWithVertex g, NamedGraph g, Factor f, IsBucketItem f, Show f) => (UndirectedSG () f -> Vertex -> Vertex -> Ordering) -> BayesianNetwork g f -> JunctionTree f
- createUninitializedJunctionTree :: (DirectedGraph g, FoldableWithVertex g, NamedGraph g, Factor f, Show f) => (UndirectedSG () f -> Vertex -> Vertex -> Ordering) -> g () f -> JunctionTree f
- type JunctionTree f = JTree Cluster f
- displayTreeValues :: (Show f, Show c) => JTree c f -> IO ()
- collect :: (Ord c, Message a c) => JTree c a -> JTree c a
- distribute :: (Ord c, Message a c) => JTree c a -> JTree c a
- posterior :: (BayesianDiscreteVariable dv, Factor f, IsBucketItem f) => JunctionTree f -> [dv] -> Maybe f
- changeEvidence :: (IsCluster c, Ord c, Factor f, Message f c, Show c, Show f) => [DVI] -> JTree c f -> JTree c f
- junctionTreeProperty_prop :: DirectedSG () CPT -> Property
- junctionTreeAllClusters_prop :: DirectedSG () CPT -> Property
- data VertexCluster
- junctionTreeProperty :: JTree Cluster CPT -> [Cluster] -> Cluster -> Bool
- maximumSpanningTree :: (UndirectedGraph g, IsCluster c, Factor f, Ord c, Show c, Show f) => g Int c -> JTree c f
- fromVertexCluster :: VertexCluster -> Set Vertex
- triangulatedebug :: Graph g => (Vertex -> Vertex -> Ordering) -> g () b -> ([VertexCluster], [g () b])

# Moral graph

moralGraph :: (NamedGraph g, FoldableWithVertex g, DirectedGraph g) => g () b -> UndirectedSG () b

For the junction tree construction, only the vertices are needed during the intermediate steps. So, the moral graph is returned without any vertex data.

# Triangulation

nodeComparisonForTriangulation :: (UndirectedGraph g, Factor f) => g a f -> Vertex -> Vertex -> Ordering

Node selection comparison function used for triangulating the graph

numberOfAddedEdges :: UndirectedGraph g => g a b -> Vertex -> Integer

Number of edges added when connecting all neighbors

weightedEdges :: (UndirectedGraph g, Factor f) => g a f -> Vertex -> Integer

:: Graph g | |

=> (Vertex -> Vertex -> Ordering) | Criterion function for triangulation |

-> g () b | |

-> [VertexCluster] | Returns the clusters (which may not be maximal) |

Triangulate a graph using a cost function. The result is the list of clusters, which may not be maximal.

# Junction tree

createClusterGraph :: (UndirectedGraph g, Factor f, Graph g') => g' e f -> [VertexCluster] -> g Int Cluster

Create the cluster graph

Cluster of discrete variables.
Discrete variables are needed instead of vertices because the
factors use `DV`, and we need to find
which factors must be contained in a given cluster.

:: (DirectedGraph g, FoldableWithVertex g, NamedGraph g, Factor f, IsBucketItem f, Show f) | |

=> (UndirectedSG () f -> Vertex -> Vertex -> Ordering) | Weight function on the moral graph |

-> BayesianNetwork g f | Input directed graph |

-> JunctionTree f | Junction tree |

Create a junction tree

createUninitializedJunctionTree

:: (DirectedGraph g, FoldableWithVertex g, NamedGraph g, Factor f, Show f) | |

=> (UndirectedSG () f -> Vertex -> Vertex -> Ordering) | Weight function on the moral graph |

-> g () f | Input directed graph |

-> JunctionTree f | Junction tree |

Create a junction tree with only the clusters and no factors

type JunctionTree f = JTree Cluster f

# Shenoy-Shafer message passing

collect :: (Ord c, Message a c) => JTree c a -> JTree c a

Collect message taking into account that the tree depth may be different for different leaves.

distribute :: (Ord c, Message a c) => JTree c a -> JTree c a

posterior :: (BayesianDiscreteVariable dv, Factor f, IsBucketItem f) => JunctionTree f -> [dv] -> Maybe f

Compute the marginal posterior (if some evidence is set on the junction tree); otherwise compute just the marginal prior. The set of variables must be included inside a cluster for the algorithm to work. So, in most cases, it will be used to compute the posterior of just one variable.

# Evidence

:: (IsCluster c, Ord c, Factor f, Message f c, Show c, Show f) | |

=> [DVI] | Evidence |

-> JTree c f | |

-> JTree c f | |

Change evidence in the network

# Test

data VertexCluster

A cluster containing only the vertices and not yet the factors

# For debug

maximumSpanningTree :: (UndirectedGraph g, IsCluster c, Factor f, Ord c, Show c, Show f) => g Int c -> JTree c f

Implementation of Prim's algorithm for a maximum spanning tree