-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/

-- | Deep Learning in Haskell
--
--   Implements type-safe deep neural networks
@package deeplearning-hs
@version 0.1.0.2


module DeepLearning.ConvNet

-- | Activation matrix
type Vol sh = Array U sh Double

-- | Delayed activation matrix
type DVol sh = Array D sh Double

-- | Label for supervised learning
type Label = Int

-- | Layer represents a layer that can pass activations forward.
--   TopLayer and InnerLayer are derived layers that can be
--   backpropagated through.
class (Shape sh, Shape sh') => Layer a sh sh' | a -> sh, a -> sh'

-- | InnerLayer represents an inner layer of a neural network that
--   can accept backpropagation input from higher layers.
class (Layer a sh sh', Shape sh, Shape sh') => InnerLayer a sh sh' | a -> sh, a -> sh'

-- | TopLayer is a top-level layer that can initialize a
--   backpropagation pass.
class Layer a DIM1 DIM1 => TopLayer a

-- | SoftMaxLayer computes the softmax activation function.
data SoftMaxLayer
SoftMaxLayer :: SoftMaxLayer

-- | FullyConnectedLayer represents a fully-connected input layer.
data FullyConnectedLayer sh
FullyConnectedLayer :: Vol (sh :. Int) -> Vol DIM1 -> FullyConnectedLayer sh
_weights :: FullyConnectedLayer sh -> Vol (sh :. Int)
_bias :: FullyConnectedLayer sh -> Vol DIM1

-- | >-> composes two forward activation functions.
(>->) :: (Monad m, Shape sh, Shape sh', Shape sh'') => Forward m sh sh' -> Forward m sh' sh'' -> Forward m sh sh''

-- | The Forward function represents a single forward pass through a
--   layer.
type Forward m sh sh' = Vol sh -> WriterT [Vector Double] m (DVol sh')

-- | withActivations computes the output activation, along with the
--   intermediate activations.
withActivations :: Forward m sh sh' -> Vol sh -> m (DVol sh', [Vector Double])
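
-- A minimal usage sketch, not taken from the package documentation: it wires a
-- fully connected layer into a softmax top layer with net1 and runs one forward
-- pass with withActivations. The names classify1 and run, the input size (3),
-- the class count (2), and the import lists are assumptions for illustration.
--
-- > import Data.Array.Repa (DIM1, Z (..), (:.) (..))
-- > import DeepLearning.ConvNet
-- >
-- > -- A single fully connected layer (3 inputs, 2 output classes) feeding softmax.
-- > classify1 :: Monad m => Forward m DIM1 DIM1
-- > classify1 = net1 (newFC (Z :. (3 :: Int)) 2) SoftMaxLayer
-- >
-- > -- One forward pass; also returns the intermediate activations of each layer.
-- > run input = withActivations classify1 input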

-- | flowNetwork builds a network of the form
--
--   Input Layer                     Output    Softmax
--  +--+
--  |  |    Inner Layers             +--+      +--+
--  |  |                             |  |      |  |
--  |  |   +-+   +-+   +-+           |  |      |  |
--  |  +---+ +---+ +---+ +-----------+  +------+  +--->
--  |  |   +-+   +-+   +-+           |  |      |  |
--  |  |                             |  |      |  |
--  |  |                             +--+      +--+
--  +--+
flowNetwork :: (Monad m, Shape sh) => sh -> Int -> Int -> Int -> Forward m sh DIM1

-- | net1 constructs a single-layer fully connected perceptron with
--   softmax output.
net1 :: (Monad m, InnerLayer a sh DIM1, TopLayer a1) => a -> a1 -> Forward m sh DIM1

-- | net2 constructs a two-layer fully connected MLP with softmax
--   output.
net2 :: (Monad m, InnerLayer a sh sh', InnerLayer a1 sh' DIM1, TopLayer a2) => a -> a1 -> a2 -> Forward m sh DIM1

-- | newFC constructs a new fully connected layer.
newFC :: Shape sh => sh -> Int -> FullyConnectedLayer sh

instance Shape sh => InnerLayer (FullyConnectedLayer sh) sh DIM1
instance Shape sh => Layer (FullyConnectedLayer sh) sh DIM1
instance TopLayer SoftMaxLayer
instance Layer SoftMaxLayer DIM1 DIM1


module DeepLearning.Util

-- | Shape of the sample 3x3 matrix used for demonstrations and tests
testShape :: (Z :. Int) :. Int

-- | Random matrix of the given shape (3x3 in the tests)
testInput :: Shape sh => sh -> Array U sh Double

-- | Random single-layer network over the given input shape
testNet :: (Monad m, Shape sh) => sh -> Int -> Forward m sh DIM1
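
-- A minimal end-to-end sketch combining the two modules; this pairing is
-- illustrative rather than taken from the package's own examples. The name
-- demo and its argument n (the number of output classes) are assumptions.
--
-- > import DeepLearning.ConvNet (withActivations)
-- > import DeepLearning.Util
-- >
-- > -- Build the random single-layer test network over the sample 3x3 shape and
-- > -- run it on the matching random input, collecting intermediate activations.
-- > demo n = withActivations (testNet testShape n) (testInput testShape)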