-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/
-- | Practical Deep Learning in Haskell
--
-- Grenade is a composable, dependently typed, practical, and fast
-- recurrent neural network library for precise specifications and
-- complex deep neural networks in Haskell.
--
-- Grenade provides an API for composing layers of a neural network into
-- a sequence parallel graph in a type safe manner; running networks with
-- reverse automatic differentiation to calculate their gradients; and
-- applying gradient descent for learning.
--
-- Documentation and examples are available on github
-- https://github.com/HuwCampbell/grenade.
@package grenade
@version 0.1.0
module Grenade.Layers.Internal.Update
decendMatrix :: (KnownNat rows, KnownNat columns) => Double -> Double -> Double -> L rows columns -> L rows columns -> L rows columns -> (L rows columns, L rows columns)
decendVector :: (KnownNat r) => Double -> Double -> Double -> R r -> R r -> R r -> (R r, R r)
module Grenade.Layers.Internal.Pooling
poolForward :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double
poolBackward :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double -> Matrix Double
module Grenade.Layers.Internal.Pad
pad :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double
crop :: Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double
module Grenade.Layers.Internal.Convolution
im2col :: Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double
col2im :: Int -> Int -> Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double
col2vid :: Int -> Int -> Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double
vid2col :: Int -> Int -> Int -> Int -> Int -> Int -> Matrix Double -> Matrix Double
module Grenade.Core.Shape
-- | The current shapes we accept; at the moment this is just one, two, and
-- three dimensional Vectors/Matrices.
--
-- These are only used with DataKinds, as Kind Shape, with Types
-- 'D1, 'D2, 'D3.
data Shape
-- | One dimensional vector
D1 :: Nat -> Shape
-- | Two dimensional matrix. Row, Column.
D2 :: Nat -> Nat -> Shape
-- | Three dimensional matrix. Row, Column, Channels.
D3 :: Nat -> Nat -> Nat -> Shape
-- | Concrete data structures for a Shape.
--
-- All shapes are held in contiguous memory. 3D is held in a matrix
-- (usually row oriented) which has height depth * rows.
data S (n :: Shape)
[S1D] :: (KnownNat len) => R len -> S (D1 len)
[S2D] :: (KnownNat rows, KnownNat columns) => L rows columns -> S (D2 rows columns)
[S3D] :: (KnownNat rows, KnownNat columns, KnownNat depth, KnownNat (rows * depth)) => L (rows * depth) columns -> S (D3 rows columns depth)
-- | The singleton kind-indexed data family.
-- | Generate random data of the desired shape
randomOfShape :: forall x m. (MonadRandom m, SingI x) => m (S x)
-- | Generate a shape from a Storable Vector.
--
-- Returns Nothing if the vector is of the wrong size.
fromStorable :: forall x. SingI x => Vector Double -> Maybe (S x)
instance GHC.Show.Show (Grenade.Core.Shape.S n)
instance GHC.TypeLits.KnownNat a => Data.Singletons.SingI ('Grenade.Core.Shape.D1 a)
instance (GHC.TypeLits.KnownNat a, GHC.TypeLits.KnownNat b) => Data.Singletons.SingI ('Grenade.Core.Shape.D2 a b)
instance (GHC.TypeLits.KnownNat a, GHC.TypeLits.KnownNat b, GHC.TypeLits.KnownNat c, GHC.TypeLits.KnownNat (a GHC.TypeLits.* c)) => Data.Singletons.SingI ('Grenade.Core.Shape.D3 a b c)
instance Data.Singletons.SingI x => GHC.Num.Num (Grenade.Core.Shape.S x)
instance Data.Singletons.SingI x => GHC.Real.Fractional (Grenade.Core.Shape.S x)
instance Data.Singletons.SingI x => GHC.Float.Floating (Grenade.Core.Shape.S x)
instance Control.DeepSeq.NFData (Grenade.Core.Shape.S x)
module Grenade.Utils.OneHot
-- | From an int which is hot, create a 1D Shape with one index hot (1)
-- with the rest 0. Returns Nothing if the hot number is larger than the
-- length of the vector.
oneHot :: forall n. (KnownNat n) => Int -> Maybe (S (D1 n))
-- | Create a one hot map from any enumerable. Returns a map, and the
-- ordered list for the reverse transformation
hotMap :: (Ord a, KnownNat n) => Proxy n -> [a] -> Maybe (Map a Int, Vector a)
-- | From a map and value, create a 1D Shape with one index hot (1) with
-- the rest 0. Returns Nothing if the hot number is larger than the
-- length of the vector or the map doesn't contain the value.
makeHot :: forall a n. (Ord a, KnownNat n) => Map a Int -> a -> Maybe (S (D1 n))
unHot :: forall a n. KnownNat n => Vector a -> S (D1 n) -> Maybe a
sample :: forall a n m. (KnownNat n, MonadRandom m) => Double -> Vector a -> S (D1 n) -> m a
module Grenade.Core.LearningParameters
-- | Learning parameters for stochastic gradient descent.
data LearningParameters
LearningParameters :: Double -> Double -> Double -> LearningParameters
[learningRate] :: LearningParameters -> Double
[learningMomentum] :: LearningParameters -> Double
[learningRegulariser] :: LearningParameters -> Double
instance GHC.Show.Show Grenade.Core.LearningParameters.LearningParameters
instance GHC.Classes.Eq Grenade.Core.LearningParameters.LearningParameters
-- | This module defines what a Layer is in a Grenade neural network.
--
-- There are two classes of interest: UpdateLayer and
-- Layer.
--
-- UpdateLayer is required for all types which are used as a layer
-- in a network. Having no shape information, this class is agnostic to
-- the input and output data of the layer.
--
-- An instance of Layer on the other hand is required for usage in
-- a neural network, but also specifies the shapes of data that the
-- network can transform. Multiple instances of Layer are permitted
-- for a single type, to transform different shapes. The Reshape
-- layer for example can act as a flattening layer, and its inverse,
-- projecting a 1D shape up to 2 or 3 dimensions.
--
-- Instances of Layer should be as strict as possible, and not
-- emit runtime errors.
module Grenade.Core.Layer
-- | Class for a layer. All layers implement this, however, they don't need
-- to implement it for all shapes, only ones which are appropriate.
class UpdateLayer x => Layer x (i :: Shape) (o :: Shape) where type Tape x i o :: * where {
type family Tape x i o :: *;
}
-- | Used in training and scoring. Take the input from the previous layer,
-- and give the output from this layer.
runForwards :: Layer x i o => x -> S i -> (Tape x i o, S o)
-- | Back propagate a step. Takes the current layer, the input that the
-- layer gave from the input and the back propagated derivatives from the
-- layer above.
--
-- Returns the gradient layer and the derivatives to push back further.
runBackwards :: Layer x i o => x -> Tape x i o -> S o -> (Gradient x, S i)
-- | Class for updating a layer. All layers implement this, as it describes
-- how to create and update the layer.
class UpdateLayer x where type Gradient x :: * runUpdates rate = foldl' (runUpdate rate) where {
type family Gradient x :: *;
}
-- | Update a layer with its gradient and learning parameters
runUpdate :: UpdateLayer x => LearningParameters -> x -> Gradient x -> x
-- | Create a random layer, many layers will use pure
createRandom :: (UpdateLayer x, MonadRandom m) => m x
-- | Update a layer with many Gradients
runUpdates :: UpdateLayer x => LearningParameters -> x -> [Gradient x] -> x
-- | This module defines the core data types and functions for
-- non-recurrent neural networks.
module Grenade.Core.Network
-- | Type of a network.
--
-- The [*] type specifies the types of the layers.
--
-- The [Shape] type specifies the shapes of data passed between
-- the layers.
--
-- Can be considered to be a heterogeneous list of layers which are able
-- to transform the data shapes of the network.
data Network :: [*] -> [Shape] -> *
[NNil] :: SingI i => Network '[] '[i]
[:~>] :: (SingI i, SingI h, Layer x i h) => !x -> !(Network xs (h : hs)) -> Network (x : xs) (i : (h : hs))
-- | Gradient of a network.
--
-- Parameterised on the layers of the network.
data Gradients :: [*] -> *
[GNil] :: Gradients '[]
[:/>] :: UpdateLayer x => Gradient x -> Gradients xs -> Gradients (x : xs)
-- | Wengert Tape of a network.
--
-- Parameterised on the layers and shapes of the network.
data Tapes :: [*] -> [Shape] -> *
[TNil] :: SingI i => Tapes '[] '[i]
[:\>] :: (SingI i, SingI h, Layer x i h) => !(Tape x i h) -> !(Tapes xs (h : hs)) -> Tapes (x : xs) (i : (h : hs))
-- | Running a network forwards with some input data.
--
-- This gives the output, and the Wengert tape required for back
-- propagation.
runNetwork :: forall layers shapes. Network layers shapes -> S (Head shapes) -> (Tapes layers shapes, S (Last shapes))
-- | Running a loss gradient back through the network.
--
-- This requires a Wengert tape, generated with the appropriate input for
-- the loss.
--
-- Gives the gradients for the layer, and the gradient across the input
-- (which may not be required).
runGradient :: forall layers shapes. Network layers shapes -> Tapes layers shapes -> S (Last shapes) -> (Gradients layers, S (Head shapes))
-- | Apply one step of stochastic gradient descent across the network.
applyUpdate :: LearningParameters -> Network layers shapes -> Gradients layers -> Network layers shapes
-- | Create a network with randomly initialised weights.
--
-- Calls to this function will not compile if the type of the neural
-- network is not sound.
randomNetwork :: (CreatableNetwork xs ss, MonadRandom m) => m (Network xs ss)
instance GHC.Show.Show (Grenade.Core.Network.Network '[] '[i])
instance (GHC.Show.Show x, GHC.Show.Show (Grenade.Core.Network.Network xs rs)) => GHC.Show.Show (Grenade.Core.Network.Network (x : xs) (i : rs))
instance Data.Singletons.SingI i => Grenade.Core.Network.CreatableNetwork '[] '[i]
instance (Data.Singletons.SingI i, Data.Singletons.SingI o, Grenade.Core.Layer.Layer x i o, Grenade.Core.Network.CreatableNetwork xs (o : rs)) => Grenade.Core.Network.CreatableNetwork (x : xs) (i : o : rs)
instance Data.Singletons.SingI i => Data.Serialize.Serialize (Grenade.Core.Network.Network '[] '[i])
instance (Data.Singletons.SingI i, Data.Singletons.SingI o, Grenade.Core.Layer.Layer x i o, Data.Serialize.Serialize x, Data.Serialize.Serialize (Grenade.Core.Network.Network xs (o : rs))) => Data.Serialize.Serialize (Grenade.Core.Network.Network (x : xs) (i : o : rs))
instance Grenade.Core.Network.CreatableNetwork sublayers subshapes => Grenade.Core.Layer.UpdateLayer (Grenade.Core.Network.Network sublayers subshapes)
instance (Grenade.Core.Network.CreatableNetwork sublayers subshapes, i ~ Data.Singletons.Prelude.List.Head subshapes, o ~ Data.Singletons.Prelude.List.Last subshapes) => Grenade.Core.Layer.Layer (Grenade.Core.Network.Network sublayers subshapes) i o
module Grenade.Core.Runner
-- | Update a network with new weights after training with an instance.
train :: SingI (Last shapes) => LearningParameters -> Network layers shapes -> S (Head shapes) -> S (Last shapes) -> Network layers shapes
-- | Perform reverse automatic differentiation on the network for the
-- current input and expected output.
--
-- Note: The loss function pushed backwards is appropriate for
-- both regression and classification as a squared loss or log-loss
-- respectively.
--
-- For other loss functions, use runNetwork and runGradient with the back
-- propagated gradient of your loss.
backPropagate :: SingI (Last shapes) => Network layers shapes -> S (Head shapes) -> S (Last shapes) -> Gradients layers
-- | Run the network with input and return the output.
runNet :: Network layers shapes -> S (Head shapes) -> S (Last shapes)
module Grenade.Core
-- | This module provides the concatenation layer, which runs two child
-- layers in parallel and combines their outputs.
module Grenade.Layers.Concat
-- | A Concatenating Layer.
--
-- This layer shares it's input state between two sublayers, and
-- concatenates their output.
--
-- With Networks able to be Layers, this allows for very expressive
-- composition of complex Networks.
--
-- The Concat layer has a few instances, which allow one to flexibly
-- "bash" together the outputs.
--
-- Two 1D vectors, can go to a 2D shape with 2 rows if their lengths are
-- identical. Any 2 1D vectors can also become a longer 1D Vector.
--
-- 3D images become 3D images with more channels. The sizes must be the
-- same, one can use Pad and Crop layers to ensure this is the case.
data Concat :: Shape -> * -> Shape -> * -> *
[Concat] :: x -> y -> Concat m x n y
instance (GHC.Show.Show x, GHC.Show.Show y) => GHC.Show.Show (Grenade.Layers.Concat.Concat m x n y)
instance (Grenade.Core.Layer.UpdateLayer x, Grenade.Core.Layer.UpdateLayer y) => Grenade.Core.Layer.UpdateLayer (Grenade.Layers.Concat.Concat m x n y)
instance (Data.Singletons.SingI i, Grenade.Core.Layer.Layer x i ('Grenade.Core.Shape.D1 o), Grenade.Core.Layer.Layer y i ('Grenade.Core.Shape.D1 o)) => Grenade.Core.Layer.Layer (Grenade.Layers.Concat.Concat ('Grenade.Core.Shape.D1 o) x ('Grenade.Core.Shape.D1 o) y) i ('Grenade.Core.Shape.D2 2 o)
instance (Data.Singletons.SingI i, Grenade.Core.Layer.Layer x i ('Grenade.Core.Shape.D1 m), Grenade.Core.Layer.Layer y i ('Grenade.Core.Shape.D1 n), GHC.TypeLits.KnownNat o, GHC.TypeLits.KnownNat m, GHC.TypeLits.KnownNat n, o ~ (m GHC.TypeLits.+ n), n ~ (o GHC.TypeLits.- m), (m GHC.TypeLits.<=? o) ~ 'GHC.Types.True) => Grenade.Core.Layer.Layer (Grenade.Layers.Concat.Concat ('Grenade.Core.Shape.D1 m) x ('Grenade.Core.Shape.D1 n) y) i ('Grenade.Core.Shape.D1 o)
instance (Data.Singletons.SingI i, Grenade.Core.Layer.Layer x i ('Grenade.Core.Shape.D3 rows cols m), Grenade.Core.Layer.Layer y i ('Grenade.Core.Shape.D3 rows cols n), GHC.TypeLits.KnownNat (rows GHC.TypeLits.* n), GHC.TypeLits.KnownNat (rows GHC.TypeLits.* m), GHC.TypeLits.KnownNat (rows GHC.TypeLits.* o), GHC.TypeLits.KnownNat o, GHC.TypeLits.KnownNat m, GHC.TypeLits.KnownNat n, ((rows GHC.TypeLits.* m) GHC.TypeLits.+ (rows GHC.TypeLits.* n)) ~ (rows GHC.TypeLits.* o), ((rows GHC.TypeLits.* o) GHC.TypeLits.- (rows GHC.TypeLits.* m)) ~ (rows GHC.TypeLits.* n), ((rows GHC.TypeLits.* m) GHC.TypeLits.<=? (rows GHC.TypeLits.* o)) ~ 'GHC.Types.True) => Grenade.Core.Layer.Layer (Grenade.Layers.Concat.Concat ('Grenade.Core.Shape.D3 rows cols m) x ('Grenade.Core.Shape.D3 rows cols n) y) i ('Grenade.Core.Shape.D3 rows cols o)
instance (Data.Serialize.Serialize a, Data.Serialize.Serialize b) => Data.Serialize.Serialize (Grenade.Layers.Concat.Concat sa a sb b)
-- | This module provides the Convolution layer, which is critical in many
-- computer vision tasks.
module Grenade.Layers.Convolution
-- | A convolution layer for a neural network. This uses the im2col
-- convolution trick popularised by Caffe, which essentially turns the
-- many, many, many, many loop convolution into a single matrix
-- multiplication.
--
-- The convolution layer takes all of the kernels for the convolution,
-- which are flattened and then put into columns in the matrix.
--
-- The kernel size dictates which input and output sizes will "fit".
-- Fitting the equation: `out = (in - kernel) / stride + 1` for both
-- dimensions.
--
-- One probably shouldn't build their own layer, but rather use the
-- randomConvolution function.
data Convolution :: Nat -> Nat -> Nat -> Nat -> Nat -> Nat -> *
[Convolution] :: (KnownNat channels, KnownNat filters, KnownNat kernelRows, KnownNat kernelColumns, KnownNat strideRows, KnownNat strideColumns, KnownNat kernelFlattened, kernelFlattened ~ ((kernelRows * kernelColumns) * channels)) => !(L kernelFlattened filters) -> !(L kernelFlattened filters) -> Convolution channels filters kernelRows kernelColumns strideRows strideColumns
data Convolution' :: Nat -> Nat -> Nat -> Nat -> Nat -> Nat -> *
[Convolution'] :: (KnownNat channels, KnownNat filters, KnownNat kernelRows, KnownNat kernelColumns, KnownNat strideRows, KnownNat strideColumns, KnownNat kernelFlattened, kernelFlattened ~ ((kernelRows * kernelColumns) * channels)) => !(L kernelFlattened filters) -> Convolution' channels filters kernelRows kernelColumns strideRows strideColumns
randomConvolution :: (MonadRandom m, KnownNat channels, KnownNat filters, KnownNat kernelRows, KnownNat kernelColumns, KnownNat strideRows, KnownNat strideColumns, KnownNat kernelFlattened, kernelFlattened ~ ((kernelRows * kernelColumns) * channels)) => m (Convolution channels filters kernelRows kernelColumns strideRows strideColumns)
instance GHC.Show.Show (Grenade.Layers.Convolution.Convolution c f k k' s s')
instance (GHC.TypeLits.KnownNat channels, GHC.TypeLits.KnownNat filters, GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelColumns, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideColumns, GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelColumns) GHC.TypeLits.* channels)) => Grenade.Core.Layer.UpdateLayer (Grenade.Layers.Convolution.Convolution channels filters kernelRows kernelColumns strideRows strideColumns)
instance (GHC.TypeLits.KnownNat channels, GHC.TypeLits.KnownNat filters, GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelColumns, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideColumns, GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelColumns) GHC.TypeLits.* channels)) => Data.Serialize.Serialize (Grenade.Layers.Convolution.Convolution channels filters kernelRows kernelColumns strideRows strideColumns)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelCols, GHC.TypeLits.KnownNat filters, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideCols, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputCols, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputCols, GHC.TypeLits.KnownNat channels, ((outputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (inputRows GHC.TypeLits.- kernelRows), ((outputCols GHC.TypeLits.- 1) GHC.TypeLits.* strideCols) ~ (inputCols GHC.TypeLits.- kernelCols), GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelCols) GHC.TypeLits.* channels), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* filters)) => Grenade.Core.Layer.Layer (Grenade.Layers.Convolution.Convolution channels filters kernelRows kernelCols strideRows strideCols) ('Grenade.Core.Shape.D3 inputRows inputCols channels) ('Grenade.Core.Shape.D3 outputRows outputCols filters)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelCols, GHC.TypeLits.KnownNat filters, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideCols, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputCols, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputCols, ((outputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (inputRows GHC.TypeLits.- kernelRows), ((outputCols GHC.TypeLits.- 1) GHC.TypeLits.* strideCols) ~ (inputCols GHC.TypeLits.- kernelCols), GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelCols) GHC.TypeLits.* 1), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* filters)) => Grenade.Core.Layer.Layer (Grenade.Layers.Convolution.Convolution 1 filters kernelRows kernelCols strideRows strideCols) ('Grenade.Core.Shape.D2 inputRows inputCols) ('Grenade.Core.Shape.D3 outputRows outputCols filters)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelCols, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideCols, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputCols, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputCols, ((outputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (inputRows GHC.TypeLits.- kernelRows), ((outputCols GHC.TypeLits.- 1) GHC.TypeLits.* strideCols) ~ (inputCols GHC.TypeLits.- kernelCols), GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelCols) GHC.TypeLits.* 1), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* 1)) => Grenade.Core.Layer.Layer (Grenade.Layers.Convolution.Convolution 1 1 kernelRows kernelCols strideRows strideCols) ('Grenade.Core.Shape.D2 inputRows inputCols) ('Grenade.Core.Shape.D2 outputRows outputCols)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelCols, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideCols, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputCols, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputCols, GHC.TypeLits.KnownNat channels, ((outputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (inputRows GHC.TypeLits.- kernelRows), ((outputCols GHC.TypeLits.- 1) GHC.TypeLits.* strideCols) ~ (inputCols GHC.TypeLits.- kernelCols), GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelCols) GHC.TypeLits.* channels), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* 1)) => Grenade.Core.Layer.Layer (Grenade.Layers.Convolution.Convolution channels 1 kernelRows kernelCols strideRows strideCols) ('Grenade.Core.Shape.D3 inputRows inputCols channels) ('Grenade.Core.Shape.D2 outputRows outputCols)
module Grenade.Layers.Crop
-- | A cropping layer for a neural network.
data Crop :: Nat -> Nat -> Nat -> Nat -> *
[Crop] :: Crop cropLeft cropTop cropRight cropBottom
instance GHC.Show.Show (Grenade.Layers.Crop.Crop cropLeft cropTop cropRight cropBottom)
instance Grenade.Core.Layer.UpdateLayer (Grenade.Layers.Crop.Crop l t r b)
instance (GHC.TypeLits.KnownNat cropLeft, GHC.TypeLits.KnownNat cropTop, GHC.TypeLits.KnownNat cropRight, GHC.TypeLits.KnownNat cropBottom, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputColumns, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputColumns, ((inputRows GHC.TypeLits.- cropTop) GHC.TypeLits.- cropBottom) ~ outputRows, ((inputColumns GHC.TypeLits.- cropLeft) GHC.TypeLits.- cropRight) ~ outputColumns) => Grenade.Core.Layer.Layer (Grenade.Layers.Crop.Crop cropLeft cropTop cropRight cropBottom) ('Grenade.Core.Shape.D2 inputRows inputColumns) ('Grenade.Core.Shape.D2 outputRows outputColumns)
instance (GHC.TypeLits.KnownNat cropLeft, GHC.TypeLits.KnownNat cropTop, GHC.TypeLits.KnownNat cropRight, GHC.TypeLits.KnownNat cropBottom, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputColumns, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputColumns, GHC.TypeLits.KnownNat channels, GHC.TypeLits.KnownNat (inputRows GHC.TypeLits.* channels), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* channels), ((outputRows GHC.TypeLits.+ cropTop) GHC.TypeLits.+ cropBottom) ~ inputRows, ((outputColumns GHC.TypeLits.+ cropLeft) GHC.TypeLits.+ cropRight) ~ inputColumns) => Grenade.Core.Layer.Layer (Grenade.Layers.Crop.Crop cropLeft cropTop cropRight cropBottom) ('Grenade.Core.Shape.D3 inputRows inputColumns channels) ('Grenade.Core.Shape.D3 outputRows outputColumns channels)
-- | A deconvolution layer is in many ways a convolution layer in reverse.
-- It learns a kernel to apply to each pixel location, spreading it out
-- into a larger layer.
--
-- This layer is important for image generation tasks, such as GANs on
-- images.
module Grenade.Layers.Deconvolution
-- | A Deconvolution layer for a neural network. This uses the im2col
-- Convolution trick popularised by Caffe.
--
-- The Deconvolution layer is a way of spreading out a single response
-- into a larger image, and is useful in generating images.
data Deconvolution :: Nat -> Nat -> Nat -> Nat -> Nat -> Nat -> *
[Deconvolution] :: (KnownNat channels, KnownNat filters, KnownNat kernelRows, KnownNat kernelColumns, KnownNat strideRows, KnownNat strideColumns, KnownNat kernelFlattened, kernelFlattened ~ ((kernelRows * kernelColumns) * filters)) => !(L kernelFlattened channels) -> !(L kernelFlattened channels) -> Deconvolution channels filters kernelRows kernelColumns strideRows strideColumns
data Deconvolution' :: Nat -> Nat -> Nat -> Nat -> Nat -> Nat -> *
[Deconvolution'] :: (KnownNat channels, KnownNat filters, KnownNat kernelRows, KnownNat kernelColumns, KnownNat strideRows, KnownNat strideColumns, KnownNat kernelFlattened, kernelFlattened ~ ((kernelRows * kernelColumns) * filters)) => !(L kernelFlattened channels) -> Deconvolution' channels filters kernelRows kernelColumns strideRows strideColumns
randomDeconvolution :: (MonadRandom m, KnownNat channels, KnownNat filters, KnownNat kernelRows, KnownNat kernelColumns, KnownNat strideRows, KnownNat strideColumns, KnownNat kernelFlattened, kernelFlattened ~ ((kernelRows * kernelColumns) * filters)) => m (Deconvolution channels filters kernelRows kernelColumns strideRows strideColumns)
instance GHC.Show.Show (Grenade.Layers.Deconvolution.Deconvolution c f k k' s s')
instance (GHC.TypeLits.KnownNat channels, GHC.TypeLits.KnownNat filters, GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelColumns, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideColumns, GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelColumns) GHC.TypeLits.* filters)) => Grenade.Core.Layer.UpdateLayer (Grenade.Layers.Deconvolution.Deconvolution channels filters kernelRows kernelColumns strideRows strideColumns)
instance (GHC.TypeLits.KnownNat channels, GHC.TypeLits.KnownNat filters, GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelColumns, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideColumns, GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelColumns) GHC.TypeLits.* filters)) => Data.Serialize.Serialize (Grenade.Layers.Deconvolution.Deconvolution channels filters kernelRows kernelColumns strideRows strideColumns)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelCols, GHC.TypeLits.KnownNat filters, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideCols, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputCols, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputCols, ((inputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (outputRows GHC.TypeLits.- kernelRows), ((inputCols GHC.TypeLits.- 1) GHC.TypeLits.* strideCols) ~ (outputCols GHC.TypeLits.- kernelCols), GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelCols) GHC.TypeLits.* filters), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* filters)) => Grenade.Core.Layer.Layer (Grenade.Layers.Deconvolution.Deconvolution 1 filters kernelRows kernelCols strideRows strideCols) ('Grenade.Core.Shape.D2 inputRows inputCols) ('Grenade.Core.Shape.D3 outputRows outputCols filters)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelCols, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideCols, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputCols, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputCols, ((inputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (outputRows GHC.TypeLits.- kernelRows), ((inputCols GHC.TypeLits.- 1) GHC.TypeLits.* strideCols) ~ (outputCols GHC.TypeLits.- kernelCols), GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelCols) GHC.TypeLits.* 1), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* 1)) => Grenade.Core.Layer.Layer (Grenade.Layers.Deconvolution.Deconvolution 1 1 kernelRows kernelCols strideRows strideCols) ('Grenade.Core.Shape.D2 inputRows inputCols) ('Grenade.Core.Shape.D2 outputRows outputCols)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelCols, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideCols, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputCols, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputCols, ((inputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (outputRows GHC.TypeLits.- kernelRows), ((inputCols GHC.TypeLits.- 1) GHC.TypeLits.* strideCols) ~ (outputCols GHC.TypeLits.- kernelCols), GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelCols) GHC.TypeLits.* 1), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* 1), GHC.TypeLits.KnownNat channels) => Grenade.Core.Layer.Layer (Grenade.Layers.Deconvolution.Deconvolution channels 1 kernelRows kernelCols strideRows strideCols) ('Grenade.Core.Shape.D3 inputRows inputCols channels) ('Grenade.Core.Shape.D2 outputRows outputCols)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelCols, GHC.TypeLits.KnownNat filters, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideCols, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputCols, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputCols, GHC.TypeLits.KnownNat channels, ((inputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (outputRows GHC.TypeLits.- kernelRows), ((inputCols GHC.TypeLits.- 1) GHC.TypeLits.* strideCols) ~ (outputCols GHC.TypeLits.- kernelCols), GHC.TypeLits.KnownNat ((kernelRows GHC.TypeLits.* kernelCols) GHC.TypeLits.* filters), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* filters)) => Grenade.Core.Layer.Layer (Grenade.Layers.Deconvolution.Deconvolution channels filters kernelRows kernelCols strideRows strideCols) ('Grenade.Core.Shape.D3 inputRows inputCols channels) ('Grenade.Core.Shape.D3 outputRows outputCols filters)
module Grenade.Layers.Dropout
data Dropout
Dropout :: Double -> Int -> Dropout
[dropoutRate] :: Dropout -> Double
[dropoutSeed] :: Dropout -> Int
randomDropout :: MonadRandom m => Double -> m Dropout
instance GHC.Show.Show Grenade.Layers.Dropout.Dropout
instance Grenade.Core.Layer.UpdateLayer Grenade.Layers.Dropout.Dropout
instance GHC.TypeLits.KnownNat i => Grenade.Core.Layer.Layer Grenade.Layers.Dropout.Dropout ('Grenade.Core.Shape.D1 i) ('Grenade.Core.Shape.D1 i)
module Grenade.Layers.Elu
-- | An exponential linear unit. A layer which can act between any shape of
-- the same dimension, acting as a diode on every neuron individually.
data Elu
Elu :: Elu
instance GHC.Show.Show Grenade.Layers.Elu.Elu
instance Grenade.Core.Layer.UpdateLayer Grenade.Layers.Elu.Elu
instance Data.Serialize.Serialize Grenade.Layers.Elu.Elu
instance GHC.TypeLits.KnownNat i => Grenade.Core.Layer.Layer Grenade.Layers.Elu.Elu ('Grenade.Core.Shape.D1 i) ('Grenade.Core.Shape.D1 i)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat j) => Grenade.Core.Layer.Layer Grenade.Layers.Elu.Elu ('Grenade.Core.Shape.D2 i j) ('Grenade.Core.Shape.D2 i j)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat j, GHC.TypeLits.KnownNat k) => Grenade.Core.Layer.Layer Grenade.Layers.Elu.Elu ('Grenade.Core.Shape.D3 i j k) ('Grenade.Core.Shape.D3 i j k)
module Grenade.Layers.FullyConnected
-- | A basic fully connected (or inner product) neural network layer.
data FullyConnected i o
FullyConnected :: !(FullyConnected' i o) -> !(FullyConnected' i o) -> FullyConnected i o
data FullyConnected' i o
FullyConnected' :: !(R o) -> !(L o i) -> FullyConnected' i o
randomFullyConnected :: (MonadRandom m, KnownNat i, KnownNat o) => m (FullyConnected i o)
instance GHC.Show.Show (Grenade.Layers.FullyConnected.FullyConnected i o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o) => Grenade.Core.Layer.UpdateLayer (Grenade.Layers.FullyConnected.FullyConnected i o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o) => Grenade.Core.Layer.Layer (Grenade.Layers.FullyConnected.FullyConnected i o) ('Grenade.Core.Shape.D1 i) ('Grenade.Core.Shape.D1 o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o) => Data.Serialize.Serialize (Grenade.Layers.FullyConnected.FullyConnected i o)
module Grenade.Layers.Logit
-- | A Logit layer.
--
-- A layer which can act between any shape of the same dimension,
-- performing a sigmoid function. This layer should be used as the output
-- layer of a network for logistic regression (classification) problems.
data Logit
Logit :: Logit
instance GHC.Show.Show Grenade.Layers.Logit.Logit
instance Grenade.Core.Layer.UpdateLayer Grenade.Layers.Logit.Logit
instance (a ~ b, Data.Singletons.SingI a) => Grenade.Core.Layer.Layer Grenade.Layers.Logit.Logit a b
instance Data.Serialize.Serialize Grenade.Layers.Logit.Logit
module Grenade.Layers.Merge
-- | A Merging layer.
--
-- Similar to Concat layer, except sums the activations instead of
-- creating a larger shape.
data Merge :: * -> * -> *
[Merge] :: x -> y -> Merge x y
instance (GHC.Show.Show x, GHC.Show.Show y) => GHC.Show.Show (Grenade.Layers.Merge.Merge x y)
instance (Grenade.Core.Layer.UpdateLayer x, Grenade.Core.Layer.UpdateLayer y) => Grenade.Core.Layer.UpdateLayer (Grenade.Layers.Merge.Merge x y)
instance (Data.Singletons.SingI i, Data.Singletons.SingI o, Grenade.Core.Layer.Layer x i o, Grenade.Core.Layer.Layer y i o) => Grenade.Core.Layer.Layer (Grenade.Layers.Merge.Merge x y) i o
instance (Data.Serialize.Serialize a, Data.Serialize.Serialize b) => Data.Serialize.Serialize (Grenade.Layers.Merge.Merge a b)
module Grenade.Layers.Pad
-- | A padding layer for a neural network.
--
-- Pads on the X and Y dimension of an image.
data Pad :: Nat -> Nat -> Nat -> Nat -> *
[Pad] :: Pad padLeft padTop padRight padBottom
instance GHC.Show.Show (Grenade.Layers.Pad.Pad padLeft padTop padRight padBottom)
instance Grenade.Core.Layer.UpdateLayer (Grenade.Layers.Pad.Pad l t r b)
instance Data.Serialize.Serialize (Grenade.Layers.Pad.Pad l t r b)
instance (GHC.TypeLits.KnownNat padLeft, GHC.TypeLits.KnownNat padTop, GHC.TypeLits.KnownNat padRight, GHC.TypeLits.KnownNat padBottom, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputColumns, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputColumns, ((inputRows GHC.TypeLits.+ padTop) GHC.TypeLits.+ padBottom) ~ outputRows, ((inputColumns GHC.TypeLits.+ padLeft) GHC.TypeLits.+ padRight) ~ outputColumns) => Grenade.Core.Layer.Layer (Grenade.Layers.Pad.Pad padLeft padTop padRight padBottom) ('Grenade.Core.Shape.D2 inputRows inputColumns) ('Grenade.Core.Shape.D2 outputRows outputColumns)
instance (GHC.TypeLits.KnownNat padLeft, GHC.TypeLits.KnownNat padTop, GHC.TypeLits.KnownNat padRight, GHC.TypeLits.KnownNat padBottom, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputColumns, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputColumns, GHC.TypeLits.KnownNat channels, GHC.TypeLits.KnownNat (inputRows GHC.TypeLits.* channels), GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* channels), ((inputRows GHC.TypeLits.+ padTop) GHC.TypeLits.+ padBottom) ~ outputRows, ((inputColumns GHC.TypeLits.+ padLeft) GHC.TypeLits.+ padRight) ~ outputColumns) => Grenade.Core.Layer.Layer (Grenade.Layers.Pad.Pad padLeft padTop padRight padBottom) ('Grenade.Core.Shape.D3 inputRows inputColumns channels) ('Grenade.Core.Shape.D3 outputRows outputColumns channels)
module Grenade.Layers.Pooling
-- | A pooling layer for a neural network.
--
-- Does a max pooling, looking over a kernel similarly to the convolution
-- network, but returning the maximum value only. This layer is often used to
-- provide minor amounts of translational invariance.
--
-- The kernel size dictates which input and output sizes will "fit".
-- Fitting the equation: `out = (in - kernel) / stride + 1` for both
-- dimensions.
data Pooling :: Nat -> Nat -> Nat -> Nat -> *
[Pooling] :: Pooling kernelRows kernelColumns strideRows strideColumns
instance GHC.Show.Show (Grenade.Layers.Pooling.Pooling k k' s s')
instance Grenade.Core.Layer.UpdateLayer (Grenade.Layers.Pooling.Pooling kernelRows kernelColumns strideRows strideColumns)
instance Data.Serialize.Serialize (Grenade.Layers.Pooling.Pooling kernelRows kernelColumns strideRows strideColumns)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelColumns, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideColumns, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputColumns, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputColumns, ((outputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (inputRows GHC.TypeLits.- kernelRows), ((outputColumns GHC.TypeLits.- 1) GHC.TypeLits.* strideColumns) ~ (inputColumns GHC.TypeLits.- kernelColumns)) => Grenade.Core.Layer.Layer (Grenade.Layers.Pooling.Pooling kernelRows kernelColumns strideRows strideColumns) ('Grenade.Core.Shape.D2 inputRows inputColumns) ('Grenade.Core.Shape.D2 outputRows outputColumns)
instance (GHC.TypeLits.KnownNat kernelRows, GHC.TypeLits.KnownNat kernelColumns, GHC.TypeLits.KnownNat strideRows, GHC.TypeLits.KnownNat strideColumns, GHC.TypeLits.KnownNat inputRows, GHC.TypeLits.KnownNat inputColumns, GHC.TypeLits.KnownNat outputRows, GHC.TypeLits.KnownNat outputColumns, GHC.TypeLits.KnownNat channels, GHC.TypeLits.KnownNat (outputRows GHC.TypeLits.* channels), ((outputRows GHC.TypeLits.- 1) GHC.TypeLits.* strideRows) ~ (inputRows GHC.TypeLits.- kernelRows), ((outputColumns GHC.TypeLits.- 1) GHC.TypeLits.* strideColumns) ~ (inputColumns GHC.TypeLits.- kernelColumns)) => Grenade.Core.Layer.Layer (Grenade.Layers.Pooling.Pooling kernelRows kernelColumns strideRows strideColumns) ('Grenade.Core.Shape.D3 inputRows inputColumns channels) ('Grenade.Core.Shape.D3 outputRows outputColumns channels)
module Grenade.Layers.Relu
-- | A rectifying linear unit. A layer which can act between any shape of
-- the same dimension, acting as a diode on every neuron individually.
data Relu
Relu :: Relu
instance GHC.Show.Show Grenade.Layers.Relu.Relu
instance Grenade.Core.Layer.UpdateLayer Grenade.Layers.Relu.Relu
instance Data.Serialize.Serialize Grenade.Layers.Relu.Relu
instance GHC.TypeLits.KnownNat i => Grenade.Core.Layer.Layer Grenade.Layers.Relu.Relu ('Grenade.Core.Shape.D1 i) ('Grenade.Core.Shape.D1 i)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat j) => Grenade.Core.Layer.Layer Grenade.Layers.Relu.Relu ('Grenade.Core.Shape.D2 i j) ('Grenade.Core.Shape.D2 i j)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat j, GHC.TypeLits.KnownNat k) => Grenade.Core.Layer.Layer Grenade.Layers.Relu.Relu ('Grenade.Core.Shape.D3 i j k) ('Grenade.Core.Shape.D3 i j k)
module Grenade.Layers.Reshape
-- | Reshape Layer
--
-- The Reshape layer can flatten any 2D or 3D image to 1D vector with the
-- same number of activations, as well as cast up from 1D to a 2D or 3D
-- shape.
--
-- Can also be used to turn a 3D image with only one channel into a 2D
-- image or vice versa.
data Reshape
Reshape :: Reshape
instance GHC.Show.Show Grenade.Layers.Reshape.Reshape
instance Grenade.Core.Layer.UpdateLayer Grenade.Layers.Reshape.Reshape
instance (GHC.TypeLits.KnownNat a, GHC.TypeLits.KnownNat x, GHC.TypeLits.KnownNat y, a ~ (x GHC.TypeLits.* y)) => Grenade.Core.Layer.Layer Grenade.Layers.Reshape.Reshape ('Grenade.Core.Shape.D2 x y) ('Grenade.Core.Shape.D1 a)
instance (GHC.TypeLits.KnownNat a, GHC.TypeLits.KnownNat x, GHC.TypeLits.KnownNat y, GHC.TypeLits.KnownNat (x GHC.TypeLits.* z), GHC.TypeLits.KnownNat z, a ~ ((x GHC.TypeLits.* y) GHC.TypeLits.* z)) => Grenade.Core.Layer.Layer Grenade.Layers.Reshape.Reshape ('Grenade.Core.Shape.D3 x y z) ('Grenade.Core.Shape.D1 a)
instance (GHC.TypeLits.KnownNat y, GHC.TypeLits.KnownNat x, GHC.TypeLits.KnownNat z, z ~ 1) => Grenade.Core.Layer.Layer Grenade.Layers.Reshape.Reshape ('Grenade.Core.Shape.D3 x y z) ('Grenade.Core.Shape.D2 x y)
instance (GHC.TypeLits.KnownNat y, GHC.TypeLits.KnownNat x, GHC.TypeLits.KnownNat z, z ~ 1) => Grenade.Core.Layer.Layer Grenade.Layers.Reshape.Reshape ('Grenade.Core.Shape.D2 x y) ('Grenade.Core.Shape.D3 x y z)
instance (GHC.TypeLits.KnownNat a, GHC.TypeLits.KnownNat x, GHC.TypeLits.KnownNat y, a ~ (x GHC.TypeLits.* y)) => Grenade.Core.Layer.Layer Grenade.Layers.Reshape.Reshape ('Grenade.Core.Shape.D1 a) ('Grenade.Core.Shape.D2 x y)
instance (GHC.TypeLits.KnownNat a, GHC.TypeLits.KnownNat x, GHC.TypeLits.KnownNat y, GHC.TypeLits.KnownNat (x GHC.TypeLits.* z), GHC.TypeLits.KnownNat z, a ~ ((x GHC.TypeLits.* y) GHC.TypeLits.* z)) => Grenade.Core.Layer.Layer Grenade.Layers.Reshape.Reshape ('Grenade.Core.Shape.D1 a) ('Grenade.Core.Shape.D3 x y z)
instance Data.Serialize.Serialize Grenade.Layers.Reshape.Reshape
module Grenade.Layers.Softmax
-- | A Softmax layer
--
-- This layer is like a logit layer, but normalises a set of matrices to
-- be probabilities.
--
-- One can use this layer as the last layer in a network if they need
-- normalised probabilities.
data Softmax
Softmax :: Softmax
softmax :: KnownNat i => R i -> R i
softmax' :: KnownNat i => R i -> R i -> R i
instance GHC.Show.Show Grenade.Layers.Softmax.Softmax
instance Grenade.Core.Layer.UpdateLayer Grenade.Layers.Softmax.Softmax
instance GHC.TypeLits.KnownNat i => Grenade.Core.Layer.Layer Grenade.Layers.Softmax.Softmax ('Grenade.Core.Shape.D1 i) ('Grenade.Core.Shape.D1 i)
instance Data.Serialize.Serialize Grenade.Layers.Softmax.Softmax
module Grenade.Layers.Tanh
-- | A Tanh layer. A layer which can act between any shape of the same
-- dimension, performing a tanh function.
data Tanh
Tanh :: Tanh
instance GHC.Show.Show Grenade.Layers.Tanh.Tanh
instance Grenade.Core.Layer.UpdateLayer Grenade.Layers.Tanh.Tanh
instance Data.Serialize.Serialize Grenade.Layers.Tanh.Tanh
instance (a ~ b, Data.Singletons.SingI a) => Grenade.Core.Layer.Layer Grenade.Layers.Tanh.Tanh a b
module Grenade.Layers.Trivial
-- | A Trivial layer.
--
-- This can be used to pass an unchanged value up one side of a graph,
-- for a Residual network for example.
data Trivial
Trivial :: Trivial
instance GHC.Show.Show Grenade.Layers.Trivial.Trivial
instance Data.Serialize.Serialize Grenade.Layers.Trivial.Trivial
instance Grenade.Core.Layer.UpdateLayer Grenade.Layers.Trivial.Trivial
instance a ~ b => Grenade.Core.Layer.Layer Grenade.Layers.Trivial.Trivial a b
-- | Export an Inception style type, which can be used to build up complex
-- multiconvolution size networks.
module Grenade.Layers.Inception
-- | Type of an inception layer.
--
-- It looks like a bit of a handful, but is actually pretty easy to use.
--
-- The first three type parameters are the size of the (3D) data the
-- inception layer will take. It will emit 3D data with the number of
-- channels being the sum of chx, chy, chz,
-- which are the number of convolution filters in the 3x3, 5x5, and 7x7
-- convolutions Layers respectively.
--
-- The network gets padded effectively before each convolution filter
-- such that the output dimension is the same x and y as the input.
type Inception rows cols channels chx chy chz = Network '[Concat (D3 rows cols (chx + chy)) (InceptionMini rows cols channels chx chy) (D3 rows cols chz) (Inception7x7 rows cols channels chz)] '[D3 rows cols channels, D3 rows cols ((chx + chy) + chz)]
type InceptionMini rows cols channels chx chy = Network '[Concat (D3 rows cols chx) (Inception3x3 rows cols channels chx) (D3 rows cols chy) (Inception5x5 rows cols channels chy)] '[D3 rows cols channels, D3 rows cols (chx + chy)]
type Resnet branch = Merge Trivial branch
module Grenade.Layers
module Grenade.Recurrent.Core.Layer
class (RecurrentUpdateLayer x, SingI (RecurrentShape x)) => RecurrentLayer x (i :: Shape) (o :: Shape) where type RecTape x i o :: * where {
type family RecTape x i o :: *;
}
-- | Used in training and scoring. Take the input from the previous layer,
-- and give the output from this layer.
runRecurrentForwards :: RecurrentLayer x i o => x -> S (RecurrentShape x) -> S i -> (RecTape x i o, S (RecurrentShape x), S o)
-- | Back propagate a step. Takes the current layer, the input that the
-- layer gave from the input and the back propagated derivatives from the
-- layer above. Returns the gradient layer and the derivatives to push
-- back further.
runRecurrentBackwards :: RecurrentLayer x i o => x -> RecTape x i o -> S (RecurrentShape x) -> S o -> (Gradient x, S (RecurrentShape x), S i)
-- | Class for a recurrent layer. It's quite similar to a normal layer but
-- for the input and output of an extra recurrent data shape.
class UpdateLayer x => RecurrentUpdateLayer x where type RecurrentShape x :: Shape where {
type family RecurrentShape x :: Shape;
}
module Grenade.Recurrent.Core.Network
-- | Witness type to indicate we're building up with a recurrent layer.
data Recurrent :: * -> *
-- | Witness type to indicate we're building up with a normal feed
-- forward layer.
data FeedForward :: * -> *
-- | Type of a recurrent neural network.
--
-- The [*] type specifies the types of the layers.
--
-- The [Shape] type specifies the shapes of data passed between the
-- layers.
--
-- The definition is similar to a Network, but every layer in the type is
-- tagged by whether it's a FeedForward layer or a Recurrent layer.
--
-- Often, to make the definitions more concise, one will use a type alias
-- for these empty data types.
data RecurrentNetwork :: [*] -> [Shape] -> *
[RNil] :: SingI i => RecurrentNetwork '[] '[i]
[:~~>] :: (SingI i, Layer x i h) => !x -> !(RecurrentNetwork xs (h : hs)) -> RecurrentNetwork (FeedForward x : xs) (i : (h : hs))
[:~@>] :: (SingI i, RecurrentLayer x i h) => !x -> !(RecurrentNetwork xs (h : hs)) -> RecurrentNetwork (Recurrent x : xs) (i : (h : hs))
-- | Recurrent inputs (sideways shapes on an imaginary unrolled graph)
-- Parameterised on the layers of a Network.
data RecurrentInputs :: [*] -> *
[RINil] :: RecurrentInputs '[]
[:~~+>] :: UpdateLayer x => () -> !(RecurrentInputs xs) -> RecurrentInputs (FeedForward x : xs)
[:~@+>] :: (SingI (RecurrentShape x), RecurrentUpdateLayer x) => !(S (RecurrentShape x)) -> !(RecurrentInputs xs) -> RecurrentInputs (Recurrent x : xs)
-- | All the information required to backpropagate through time safely.
--
-- We index on the time step length as well, to ensure that all Tape
-- lengths are the same.
data RecurrentTapes :: [*] -> [Shape] -> *
[TRNil] :: SingI i => RecurrentTapes '[] '[i]
[:\~>] :: [Tape x i h] -> !(RecurrentTapes xs (h : hs)) -> RecurrentTapes (FeedForward x : xs) (i : (h : hs))
[:\@>] :: [RecTape x i h] -> !(RecurrentTapes xs (h : hs)) -> RecurrentTapes (Recurrent x : xs) (i : (h : hs))
-- | Gradient of a network.
--
-- Parameterised on the layers of the network.
data RecurrentGradients :: [*] -> *
[RGNil] :: RecurrentGradients '[]
[://>] :: UpdateLayer x => [Gradient x] -> RecurrentGradients xs -> RecurrentGradients (phantom x : xs)
-- | Create a network of the types requested
randomRecurrent :: (CreatableRecurrent xs ss, MonadRandom m) => m (RecurrentNetwork xs ss, RecurrentInputs xs)
runRecurrentNetwork :: forall shapes layers. RecurrentNetwork layers shapes -> RecurrentInputs layers -> [S (Head shapes)] -> (RecurrentTapes layers shapes, RecurrentInputs layers, [S (Last shapes)])
runRecurrentGradient :: forall layers shapes. RecurrentNetwork layers shapes -> RecurrentTapes layers shapes -> RecurrentInputs layers -> [S (Last shapes)] -> (RecurrentGradients layers, RecurrentInputs layers, [S (Head shapes)])
-- | Apply a batch of gradients to the network. Uses runUpdates which can be
-- specialised for a layer.
applyRecurrentUpdate :: LearningParameters -> RecurrentNetwork layers shapes -> RecurrentGradients layers -> RecurrentNetwork layers shapes
instance GHC.Show.Show (Grenade.Recurrent.Core.Network.RecurrentNetwork '[] '[i])
instance (GHC.Show.Show x, GHC.Show.Show (Grenade.Recurrent.Core.Network.RecurrentNetwork xs rs)) => GHC.Show.Show (Grenade.Recurrent.Core.Network.RecurrentNetwork (Grenade.Recurrent.Core.Network.FeedForward x : xs) (i : rs))
instance (GHC.Show.Show x, GHC.Show.Show (Grenade.Recurrent.Core.Network.RecurrentNetwork xs rs)) => GHC.Show.Show (Grenade.Recurrent.Core.Network.RecurrentNetwork (Grenade.Recurrent.Core.Network.Recurrent x : xs) (i : rs))
instance Data.Singletons.SingI i => Grenade.Recurrent.Core.Network.CreatableRecurrent '[] '[i]
instance (Data.Singletons.SingI i, Grenade.Core.Layer.Layer x i o, Grenade.Recurrent.Core.Network.CreatableRecurrent xs (o : rs)) => Grenade.Recurrent.Core.Network.CreatableRecurrent (Grenade.Recurrent.Core.Network.FeedForward x : xs) (i : o : rs)
instance (Data.Singletons.SingI i, Grenade.Recurrent.Core.Layer.RecurrentLayer x i o, Grenade.Recurrent.Core.Network.CreatableRecurrent xs (o : rs)) => Grenade.Recurrent.Core.Network.CreatableRecurrent (Grenade.Recurrent.Core.Network.Recurrent x : xs) (i : o : rs)
instance Data.Singletons.SingI i => Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentNetwork '[] '[i])
instance (Data.Singletons.SingI i, Grenade.Core.Layer.Layer x i o, Data.Serialize.Serialize x, Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentNetwork xs (o : rs))) => Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentNetwork (Grenade.Recurrent.Core.Network.FeedForward x : xs) (i : o : rs))
instance (Data.Singletons.SingI i, Grenade.Recurrent.Core.Layer.RecurrentLayer x i o, Data.Serialize.Serialize x, Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentNetwork xs (o : rs))) => Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentNetwork (Grenade.Recurrent.Core.Network.Recurrent x : xs) (i : o : rs))
instance Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentInputs '[])
instance (Grenade.Core.Layer.UpdateLayer x, Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentInputs ys)) => Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentInputs (Grenade.Recurrent.Core.Network.FeedForward x : ys))
instance (Data.Singletons.SingI (Grenade.Recurrent.Core.Layer.RecurrentShape x), Grenade.Recurrent.Core.Layer.RecurrentUpdateLayer x, Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentInputs ys)) => Data.Serialize.Serialize (Grenade.Recurrent.Core.Network.RecurrentInputs (Grenade.Recurrent.Core.Network.Recurrent x : ys))
instance GHC.Num.Num (Grenade.Recurrent.Core.Network.RecurrentInputs '[])
instance (Grenade.Core.Layer.UpdateLayer x, GHC.Num.Num (Grenade.Recurrent.Core.Network.RecurrentInputs ys)) => GHC.Num.Num (Grenade.Recurrent.Core.Network.RecurrentInputs (Grenade.Recurrent.Core.Network.FeedForward x : ys))
instance (Data.Singletons.SingI (Grenade.Recurrent.Core.Layer.RecurrentShape x), Grenade.Recurrent.Core.Layer.RecurrentUpdateLayer x, GHC.Num.Num (Grenade.Recurrent.Core.Network.RecurrentInputs ys)) => GHC.Num.Num (Grenade.Recurrent.Core.Network.RecurrentInputs (Grenade.Recurrent.Core.Network.Recurrent x : ys))
module Grenade.Recurrent.Core.Runner
trainRecurrent :: forall shapes layers. (SingI (Last shapes), Num (RecurrentInputs layers)) => LearningParameters -> RecurrentNetwork layers shapes -> RecurrentInputs layers -> [(S (Head shapes), Maybe (S (Last shapes)))] -> (RecurrentNetwork layers shapes, RecurrentInputs layers)
-- | Just forwards propagation with no training.
runRecurrent :: RecurrentNetwork layers shapes -> RecurrentInputs layers -> S (Head shapes) -> (RecurrentInputs layers, S (Last shapes))
-- | Drive a network and collect its back propagated gradients.
backPropagateRecurrent :: forall shapes layers. (SingI (Last shapes), Num (RecurrentInputs layers)) => RecurrentNetwork layers shapes -> RecurrentInputs layers -> [(S (Head shapes), Maybe (S (Last shapes)))] -> (RecurrentGradients layers, RecurrentInputs layers)
module Grenade.Recurrent.Core
module Grenade.Recurrent.Layers.BasicRecurrent
data BasicRecurrent :: Nat -> Nat -> *
[BasicRecurrent] :: (KnownNat input, KnownNat output, KnownNat matrixCols, matrixCols ~ (input + output)) => !(R output) -> !(R output) -> !(L output matrixCols) -> !(L output matrixCols) -> BasicRecurrent input output
randomBasicRecurrent :: (MonadRandom m, KnownNat i, KnownNat o, KnownNat x, x ~ (i + o)) => m (BasicRecurrent i o)
instance GHC.Show.Show (Grenade.Recurrent.Layers.BasicRecurrent.BasicRecurrent i o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o, GHC.TypeLits.KnownNat (i GHC.TypeLits.+ o)) => Grenade.Core.Layer.UpdateLayer (Grenade.Recurrent.Layers.BasicRecurrent.BasicRecurrent i o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o, GHC.TypeLits.KnownNat (i GHC.TypeLits.+ o), i GHC.TypeLits.<= (i GHC.TypeLits.+ o), o ~ ((i GHC.TypeLits.+ o) GHC.TypeLits.- i)) => Grenade.Recurrent.Core.Layer.RecurrentUpdateLayer (Grenade.Recurrent.Layers.BasicRecurrent.BasicRecurrent i o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o, GHC.TypeLits.KnownNat (i GHC.TypeLits.+ o), i GHC.TypeLits.<= (i GHC.TypeLits.+ o), o ~ ((i GHC.TypeLits.+ o) GHC.TypeLits.- i)) => Grenade.Recurrent.Core.Layer.RecurrentLayer (Grenade.Recurrent.Layers.BasicRecurrent.BasicRecurrent i o) ('Grenade.Core.Shape.D1 i) ('Grenade.Core.Shape.D1 o)
module Grenade.Recurrent.Layers.LSTM
-- | Long Short Term Memory Recurrent unit
--
-- This is a Peephole formulation, so the recurrent shape is just the
-- cell state, the previous output is not held or used at all.
data LSTM :: Nat -> Nat -> *
[LSTM] :: (KnownNat input, KnownNat output) => !(LSTMWeights input output) -> !(LSTMWeights input output) -> LSTM input output
data LSTMWeights :: Nat -> Nat -> *
[LSTMWeights] :: (KnownNat input, KnownNat output) => {lstmWf :: !(L output input), lstmUf :: !(L output output), lstmBf :: !(R output), lstmWi :: !(L output input), lstmUi :: !(L output output), lstmBi :: !(R output), lstmWo :: !(L output input), lstmUo :: !(L output output), lstmBo :: !(R output), lstmWc :: !(L output input), lstmBc :: !(R output)} -> LSTMWeights input output
-- | Generate an LSTM layer with random weights; one can also just call
-- createRandom from UpdateLayer
--
-- Has forget gate biases set to 1 to encourage early learning.
--
--
-- https://github.com/karpathy/char-rnn/commit/0dfeaa454e687dd0278f036552ea1e48a0a408c9
randomLSTM :: forall m i o. (MonadRandom m, KnownNat i, KnownNat o) => m (LSTM i o)
instance GHC.Show.Show (Grenade.Recurrent.Layers.LSTM.LSTM i o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o) => Grenade.Core.Layer.UpdateLayer (Grenade.Recurrent.Layers.LSTM.LSTM i o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o) => Grenade.Recurrent.Core.Layer.RecurrentUpdateLayer (Grenade.Recurrent.Layers.LSTM.LSTM i o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o) => Grenade.Recurrent.Core.Layer.RecurrentLayer (Grenade.Recurrent.Layers.LSTM.LSTM i o) ('Grenade.Core.Shape.D1 i) ('Grenade.Core.Shape.D1 o)
instance (GHC.TypeLits.KnownNat i, GHC.TypeLits.KnownNat o) => Data.Serialize.Serialize (Grenade.Recurrent.Layers.LSTM.LSTM i o)
module Grenade.Recurrent.Layers
module Grenade.Recurrent
module Grenade