Safe Haskell | None |
---|---|
Language | Haskell98 |
- data Recurrent :: * -> *
- data FeedForward :: * -> *
- data RecurrentNetwork :: [*] -> [Shape] -> * where
- RNil :: SingI i => RecurrentNetwork '[] '[i]
- (:~~>) :: (SingI i, Layer x i h) => !x -> !(RecurrentNetwork xs (h ': hs)) -> RecurrentNetwork (FeedForward x ': xs) (i ': (h ': hs))
- (:~@>) :: (SingI i, RecurrentLayer x i h) => !x -> !(RecurrentNetwork xs (h ': hs)) -> RecurrentNetwork (Recurrent x ': xs) (i ': (h ': hs))
- data RecurrentInputs :: [*] -> * where
- RINil :: RecurrentInputs '[]
- (:~~+>) :: UpdateLayer x => () -> !(RecurrentInputs xs) -> RecurrentInputs (FeedForward x ': xs)
- (:~@+>) :: (SingI (RecurrentShape x), RecurrentUpdateLayer x) => !(S (RecurrentShape x)) -> !(RecurrentInputs xs) -> RecurrentInputs (Recurrent x ': xs)
- data RecurrentTapes :: [*] -> [Shape] -> * where
- TRNil :: SingI i => RecurrentTapes '[] '[i]
- (:\~>) :: [Tape x i h] -> !(RecurrentTapes xs (h ': hs)) -> RecurrentTapes (FeedForward x ': xs) (i ': (h ': hs))
- (:\@>) :: [RecTape x i h] -> !(RecurrentTapes xs (h ': hs)) -> RecurrentTapes (Recurrent x ': xs) (i ': (h ': hs))
- data RecurrentGradients :: [*] -> * where
- RGNil :: RecurrentGradients '[]
- (://>) :: UpdateLayer x => [Gradient x] -> RecurrentGradients xs -> RecurrentGradients (phantom x ': xs)
- randomRecurrent :: (CreatableRecurrent xs ss, MonadRandom m) => m (RecurrentNetwork xs ss, RecurrentInputs xs)
- runRecurrentNetwork :: forall shapes layers. RecurrentNetwork layers shapes -> RecurrentInputs layers -> [S (Head shapes)] -> (RecurrentTapes layers shapes, RecurrentInputs layers, [S (Last shapes)])
- runRecurrentGradient :: forall layers shapes. RecurrentNetwork layers shapes -> RecurrentTapes layers shapes -> RecurrentInputs layers -> [S (Last shapes)] -> (RecurrentGradients layers, RecurrentInputs layers, [S (Head shapes)])
- applyRecurrentUpdate :: LearningParameters -> RecurrentNetwork layers shapes -> RecurrentGradients layers -> RecurrentNetwork layers shapes
Documentation
data Recurrent :: * -> * Source #
Witness type to indicate we're building up with a recurrent layer.
(SingI Shape (RecurrentShape x), RecurrentUpdateLayer x, Num (RecurrentInputs ys)) => Num (RecurrentInputs ((:) * (Recurrent x) ys)) Source # | |
(SingI Shape (RecurrentShape x), RecurrentUpdateLayer x, Serialize (RecurrentInputs ys)) => Serialize (RecurrentInputs ((:) * (Recurrent x) ys)) Source # | |
(Show x, Show (RecurrentNetwork xs rs)) => Show (RecurrentNetwork ((:) * (Recurrent x) xs) ((:) Shape i rs)) Source # | |
(SingI Shape i, RecurrentLayer x i o, Serialize x, Serialize (RecurrentNetwork xs ((:) Shape o rs))) => Serialize (RecurrentNetwork ((:) * (Recurrent x) xs) ((:) Shape i ((:) Shape o rs))) Source # | |
data FeedForward :: * -> * Source #
Witness type to indicate we're building up with a normal feed forward layer.
(UpdateLayer x, Num (RecurrentInputs ys)) => Num (RecurrentInputs ((:) * (FeedForward x) ys)) Source # | |
(UpdateLayer x, Serialize (RecurrentInputs ys)) => Serialize (RecurrentInputs ((:) * (FeedForward x) ys)) Source # | |
(Show x, Show (RecurrentNetwork xs rs)) => Show (RecurrentNetwork ((:) * (FeedForward x) xs) ((:) Shape i rs)) Source # | |
(SingI Shape i, Layer x i o, Serialize x, Serialize (RecurrentNetwork xs ((:) Shape o rs))) => Serialize (RecurrentNetwork ((:) * (FeedForward x) xs) ((:) Shape i ((:) Shape o rs))) Source # | |
data RecurrentNetwork :: [*] -> [Shape] -> * where Source #
Type of a recurrent neural network.
The [*] type specifies the types of the layers.
The [Shape] type specifies the shapes of data passed between the layers.
The definition is similar to a Network, but every layer in the type is tagged by whether it's a FeedForward layer or a Recurrent layer.
Often, to make the definitions more concise, one will use a type alias for these empty data types.
RNil :: SingI i => RecurrentNetwork '[] '[i] | |
(:~~>) :: (SingI i, Layer x i h) => !x -> !(RecurrentNetwork xs (h ': hs)) -> RecurrentNetwork (FeedForward x ': xs) (i ': (h ': hs)) infixr 5 | |
(:~@>) :: (SingI i, RecurrentLayer x i h) => !x -> !(RecurrentNetwork xs (h ': hs)) -> RecurrentNetwork (Recurrent x ': xs) (i ': (h ': hs)) infixr 5 |
(Show x, Show (RecurrentNetwork xs rs)) => Show (RecurrentNetwork ((:) * (Recurrent x) xs) ((:) Shape i rs)) Source # | |
(Show x, Show (RecurrentNetwork xs rs)) => Show (RecurrentNetwork ((:) * (FeedForward x) xs) ((:) Shape i rs)) Source # | |
Show (RecurrentNetwork ([] *) ((:) Shape i ([] Shape))) Source # | |
(SingI Shape i, RecurrentLayer x i o, Serialize x, Serialize (RecurrentNetwork xs ((:) Shape o rs))) => Serialize (RecurrentNetwork ((:) * (Recurrent x) xs) ((:) Shape i ((:) Shape o rs))) Source # | |
(SingI Shape i, Layer x i o, Serialize x, Serialize (RecurrentNetwork xs ((:) Shape o rs))) => Serialize (RecurrentNetwork ((:) * (FeedForward x) xs) ((:) Shape i ((:) Shape o rs))) Source # | |
SingI Shape i => Serialize (RecurrentNetwork ([] *) ((:) Shape i ([] Shape))) Source # | Add very simple serialisation to the recurrent network |
data RecurrentInputs :: [*] -> * where Source #
Recurrent inputs (sideways shapes on an imaginary unrolled graph). Parameterised on the layers of a Network.
RINil :: RecurrentInputs '[] | |
(:~~+>) :: UpdateLayer x => () -> !(RecurrentInputs xs) -> RecurrentInputs (FeedForward x ': xs) | |
(:~@+>) :: (SingI (RecurrentShape x), RecurrentUpdateLayer x) => !(S (RecurrentShape x)) -> !(RecurrentInputs xs) -> RecurrentInputs (Recurrent x ': xs) |
(SingI Shape (RecurrentShape x), RecurrentUpdateLayer x, Num (RecurrentInputs ys)) => Num (RecurrentInputs ((:) * (Recurrent x) ys)) Source # | |
(UpdateLayer x, Num (RecurrentInputs ys)) => Num (RecurrentInputs ((:) * (FeedForward x) ys)) Source # | |
Num (RecurrentInputs ([] *)) Source # | |
(SingI Shape (RecurrentShape x), RecurrentUpdateLayer x, Serialize (RecurrentInputs ys)) => Serialize (RecurrentInputs ((:) * (Recurrent x) ys)) Source # | |
(UpdateLayer x, Serialize (RecurrentInputs ys)) => Serialize (RecurrentInputs ((:) * (FeedForward x) ys)) Source # | |
Serialize (RecurrentInputs ([] *)) Source # | |
data RecurrentTapes :: [*] -> [Shape] -> * where Source #
All the information required to backpropagate through time safely.
We index on the time step length as well, to ensure that all Tape lengths are the same.
TRNil :: SingI i => RecurrentTapes '[] '[i] | |
(:\~>) :: [Tape x i h] -> !(RecurrentTapes xs (h ': hs)) -> RecurrentTapes (FeedForward x ': xs) (i ': (h ': hs)) | |
(:\@>) :: [RecTape x i h] -> !(RecurrentTapes xs (h ': hs)) -> RecurrentTapes (Recurrent x ': xs) (i ': (h ': hs)) |
data RecurrentGradients :: [*] -> * where Source #
Gradient of a network.
Parameterised on the layers of the network.
RGNil :: RecurrentGradients '[] | |
(://>) :: UpdateLayer x => [Gradient x] -> RecurrentGradients xs -> RecurrentGradients (phantom x ': xs) |
randomRecurrent :: (CreatableRecurrent xs ss, MonadRandom m) => m (RecurrentNetwork xs ss, RecurrentInputs xs) Source #
Create a network of the types requested
runRecurrentNetwork :: forall shapes layers. RecurrentNetwork layers shapes -> RecurrentInputs layers -> [S (Head shapes)] -> (RecurrentTapes layers shapes, RecurrentInputs layers, [S (Last shapes)]) Source #
runRecurrentGradient :: forall layers shapes. RecurrentNetwork layers shapes -> RecurrentTapes layers shapes -> RecurrentInputs layers -> [S (Last shapes)] -> (RecurrentGradients layers, RecurrentInputs layers, [S (Head shapes)]) Source #
applyRecurrentUpdate :: LearningParameters -> RecurrentNetwork layers shapes -> RecurrentGradients layers -> RecurrentNetwork layers shapes Source #
Apply a batch of gradients to the network. Uses runUpdates, which can be specialised for a layer.