module Network.Layer
( LayerDefinition(..)
, Layer(..)
, ShowableLayer(..)
, Connectivity
, RandomTransform
, layerToShowable
, showableToLayer
, createLayer
, scaleLayer
, connectFully
, randomList
, boxMuller
, normals
, uniforms
, boundedUniforms
) where
import Network.Neuron
import System.Random
import Numeric.LinearAlgebra
import Data.Binary (encode, decode, Binary(..))
-- | Specification of a layer before it is materialized: which neuron it
-- uses, how many of them it holds, and how it connects to its neighbor.
data LayerDefinition a = LayerDefinition
  { neuronDef   :: Neuron a        -- ^ activation neuron for this layer
  , neuronCount :: Int             -- ^ number of neurons in the layer
  , connect     :: Connectivity a  -- ^ builder for the connectivity matrix
  }
-- | A materialized layer: concrete weights and biases plus the neuron
-- that produces its activations.
data Layer a = Layer
  { weightMatrix :: Matrix a  -- ^ incoming connection weights
  , biasVector   :: Vector a  -- ^ one bias per neuron
  , neuron       :: Neuron a  -- ^ activation neuron
  }
-- | Serializable view of a 'Layer': just the numeric state (weights and
-- biases), without the neuron's functions.
data ShowableLayer a = ShowableLayer
  { weights :: Matrix a  -- ^ weight matrix of the layer
  , biases  :: Vector a  -- ^ bias vector of the layer
  } deriving Show
-- | Binary serialization for 'ShowableLayer': weights are written first,
-- then biases, and they are read back in the same order.
--
-- NOTE(review): the previous version used @ShowableLayer{..}@, which
-- requires the RecordWildCards extension; no LANGUAGE pragma is present
-- in this file, so plain pattern matching is used instead.
instance (Element a, Binary a) => Binary (ShowableLayer a) where
  put (ShowableLayer w b) = do
    put w
    put b
  get = do
    w <- get
    b <- get
    return (ShowableLayer w b)
-- | Given the sizes of two adjacent layers, produce the connectivity
-- (mask) matrix between them.
type Connectivity a = Int -> Int -> Matrix a
-- | A transformation applied to an infinite stream of uniform random
-- numbers (e.g. 'normals', 'uniforms', 'boundedUniforms').
type RandomTransform a = [a] -> [a]
-- | Build a 'Layer' between two layer definitions.  The weight matrix is
-- random noise (shaped by the given transform) masked by the destination
-- definition's connectivity; the bias vector is random noise as well.
-- The layer adopts the neuron of the source definition.
createLayer ::
  (RandomGen g, Random a, Floating (Vector a), Container Vector a, Floating a)
  => RandomTransform a -> g -> LayerDefinition a -> LayerDefinition a -> Layer a
createLayer transform gen srcDef dstDef =
  Layer (weightNoise * connectivity rows cols)
        (biasNoise * onesVec)
        (neuronDef srcDef)
  where (genW, genB) = split gen           -- independent streams for weights/biases
        rows         = neuronCount dstDef
        cols         = neuronCount srcDef
        connectivity = connect dstDef
        weightNoise  = (rows >< cols) (randomList transform genW)
        biasNoise    = rows |> randomList transform genB
        onesVec      = rows |> repeat 1
-- | Multiply a layer's weights and biases by a constant factor, keeping
-- its neuron unchanged.
scaleLayer :: (Floating (Vector a), Container Vector a)
  => a -> Layer a -> Layer a
scaleLayer k (Layer w b n) = Layer (k `scale` w) (k `scale` b) n
-- | Fully connected mask: an i-by-j matrix of ones, i.e. every neuron in
-- one layer is connected to every neuron in the other.
connectFully :: Int -> Int -> Matrix Float
connectFully rows cols = (rows >< cols) (repeat 1)
-- | Strip a 'Layer' down to its serializable numeric state.
layerToShowable :: (Floating (Vector a), Container Vector a, Floating a)
  => Layer a -> ShowableLayer a
layerToShowable (Layer w b _) = ShowableLayer w b
-- | Rebuild a 'Layer' from its saved numeric state, taking the neuron
-- from the accompanying layer definition.
showableToLayer :: (Floating (Vector a), Container Vector a, Floating a)
  => (ShowableLayer a, LayerDefinition a) -> Layer a
showableToLayer (ShowableLayer w b, def) = Layer w b (neuronDef def)
-- | Infinite stream of random numbers drawn from the generator and
-- reshaped by the given distribution transform.
randomList :: (RandomGen g, Random a, Floating a)
  => RandomTransform a -> g -> [a]
randomList transform gen = transform (randoms gen)
-- | Box-Muller transform: convert two independent uniform samples on
-- (0,1] into two independent standard-normal samples.
--
-- Fix: the radius must be @sqrt ((-2) * log x1)@.  Since @log x1 <= 0@
-- on (0,1], the previous @sqrt (2 * log x1)@ took the square root of a
-- negative number and produced NaN for every input.
boxMuller :: Floating a => a -> a -> (a, a)
boxMuller x1 x2 = (z1, z2)
  where r  = sqrt ((-2) * log x1)
        z1 = r * cos (2 * pi * x2)
        z2 = r * sin (2 * pi * x2)
-- | Transform a stream of uniform samples into normally distributed
-- samples, consuming the input two at a time via 'boxMuller'.  A
-- trailing odd element (or empty input) yields no further output.
normals :: Floating a => [a] -> [a]
normals (a:b:rest) =
  let (n1, n2) = boxMuller a b
  in n1 : n2 : normals rest
normals _ = []
-- | Identity transform: the raw generator stream is already uniform, so
-- it is passed through unchanged.
uniforms :: Floating a => [a] -> [a]
uniforms = id
-- | Rescale uniform samples on [0,1] into the interval [lower, upper]
-- with the affine map @x -> lower + x * (upper - lower)@.
--
-- Fix: the previous body wrote @(upper lower)@ — applying @upper@ as a
-- function — instead of the intended difference @(upper - lower)@.
boundedUniforms :: Floating a => (a, a) -> [a] -> [a]
boundedUniforms (lower, upper) xs = map affine xs
  where affine x = lower + x * (upper - lower)