module AI.BPANN where
import Data.List
import Data.List.Split
import Data.Maybe
import System.Random
-- | A layer of neurons, each paired with a per-neuron annotation of type @a@.
type ALayer a = [(Neuron,a)]
-- | A network is a list of annotated layers, ordered from input to output.
type ANetwork a = [ALayer a]
-- | A plain network carries no per-neuron annotation.
type Network = ANetwork ()
-- | Per-neuron bookkeeping recorded during a forward pass and
-- consumed later by the backpropagation weight update.
data ForwardPassInfo = FPInfo {
o :: Double, -- ^ activation output of the neuron, i.e. @fun net@
net :: Double, -- ^ weighted input sum fed into the activation function
xs :: [Double] -- ^ the inputs this neuron saw during the pass
} deriving Show
-- | A single neuron: its incoming weights plus its activation
-- function and that function's derivative (needed for backprop).
data Neuron = Neuron {
ws :: [Double], -- ^ one weight per input value
fun :: (Double -> Double), -- ^ activation function
fun' :: (Double -> Double) -- ^ derivative of the activation function
}
-- | Neurons render as their weight list only; the activation
-- functions have no useful textual representation.
instance Show Neuron where
  show n = "Neuron: ws=" ++ show (ws n)
-- | A neuron serialized as just its list of incoming weights.
type PackedNeuron = [Double]
-- | Logistic activation function: 1 / (1 + e^(-x)).
-- BUG FIX: the exponent's minus sign was missing (@exp (x)@), which
-- inverts the curve and contradicts the derivative below.
sigmoid :: Double -> Double
sigmoid x = 1.0 / (1 + exp (-x))

-- | Derivative of the logistic function, expressed through 'sigmoid'
-- itself: s(x) * (1 - s(x)).
-- BUG FIX: the subtraction operator had been lost (@(1 sigmoid x)@),
-- which does not even parse.
sigmoid' :: Double -> Double
sigmoid' x = sigmoid x * (1 - sigmoid x)
-- | Builds a full neuron (weights plus activation pair) from packed weights.
type NeuronCreator = PackedNeuron -> Neuron
-- | A hidden-layer neuron using the logistic activation.
sigmoidNeuron :: PackedNeuron -> Neuron
sigmoidNeuron weights = Neuron weights sigmoid sigmoid'
-- | A linear output neuron: identity activation with constant derivative 1.
outputNeuron :: PackedNeuron -> Neuron
outputNeuron weights = Neuron weights id (const 1)
-- | A bias neuron: it always outputs 1 regardless of its inputs, and
-- its constant-zero derivative keeps its weights from being trained.
biasNeuron :: Int -- ^ number of inputs the layer receives
  -> Neuron
biasNeuron inputCount = Neuron (replicate inputCount 1) (const 1) (const 0)
-- | Builds an annotated layer from packed weights: one neuron per
-- weight list, each tagged with the unit annotation.
createLayer :: [PackedNeuron] -> NeuronCreator -> ALayer ()
createLayer pns mkNeuron = [ (mkNeuron pn, ()) | pn <- pns ]
-- | A hidden layer: a bias neuron prepended to one sigmoid neuron per
-- packed-weight list.
sigmoidLayer :: [PackedNeuron] -> ALayer ()
sigmoidLayer pns = (biasNeuron inputCount, ()) : createLayer pns sigmoidNeuron
  where
    -- every neuron in the layer has the same fan-in
    -- NOTE(review): `head` is partial — an empty weight list crashes here
    inputCount = length (head pns)
-- | The final layer: linear output neurons, with no bias neuron added.
outputLayer :: [PackedNeuron] -> ALayer ()
outputLayer packed = createLayer packed outputNeuron
-- | Creates a network with the given layer sizes and all weights
-- drawn uniformly at random from [-1, 1].
--
-- The first element of the size list is the input count, the last is
-- the number of (linear) output neurons, and anything in between
-- sizes a hidden sigmoid layer. Each layer's fan-in is its
-- predecessor's size plus one for the bias neuron.
--
-- NOTE(review): `last hiddenIcsNcs` assumes at least one hidden layer
-- (a size list of length >= 3) — verify against callers.
createRandomNetwork ::
  Int -- ^ seed for the pseudo-random weights
  -> [Int] -- ^ neurons per layer, input layer first
  -> Network
createRandomNetwork seed layerNeuronCounts =
  unpackNetwork wss
  where
    restLayerNeuronCounts' = init layerNeuronCounts
    -- (fan-in, neuron count) per hidden layer; fan-in includes the bias
    hiddenIcsNcs = zip (map (+1) restLayerNeuronCounts') (tail restLayerNeuronCounts')
    (outputIc, outputNc) = ((snd $ last hiddenIcsNcs) + 1, last layerNeuronCounts)
    -- BUG FIX: was `randomRs (1,1)`, which makes every weight exactly 1
    -- and leaves the network unable to break symmetry during training.
    rs = randomRs (-1, 1) $ mkStdGen seed
    -- thread the infinite weight supply through the hidden layers
    (hiddenWss, rs') = foldl
      (\(wss', supply) (ic, nc) ->
        let (sl, supply') = icNcToPackedNeurons ic nc supply
        in (wss' ++ [sl], supply'))
      ([], rs) hiddenIcsNcs
    (outputWss, _) = icNcToPackedNeurons outputIc outputNc rs'
    wss = hiddenWss ++ [outputWss]
-- | Consumes @ic * nc@ weights from the (typically infinite) weight
-- supply, grouping them into @nc@ packed neurons of @ic@ weights
-- each, and returns those groups together with the unconsumed rest.
--
-- Rewritten with 'splitAt' to drop the deprecated
-- @Data.List.Split.splitEvery@; the result is identical.
icNcToPackedNeurons :: Int -> Int -> [Double] -> ([PackedNeuron],[Double])
icNcToPackedNeurons ic nc ws = (chunk taken, rest)
  where
    (taken, rest) = splitAt (ic * nc) ws
    -- split a finite list into groups of ic
    chunk [] = []
    chunk xs = let (g, xs') = splitAt ic xs in g : chunk xs'
-- | Builds a runnable network from packed weights: every weight list
-- but the last becomes a hidden sigmoid layer (with its bias neuron),
-- and the last becomes the linear output layer.
unpackNetwork :: [[PackedNeuron]] -> Network
unpackNetwork wss = map sigmoidLayer (init wss) ++ [outputLayer (last wss)]
-- | Serializes a network back to packed weights — the inverse of
-- 'unpackNetwork'. The bias neuron at the head of each hidden layer
-- is dropped, since 'unpackNetwork' re-creates it.
packNetwork :: Network -> [[PackedNeuron]]
packNetwork n = map packHidden (init n) ++ [packLayer (last n)]
  where
    packLayer = map (ws . fst)
    packHidden = packLayer . tail
-- | Runs the inputs through every layer, recording per-neuron
-- 'ForwardPassInfo'. A constant 1 is prepended to the inputs as the
-- bias activation for the first layer; later layers receive their
-- bias activation from each hidden layer's own bias neuron.
passForward :: Network -> [Double] -> ANetwork ForwardPassInfo
passForward nw xs = snd $ mapAccumL step (1 : xs) nw
  where
    -- feed one layer, emitting its annotated neurons and carrying its
    -- activations forward as the next layer's inputs
    step acts layer =
      let annotated = passForward' layer acts
      in (map (o . snd) annotated, annotated)
-- | Forward pass through one layer, pairing each neuron with its
-- computed 'ForwardPassInfo'.
passForward' :: ALayer a -> [Double] -> ALayer ForwardPassInfo
passForward' layer inputs = [ (n, passForward'' n inputs) | (n, _) <- layer ]
-- | Forward pass through a single neuron: records the weighted input
-- sum, the activation output, and the inputs the neuron saw.
passForward'' :: Neuron -> [Double] -> ForwardPassInfo
passForward'' n inputs =
  FPInfo { o = fun n weightedSum, net = weightedSum, xs = inputs }
  where
    weightedSum = calcNet inputs (ws n)
-- | Weighted input to a neuron: the dot product of the input
-- activations with the neuron's weights.
calcNet :: [Double] -> [Double] -> Double
calcNet inputs weights = sum [ x * w | (x, w) <- zip inputs weights ]
-- | One backpropagation weight update over the whole network. Error
-- terms are propagated backwards (foldr), starting from the output
-- layer's error.
weightUpdate ::
  Double -- ^ learning rate
  -> ANetwork ForwardPassInfo -- ^ result of 'passForward'
  -> [Double] -- ^ expected outputs
  -> Network
weightUpdate alpha fpnw ys = fst $ foldr (weightUpdate' alpha) ([], ds) fpnw
  where
    -- BUG FIX: was `zipWith () ys ...` — the subtraction operator had
    -- been lost. The output error is (expected - actual).
    ds = zipWith (-) ys (map (o . snd) (last fpnw))
-- | Updates one layer's weights and computes the error terms to
-- propagate to the previous layer.
weightUpdate' :: Double -> ALayer ForwardPassInfo -> (Network,[Double]) -> (Network,[Double])
weightUpdate' alpha fpl (nw,ds) = (l':nw, ds')
where
-- update every neuron in the layer, collecting each neuron's delta
(l,δs) = unzip $ zipWith (weightUpdate'' alpha) fpl ds
-- error for each input of this layer: per input index, the sum over
-- this layer's neurons of weight * delta (transpose regroups the
-- per-neuron weight contributions by input index)
ds' = map sum $ transpose $ map (\(n,δ) -> map (\w -> w * δ) (ws n)) (zip l δs)
-- re-annotate the updated neurons with () to form a plain Network layer
l' = (map (\n -> (n,())) l)
-- | Delta-rule update for a single neuron. Its delta is the
-- activation derivative at the recorded net input times the
-- downstream error; each weight moves by alpha * delta * input.
weightUpdate'' :: Double -> (Neuron, ForwardPassInfo) -> Double -> (Neuron, Double)
weightUpdate'' alpha (n, fpi) err = (n { ws = updatedWs }, delta)
  where
    delta = fun' n (net fpi) * err
    updatedWs = zipWith bump (xs fpi) (ws n)
    bump x w = w + alpha * delta * x
-- | A single training step: forward pass on the sample's inputs, then
-- one weight update towards the sample's expected outputs.
backprop ::
  Double -- ^ learning rate
  -> Network
  -> ([Double],[Double]) -- ^ (inputs, expected outputs)
  -> Network
backprop alpha nw (inputs, expected) =
  weightUpdate alpha (passForward nw inputs) expected
-- | Evaluates the network on the given inputs (with the bias
-- activation 1 prepended), returning the output layer's activations.
calculate :: Network -> [Double] -> [Double]
calculate nw inputs = go nw (1 : inputs)
  where
    go [] acts = acts
    go (layer : rest) acts = go rest (calculate' acts layer)
-- | Activations of one layer given the previous layer's activations.
calculate' :: [Double] -> ALayer a -> [Double]
calculate' acts layer = [ fun n (calcNet acts (ws n)) | (n, _) <- layer ]
-- | Squared error of the network on one sample: the sum over output
-- neurons of (expected - actual)^2.
quadErrorNet :: Network -> ([Double], [Double]) -> Double
-- BUG FIX: was `(y o) ** 2` — the subtraction operator had been lost.
quadErrorNet nw (xs, ys) = sum $ zipWith (\o y -> (y - o) ** 2) os ys
  where
    os = calculate nw xs
-- | Total squared error of the network over a whole sample set.
globalQuadError :: Network -> [([Double], [Double])] -> Double
globalQuadError nw samples = sum [ quadErrorNet nw s | s <- samples ]
-- | Infinite list of networks, each trained for one more full epoch
-- (one 'backprop' step per sample) than the previous one.
trainAlot ::
  Double -- ^ learning rate
  -> Network
  -> [([Double],[Double])] -- ^ training samples
  -> [Network]
trainAlot alpha nw samples = iterate epoch nw
  where
    epoch current = foldl (backprop alpha) current samples
-- | Trains until the total squared error over the samples drops below
-- epsilon, returning the first network that converges. Diverges if
-- the error never drops below the threshold.
train ::
  Double -- ^ learning rate
  -> Double -- ^ error threshold (epsilon)
  -> Network
  -> [([Double],[Double])] -- ^ training samples
  -> Network
train alpha epsilon nw samples =
  head $ dropWhile notConverged (trainAlot alpha nw samples)
  where
    notConverged candidate = globalQuadError candidate samples >= epsilon
-- | Trains a 2-2-1 network on boolean AND.
testBoolAnd :: Network
testBoolAnd = train 0.5 0.001 (createRandomNetwork 1 [2,2,1])
  [([0,0],[0]),([0,1],[0]),([1,0],[0]),([1,1],[1])]

-- | Trains a 2-2-1 network on boolean OR.
testBoolOr :: Network
testBoolOr = train 0.5 0.001 (createRandomNetwork 1 [2,2,1])
  [([0,0],[0]),([0,1],[1]),([1,0],[1]),([1,1],[1])]

-- | Trains a 2-2-1 network on boolean XOR.
testBoolXor :: Network
testBoolXor = train 0.5 0.001 (createRandomNetwork 1 [2,2,1])
  [([0,0],[0]),([0,1],[1]),([1,0],[1]),([1,1],[0])]

-- | Trains a 1-1-1 network on boolean NOT.
testBoolNot :: Network
testBoolNot = train 0.5 0.001 (createRandomNetwork 1 [1,1,1])
  [([0],[1]),([1],[0])]