simple-neural-networks-0.2.0.1: Simple parallel neural networks implementation

Safe Haskell: None

AI.NeuralNetworks.Simple

Description

Simple parallel neural networks implementation. The example below trains a small network to approximate the XOR function:

  import AI.NeuralNetworks.Simple
  import Text.Printf
  import System.Random
  import Control.Monad

  calcXor net x y =
      let [r] = runNeuralNetwork net [x, y]
      in  r

  mse net =
      let square x = x * x
          e1 = square $ calcXor net 0 0
          e2 = square $ calcXor net 1 0 - 1
          e3 = square $ calcXor net 0 1 - 1
          e4 = square $ calcXor net 1 1
      in 0.5 * (e1 + e2 + e3 + e4)

  stopf best gnum = do
      let e = mse best
      when (gnum `rem` 100 == 0) $
        printf "Generation: %02d, MSE: %.4f\n" gnum e
      return $ e < 0.002 || gnum >= 10000

  main = do
      gen <- newStdGen
      let (randomNet, _) = randomNeuralNetwork gen [2,2,1] [Logistic, Logistic] 0.45
          examples = [ ([0,0],[0]), ([0,1],[1]), ([1,0],[1]), ([1,1],[0]) ]
      net <- backpropagationBatchParallel randomNet examples 0.4 stopf :: IO (NeuralNetwork Double)
      putStrLn ""
      putStrLn $ "Result: " ++ show net
      _ <- printf "0 xor 0 = %.4f\n" (calcXor net 0 0)
      _ <- printf "1 xor 0 = %.4f\n" (calcXor net 1 0)
      _ <- printf "0 xor 1 = %.4f\n" (calcXor net 0 1)
      printf "1 xor 1 = %.4f" (calcXor net 1 1)


Documentation

data ActivationFunction

Activation function

Constructors

Tanh

Hyperbolic tangent

Logistic

Logistic function: 1 / (1 + exp (-x))

data NeuralNetwork a

Neural network


data WeightDeltas a

Deltas calculated by the backpropagation algorithm


emptyNeuralNetwork

Arguments

:: [Word16]

Number of neurons in each layer

-> [ActivationFunction]

Activation functions (one per non-input layer)

-> NeuralNetwork a

New neural network

Neural network with all weights set to zero.

 {- 
    2 input neurons,
    one hidden layer with 2 neurons and tanh activation function,
    one output layer with 1 neuron and tanh activation function
 -}
 emptyNeuralNetwork [2, 2, 1] [Tanh, Tanh]

getWeights

Arguments

:: NeuralNetwork a

Neural network

-> [((Word16, Word16, Word16), a)]

Weights, indexed by (layer, neuron, input); layers are numbered from 0, neurons from 1, inputs from 0

Weights of the given neural network.
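
For example, a minimal sketch that dumps every weight together with its index (printWeights is an illustrative helper, not part of the library):

 -- Sketch: print each ((layer, neuron, input), weight) pair on its own line.
 printWeights :: Show a => NeuralNetwork a -> IO ()
 printWeights nn = mapM_ print (getWeights nn)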

setWeights

Arguments

:: [((Word16, Word16, Word16), a)]

Weights

-> NeuralNetwork a

Neural network

-> NeuralNetwork a

Neural network with changed weights

Change weights of the given neural network.
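
Combined with getWeights this allows simple transformations of an existing network; the helper below is an illustrative sketch only:

 -- Sketch: multiply every weight of a network by a constant factor.
 scaleWeights :: Num a => a -> NeuralNetwork a -> NeuralNetwork a
 scaleWeights k nn = setWeights [ (idx, k * w) | (idx, w) <- getWeights nn ] nn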

runNeuralNetwork

Arguments

:: (Num a, Floating a) 
=> NeuralNetwork a

Neural network

-> [a]

Input signal

-> [a]

Output signal

Run the neural network on the given input signal.

backpropagationOneStep

Arguments

:: (Num a, Floating a) 
=> NeuralNetwork a

Current neural network

-> a

Learning rate

-> [a]

Input

-> [a]

Expected output

-> WeightDeltas a

Calculated deltas

Run one step of the backpropagation algorithm.
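
The returned deltas are not applied automatically; a single manual training step might look like the following sketch (trainOnce and the fixed learning rate 0.4 are illustrative, not part of the library):

 -- Sketch: compute deltas for one example and apply them by hand.
 trainOnce :: (Num a, Floating a) => NeuralNetwork a -> [a] -> [a] -> NeuralNetwork a
 trainOnce net input expected =
     let deltas = backpropagationOneStep net 0.4 input expected
     in  applyWeightDeltas deltas net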

backpropagationStochastic

Arguments

:: (Num a, Floating a) 
=> NeuralNetwork a

Neural network

-> [([a], [a])]

Training set: inputs and expected outputs

-> a

Learning rate

-> (NeuralNetwork a -> Int -> IO Bool)

Stop function; the first argument is the current neural network, the second is the generation number

-> IO (NeuralNetwork a)

Trained neural network

Run the backpropagation algorithm in stochastic mode.
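
Usage mirrors the batch variant from the module example; reusing randomNet, examples and stopf from above, a sketch would be:

 -- Sketch: stochastic training with the same stop criterion as the XOR example.
 net <- backpropagationStochastic randomNet examples 0.4 stopf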

backpropagationBatchParallel

Arguments

:: (Num a, Floating a, NFData a) 
=> NeuralNetwork a

Neural network

-> [([a], [a])]

Training set: inputs and expected outputs

-> a

Learning rate

-> (NeuralNetwork a -> Int -> IO Bool)

Stop function; the first argument is the current neural network, the second is the generation number

-> IO (NeuralNetwork a)

Trained neural network

Run the backpropagation algorithm in batch mode. This code runs faster in parallel, so don't forget to run with +RTS -N (the executable must be compiled with GHC's -threaded flag for that option to take effect).

applyWeightDeltas

Arguments

:: (Num a, Floating a) 
=> WeightDeltas a

Deltas

-> NeuralNetwork a

Neural network

-> NeuralNetwork a

Neural network with updated weights

Apply deltas to the neural network.

unionWeightDeltas

Arguments

:: (Num a, Floating a) 
=> [WeightDeltas a]

List of WeightDeltas

-> WeightDeltas a

Combined WeightDeltas

Combine a list of deltas into a single WeightDeltas.
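
Together with backpropagationOneStep and applyWeightDeltas this is enough to hand-roll one batch update; the following is an illustrative sketch (batchStep is not part of the library):

 -- Sketch: one batch step built from the lower-level primitives.
 batchStep :: (Num a, Floating a) => a -> [([a], [a])] -> NeuralNetwork a -> NeuralNetwork a
 batchStep rate trainSet net =
     let deltas = [ backpropagationOneStep net rate input expected | (input, expected) <- trainSet ]
     in  applyWeightDeltas (unionWeightDeltas deltas) net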

randomNeuralNetwork

Arguments

:: (RandomGen g, Random a, Num a, Ord a) 
=> g

RandomGen

-> [Word16]

Number of neurons in each layer

-> [ActivationFunction]

Activation functions (one per non-input layer)

-> a

Maximum weight; all weights in NN will be between -maxw and maxw

-> (NeuralNetwork a, g)

Random neural network and new RandomGen

Generate a random neural network.

crossoverCommon

Arguments

:: (Num a, RandomGen g) 
=> g

RandomGen

-> NeuralNetwork a

First neural network

-> NeuralNetwork a

Second neural network

-> ([NeuralNetwork a], g)

Children and new RandomGen

Crossover of two neural networks.
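
A hedged usage sketch, where parent1, parent2 and gen are assumed to exist:

 -- Sketch: produce children from two parents and thread the generator for further use.
 let (children, gen') = crossoverCommon gen parent1 parent2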

crossoverMerge

Arguments

:: (Num a, RandomGen g) 
=> (a -> a -> a)

Function used to combine corresponding parent weights (the 'some function' mentioned in the description below)

-> g

Not used

-> NeuralNetwork a

First neural network

-> NeuralNetwork a

Second neural network

-> ([NeuralNetwork a], g)

Children (in fact, exactly one child) and an unchanged copy of the RandomGen argument

Another implementation of crossover. Each weight of the child is the given function applied to the corresponding weights of the parents.
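
For example, averaging the parents' weights (a sketch; it assumes the weights are Doubles so that division is available, and parent1, parent2 and gen are placeholders):

 -- Sketch: the child's weights are the mean of the parents' weights.
 let ([child], _) = crossoverMerge (\a b -> (a + b) / 2) gen parent1 parent2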

mutationCommon

Arguments

:: (Random a, Num a, RandomGen g) 
=> Double

Fraction of weights to mutate, in the interval (0.0, 1.0)

-> a

Maximum weight; mutated weights will be between -maxw and maxw

-> g

RandomGen

-> NeuralNetwork a

Neural network

-> (NeuralNetwork a, g)

New neural network and RandomGen

Mutate the given neural network.
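
For instance, mutating roughly 10% of the weights while keeping them within [-0.45, 0.45] (a sketch; nn and gen are placeholders):

 -- Sketch: mutate about 10% of the weights, bounded by 0.45 in absolute value.
 let (mutated, gen') = mutationCommon 0.1 0.45 gen nn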