| Safe Haskell | None |
|---|---|
AI.NeuralNetworks.Simple
Description
A simple parallel neural network implementation.
import AI.NeuralNetworks.Simple
import Text.Printf
import System.Random
import Control.Monad

-- Run the network on two inputs and extract its single output.
calcXor net x y =
    let [r] = runNeuralNetwork net [x, y]
    in r

-- Mean squared error of the network over the four XOR cases.
mse net =
    let square x = x * x
        e1 = square $ calcXor net 0 0
        e2 = square $ calcXor net 1 0 - 1
        e3 = square $ calcXor net 0 1 - 1
        e4 = square $ calcXor net 1 1
    in 0.5 * (e1 + e2 + e3 + e4)

-- Stop once the error is small enough or 10000 generations have passed,
-- logging progress every 100 generations.
stopf best gnum = do
    let e = mse best
    when (gnum `rem` 100 == 0) $
        printf "Generation: %02d, MSE: %.4f\n" gnum e
    return $ e < 0.002 || gnum >= 10000

main = do
    gen <- newStdGen
    -- 2 inputs, one hidden layer of 2 neurons, 1 output; random weights in [-0.45, 0.45].
    let (randomNet, _) = randomNeuralNetwork gen [2, 2, 1] [Logistic, Logistic] 0.45
        examples = [ ([0,0],[0]), ([0,1],[1]), ([1,0],[1]), ([1,1],[0]) ]
    net <- backpropagationBatchParallel randomNet examples 0.4 stopf :: IO (NeuralNetwork Double)
    putStrLn ""
    putStrLn $ "Result: " ++ show net
    _ <- printf "0 xor 0 = %.4f\n" (calcXor net 0 0)
    _ <- printf "1 xor 0 = %.4f\n" (calcXor net 1 0)
    _ <- printf "0 xor 1 = %.4f\n" (calcXor net 0 1)
    printf "1 xor 1 = %.4f" (calcXor net 1 1)
- data ActivationFunction
- data NeuralNetwork a
- data WeightDeltas a
- emptyNeuralNetwork :: [Word16] -> [ActivationFunction] -> NeuralNetwork a
- getWeights :: NeuralNetwork a -> [((Word16, Word16, Word16), a)]
- setWeights :: [((Word16, Word16, Word16), a)] -> NeuralNetwork a -> NeuralNetwork a
- runNeuralNetwork :: (Num a, Floating a) => NeuralNetwork a -> [a] -> [a]
- backpropagationOneStep :: (Num a, Floating a) => NeuralNetwork a -> a -> [a] -> [a] -> WeightDeltas a
- backpropagationStochastic :: (Num a, Floating a) => NeuralNetwork a -> [([a], [a])] -> a -> (NeuralNetwork a -> Int -> IO Bool) -> IO (NeuralNetwork a)
- backpropagationBatchParallel :: (Num a, Floating a, NFData a) => NeuralNetwork a -> [([a], [a])] -> a -> (NeuralNetwork a -> Int -> IO Bool) -> IO (NeuralNetwork a)
- applyWeightDeltas :: (Num a, Floating a) => WeightDeltas a -> NeuralNetwork a -> NeuralNetwork a
- unionWeightDeltas :: (Num a, Floating a) => [WeightDeltas a] -> WeightDeltas a
- randomNeuralNetwork :: (RandomGen g, Random a, Num a, Ord a) => g -> [Word16] -> [ActivationFunction] -> a -> (NeuralNetwork a, g)
- crossoverCommon :: (Num a, RandomGen g) => g -> NeuralNetwork a -> NeuralNetwork a -> ([NeuralNetwork a], g)
- crossoverMerge :: (Num a, RandomGen g) => (a -> a -> a) -> g -> NeuralNetwork a -> NeuralNetwork a -> ([NeuralNetwork a], g)
- mutationCommon :: (Random a, Num a, RandomGen g) => Double -> a -> g -> NeuralNetwork a -> (NeuralNetwork a, g)
Documentation
data ActivationFunction Source
Activation function
data NeuralNetwork a Source
Neural network
Instances
| Eq a => Eq (NeuralNetwork a) |
| Read a => Read (NeuralNetwork a) |
| Show a => Show (NeuralNetwork a) |
| NFData a => NFData (NeuralNetwork a) |
data WeightDeltas a Source
Deltas calculated by the backpropagation algorithm
Instances
| Eq a => Eq (WeightDeltas a) |
| Read a => Read (WeightDeltas a) |
| Show a => Show (WeightDeltas a) |
| NFData a => NFData (WeightDeltas a) |
emptyNeuralNetwork Source
Arguments
| :: [Word16] | Number of neurons in each layer |
| -> [ActivationFunction] | Activation functions |
| -> NeuralNetwork a | New neural network |
Neural network with all weights set to zero.
{-
2 input neurons,
one hidden layer with 2 neurons and tanh activation function,
one output layer with 1 neuron and tanh activation function
-}
emptyNeuralNetwork [2, 2, 1] [Tanh, Tanh]
getWeights Source
Arguments
| :: NeuralNetwork a | Neural network |
| -> [((Word16, Word16, Word16), a)] | Weights (layer 0.., neuron 1.., input 0..) |
Weights of the given neural network.
setWeights Source
Arguments
| :: [((Word16, Word16, Word16), a)] | Weights |
| -> NeuralNetwork a | Neural network |
| -> NeuralNetwork a | Neural network with changed weights |
Change the weights of the given neural network.
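As a sketch of how getWeights and setWeights compose (net is assumed to be any NeuralNetwork Double; the scaling factor 0.5 is arbitrary):

-- Hypothetical example: halve every weight of the network.
let halved = setWeights [ (k, w * 0.5) | (k, w) <- getWeights net ] net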
runNeuralNetwork Source
Arguments
| :: (Num a, Floating a) | |
| => NeuralNetwork a | Neural network |
| -> [a] | Input signal |
| -> [a] | Output signal |
Run the neural network.
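For example, with the [2,2,1] network trained in the XOR example above, the input list has two elements and the output list has one:

let [y] = runNeuralNetwork net [1, 0]  -- y should be close to 1 for a trained XOR net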
backpropagationOneStep Source
Arguments
| :: (Num a, Floating a) | |
| => NeuralNetwork a | Current neural network |
| -> a | Learning rate |
| -> [a] | Input |
| -> [a] | Expected output |
| -> WeightDeltas a | Calculated deltas |
Run one step of the backpropagation algorithm.
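A minimal sketch of one manual training step (net is assumed to be a NeuralNetwork Double; the learning rate 0.4 matches the XOR example; applyWeightDeltas is documented below):

let deltas = backpropagationOneStep net 0.4 [0, 1] [1]
    net'   = applyWeightDeltas deltas net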
backpropagationStochastic Source
Arguments
| :: (Num a, Floating a) | |
| => NeuralNetwork a | Neural network |
| -> [([a], [a])] | Trainset: inputs and expected outputs |
| -> a | Learning rate |
| -> (NeuralNetwork a -> Int -> IO Bool) | Stop function; first argument is the current network, second is the generation number |
| -> IO (NeuralNetwork a) | Trained neural network |
Run the backpropagation algorithm in stochastic mode.
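Usage mirrors the batch trainer from the XOR example; assuming the same examples trainset and stopf stop function are in scope, a stochastic run would look like:

trained <- backpropagationStochastic randomNet examples 0.4 stopf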
backpropagationBatchParallel Source
Arguments
| :: (Num a, Floating a, NFData a) | |
| => NeuralNetwork a | Neural network |
| -> [([a], [a])] | Trainset: inputs and expected outputs |
| -> a | Learning rate |
| -> (NeuralNetwork a -> Int -> IO Bool) | Stop function; first argument is the current network, second is the generation number |
| -> IO (NeuralNetwork a) | Trained neural network |
Run the backpropagation algorithm in batch mode. This code runs faster in parallel, so don't forget to use +RTS -N.
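For instance, the XOR example above could be built and run as follows (standard GHC flags, nothing specific to this package; the file name Main.hs is assumed):

ghc -O2 -threaded Main.hs
./Main +RTS -N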
applyWeightDeltas Source
Arguments
| :: (Num a, Floating a) | |
| => WeightDeltas a | Deltas |
| -> NeuralNetwork a | Neural network |
| -> NeuralNetwork a | Neural network with updated weights |
Apply the deltas to the neural network.
unionWeightDeltas Source
Arguments
| :: (Num a, Floating a) | |
| => [WeightDeltas a] | List of WeightDeltas |
| -> WeightDeltas a | United WeightDeltas |
Combine a list of deltas into a single WeightDeltas.
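Together with backpropagationOneStep and applyWeightDeltas, this is enough to hand-roll one batch step; a sketch, reusing the examples trainset from the XOR example:

let ds   = [ backpropagationOneStep net 0.4 input expected | (input, expected) <- examples ]
    net' = applyWeightDeltas (unionWeightDeltas ds) net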
randomNeuralNetwork Source
Arguments
| :: (RandomGen g, Random a, Num a, Ord a) | |
| => g | RandomGen |
| -> [Word16] | Number of neurons in each layer |
| -> [ActivationFunction] | Activation functions |
| -> a | Maximum weight; all weights in NN will be between -maxw and maxw |
| -> (NeuralNetwork a, g) | Random neural network and new RandomGen |
Generate a random neural network.
crossoverCommon Source
Arguments
| :: (Num a, RandomGen g) | |
| => g | RandomGen |
| -> NeuralNetwork a | First neural network |
| -> NeuralNetwork a | Second neural network |
| -> ([NeuralNetwork a], g) | Children and new RandomGen |
Crossover of two neural networks.
crossoverMerge Source
Arguments
| :: (Num a, RandomGen g) | |
| => (a -> a -> a) | Function combining corresponding parent weights |
| -> g | Not used |
| -> NeuralNetwork a | First neural network |
| -> NeuralNetwork a | Second neural network |
| -> ([NeuralNetwork a], g) | Children (exactly one child) and an unchanged copy of the given RandomGen |
Another implementation of crossover: each weight of the child is some function of the corresponding parent weights.
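For instance, a child whose weights are the average of its parents' (the combining function is an arbitrary choice; parent1, parent2 and the generator g are assumed to be in scope):

-- Exactly one child is produced, per the documentation above.
let ([child], g') = crossoverMerge (\a b -> (a + b) / 2) g parent1 parent2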
mutationCommon Source
Arguments
| :: (Random a, Num a, RandomGen g) | |
| :: Double | Fraction of weights to mutate, in (0.0, 1.0) |
| -> a | Maximum weight; mutated weights will be between -maxw and maxw |
| -> g | RandomGen |
| -> NeuralNetwork a | Neural network |
| -> (NeuralNetwork a, g) | New neural network and RandomGen |
Mutate the given neural network.
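A hypothetical sketch of one step of a genetic algorithm built from these primitives (parents p1, p2 and the generator g0 are assumed to be in scope; the mutation rate 0.1 and maximum weight 0.45 are arbitrary):

-- Cross two parents, then mutate the first child (assuming at least one is produced).
let (child : _, g1) = crossoverCommon g0 p1 p2
    (mutant, g2)    = mutationCommon 0.1 0.45 g1 child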