-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/
-- | High Performance Neural Network in Haskell
--
-- High Performance Neural Network in Haskell
--
-- Provides fast training algorithms using hmatrix's bindings to GSL and
-- custom bindings to the liblbfgs C-library
--
-- Supported training algorithms: Gradient Descent, Conjugate Gradient,
-- BFGS, LBFGS
--
--
-- - Users should focus on AI.Model for most usages
-- (classification / regression)
-- - Other modules are provided for user expansion if needed
--
--
-- Go to https://github.com/ktklam9/HaskellNN for examples and
-- tests for usage
@package HaskellNN
@version 0.1.3
-- | Efficient representation of an Artificial Neural Network using vector
-- to represent the weights between each layer
--
-- This module provides the neural network data representation that will
-- be used extensively
module AI.Network
-- | The representation of an Artificial Neural Network
data Network
Network :: (Double -> Double) -> (Double -> Double) -> Double -> Vector Double -> [Int] -> Network
-- | The activation function for each neuron
activation :: Network -> (Double -> Double)
-- | The derivative of the activation function
derivative :: Network -> (Double -> Double)
-- | The regularization constant
lambda :: Network -> Double
-- | The vector of the weights between each layer of the neural network
weights :: Network -> Vector Double
-- | The architecture of the neural network.
--
-- e.g., a network of an architecture of 2-3-1 would have an architecture
-- representation of [2,3,1]
--
-- NOTE: The library will automatically create a bias neuron in each
-- layer, so you do not need to state them explicitly
architecture :: Network -> [Int]
toActivation :: Network -> (Double -> Double)
toDerivative :: Network -> (Double -> Double)
toLambda :: Network -> Double
toWeights :: Network -> Vector Double
-- | Get the list of matrices of weights between each layer. This can be
-- more useful than the barebone vector representation of the weights
toWeightMatrices :: Network -> [Matrix Double]
toArchitecture :: Network -> [Int]
setActivation :: Network -> (Double -> Double) -> Network
setDerivative :: Network -> (Double -> Double) -> Network
setLambda :: Network -> Double -> Network
setWeights :: Network -> Vector Double -> Network
setArchitecture :: Network -> [Int] -> Network
-- | This module provides the signatures for needed functions in a neural
-- network
module AI.Signatures
-- | Type that represents the activation function
type ActivationFunction = Double -> Double
-- | Type that represents the derivative of the activation function
--
-- NOTE: The derivative can be non-trivial and must be continuous
type DerivativeFunction = Double -> Double
-- | Type that represents the error function between the calculated output
-- vector and the expected output vector
type ErrorFunction = Vector Double -> Vector Double -> Double
-- | Type that represents the function that can calculate the total cost of
-- the neural network given the network, the input matrix and an
-- expected output matrix
type CostFunction = Network -> Matrix Double -> Matrix Double -> Double
-- | Type that represents the cost function derivative on the output nodes
type CostDerivative = Network -> Matrix Double -> Matrix Double -> Matrix Double -> Matrix Double
-- | The type to represent a function that can calculate the gradient
-- vector of the weights of the neural network
--
-- NOTE: Must be supplied a function to calculate the cost, the cost
-- derivative of the output neurons, the neural network, the input matrix,
-- and the expected output matrix
type GradientFunction = CostFunction -> CostDerivative -> Network -> Matrix Double -> Matrix Double -> Vector Double
-- | This module provides forward propagation to let the user get the
-- output of the neural network given an input vector
module AI.Calculation.NetworkOutput
-- | Forward propagate to get the network's output
networkOutput :: Network -> Vector Double -> Vector Double
-- | This module represents ways to calculate the gradient vector of the
-- weights of the neural network
--
-- Backpropagation should always be preferred over the Numerical Gradient
-- method
module AI.Calculation.Gradients
-- | Calculate the analytical gradient of the weights of the network by
-- using backpropagation
backpropagation :: GradientFunction
-- | NOTE: This should only be used as a last resort if for some reason
-- (bugs?) the backpropagation algorithm does not give you good gradients
--
-- The numerical algorithm requires two forward propagations, while the
-- backpropagation algorithm only requires one, so this is more costly
--
-- Also, analytical gradients almost always perform better than numerical
-- gradients
--
-- User must provide an epsilon value. Make sure to use a very small
-- value for the epsilon for more accurate gradients
numericalGradients :: Double -> GradientFunction
-- | This module provides common cost functions and their derivatives
module AI.Calculation.Cost
-- | Represents the cost model of the Neural Network
data Cost
-- | The mean-squared cost
MeanSquared :: Cost
-- | The logistic cost
Logistic :: Cost
-- | Gets the cost function associated with the cost model
getCostFunction :: Cost -> CostFunction
-- | Gets the cost derivative associated with the cost model
getCostDerivative :: Cost -> CostDerivative
-- | This module provides common activation functions and their derivatives
module AI.Calculation.Activation
-- | Represents the activation of each neuron in the neural network
data Activation
-- | The sigmoid activation function
Sigmoid :: Activation
-- | The hyperbolic tangent activation function
HyperbolicTangent :: Activation
-- | Get the activation function associated with an activation
getActivation :: Activation -> ActivationFunction
-- | Get the derivative function associated with an activation
getDerivative :: Activation -> DerivativeFunction
-- | This module provides common calculation functions
module AI.Calculation
-- | This module provides training algorithms to train a neural network
-- given training data.
--
-- User should only use LBFGS though because it uses custom bindings to
-- the C-library liblbfgs
--
-- GSL's multivariate minimization algorithms are known to be inefficient
-- http://www.alglib.net/optimization/lbfgsandcg.php#header6 and
-- LBFGS outperforms them on many (of my) tests
module AI.Training
-- | The types of training algorithm to use
--
-- NOTE: These are all batch training algorithms
data TrainingAlgorithm
-- | hmatrix's binding to GSL
GradientDescent :: TrainingAlgorithm
-- | hmatrix's binding to GSL
ConjugateGradient :: TrainingAlgorithm
-- | hmatrix's binding to GSL
BFGS :: TrainingAlgorithm
-- | home-made binding to liblbfgs
LBFGS :: TrainingAlgorithm
-- | Train the neural network given a training algorithm, the training
-- parameters and the training data
trainNetwork :: TrainingAlgorithm -> Cost -> GradientFunction -> Network -> Double -> Int -> Matrix Double -> Matrix Double -> Network
instance Show TrainingAlgorithm
instance Read TrainingAlgorithm
instance Enum TrainingAlgorithm
-- | This module provides a generic module for initialization and training
-- of neural networks
--
-- User must provide the needed functions
module AI.Model.GenericModel
-- | Generic neural network model for expansion
data GenericModel
GenericModel :: Cost -> Network -> GenericModel
-- | The cost model of the model
cost :: GenericModel -> Cost
-- | The neural network to be used for modeling
net :: GenericModel -> Network
-- | Initialize neural network model with the weights randomized within
-- [-1.0,1.0]
initializeModel :: Activation -> Cost -> [Int] -> Double -> StdGen -> GenericModel
-- | Get the output of the model
getOutput :: GenericModel -> Vector Double -> Vector Double
-- | Train the model given the parameters and the training algorithm
trainModel :: GenericModel -> TrainingAlgorithm -> Double -> Int -> Matrix Double -> Matrix Double -> GenericModel
-- | This module provides an initialization for a general neural network
-- model that can do either regression or classification
--
-- If for regression, the training data must be normalized by user to
-- have range of [-1,1]
module AI.Model.General
-- | This is a general neural network model that can be used for
-- classification or regression using HyperbolicTangent as the activation
-- model and MeanSquared as the cost model
initializeGeneral :: [Int] -> Double -> StdGen -> GenericModel
-- | This module provides an initialization for a classification neural
-- network model
--
-- NOTE: This theoretically should be faster than the General model if
-- used for classification
module AI.Model.Classification
-- | Make a neural network model that should be used for classification
-- using the Sigmoid as the activation model and Logistic as the cost
-- model
initializeClassification :: [Int] -> Double -> StdGen -> GenericModel
-- | Provides models interface for easy initialization and training of
-- neural networks
module AI.Model