-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/
-- | Wrapper of the numeric-optimization package for use with the ad package
--
-- Please see the README on GitHub at
-- https://github.com/msakai/nonlinear-optimization-ad/tree/master/numeric-optimization-ad#readme
@package numeric-optimization-ad
@version 0.1.0.1
-- | This module is a wrapper of Numeric.Optimization that uses
-- ad's automatic differentiation.
module Numeric.Optimization.AD
-- | Synonym of minimizeReverse
minimize :: forall f. Traversable f => Method -> Params (f Double) -> (forall s. Reifies s Tape => f (Reverse s Double) -> Reverse s Double) -> Maybe (f (Double, Double)) -> [Constraint] -> f Double -> IO (Result (f Double))
-- | Minimization of scalar function of one or more variables.
--
-- This is a wrapper around minimize (from Numeric.Optimization) and uses
-- Numeric.AD.Mode.Reverse to compute the gradient.
--
-- It cannot be used with methods that require a Hessian (e.g.
-- Newton).
--
-- Example:
--
--   {-# LANGUAGE FlexibleContexts #-}
--   import Numeric.Optimization.AD
--
--   main :: IO ()
--   main = do
--     result <- minimizeReverse LBFGS def rosenbrock Nothing [] [-3,-4]
--     print (resultSuccess result)   -- True
--     print (resultSolution result)  -- [0.999999999009131,0.9999999981094296]
--     print (resultValue result)     -- 1.8129771632403013e-18
--
--   -- https://en.wikipedia.org/wiki/Rosenbrock_function
--   rosenbrock :: Floating a => [a] -> a
--   -- rosenbrock :: Reifies s Tape => [Reverse s Double] -> Reverse s Double
--   rosenbrock [x,y] = sq (1 - x) + 100 * sq (y - sq x)
--
--   sq :: Floating a => a -> a
--   sq x = x ** 2
--
minimizeReverse :: forall f. Traversable f => Method -> Params (f Double) -> (forall s. Reifies s Tape => f (Reverse s Double) -> Reverse s Double) -> Maybe (f (Double, Double)) -> [Constraint] -> f Double -> IO (Result (f Double))
-- | Minimization of scalar function of one or more variables.
--
-- This is a wrapper around minimize (from Numeric.Optimization) and uses
-- Numeric.AD.Mode.Sparse to compute the gradient and Hessian.
--
-- Unlike minimizeReverse, it can be used with methods that
-- require a Hessian (e.g. Newton).
--
-- Example:
--
--   {-# LANGUAGE FlexibleContexts #-}
--   import Numeric.Optimization.AD
--
--   main :: IO ()
--   main = do
--     result <- minimizeSparse Newton def rosenbrock Nothing [] [-3,-4]
--     print (resultSuccess result)   -- True
--     print (resultSolution result)  -- [0.9999999999999999,0.9999999999999998]
--     print (resultValue result)     -- 1.232595164407831e-32
--
--   -- https://en.wikipedia.org/wiki/Rosenbrock_function
--   rosenbrock :: Floating a => [a] -> a
--   -- rosenbrock :: [AD s (Sparse Double)] -> AD s (Sparse Double)
--   rosenbrock [x,y] = sq (1 - x) + 100 * sq (y - sq x)
--
--   sq :: Floating a => a -> a
--   sq x = x ** 2
--
minimizeSparse :: forall f. Traversable f => Method -> Params (f Double) -> (forall s. f (AD s (Sparse Double)) -> AD s (Sparse Double)) -> Maybe (f (Double, Double)) -> [Constraint] -> f Double -> IO (Result (f Double))
-- | Type of constraint
--
-- Currently, no constraints are supported.
data Constraint
-- | Selection of numerical optimization algorithms
data Method
-- | Conjugate gradient method based on Hager and Zhang [1].
--
-- The implementation is provided by the nonlinear-optimization package [3],
-- which is a binding to [2].
--
-- This method requires the gradient but does not require the Hessian.
--
CGDescent :: Method
-- | Limited-memory BFGS (L-BFGS) algorithm [1]
--
-- The implementation is provided by the lbfgs package [2], which is a binding
-- to liblbfgs [3].
--
-- This method requires the gradient but does not require the Hessian.
--
LBFGS :: Method
-- | Native implementation of Newton's method
--
-- This method requires both the gradient and the Hessian.
Newton :: Method
-- | Whether a Method is supported under the current environment.
isSupportedMethod :: Method -> Bool
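-- A small usage sketch (illustrative; it only uses isSupportedMethod,
-- CGDescent and LBFGS from this module): prefer CGDescent when the current
-- build supports it, and fall back to LBFGS otherwise.
--
--   import Numeric.Optimization.AD
--
--   chooseMethod :: Method
--   chooseMethod = if isSupportedMethod CGDescent then CGDescent else LBFGS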
-- | Parameters for optimization algorithms
--
-- TODO:
--
-- - How to pass algorithm-specific parameters?
-- - Separate callback from other more concrete serializable
--   parameters?
--
data Params a
Params :: Maybe (a -> IO Bool) -> Maybe Double -> Params a
-- | If the callback function returns True, the algorithm execution is
-- terminated.
[paramsCallback] :: Params a -> Maybe (a -> IO Bool)
-- | Tolerance for termination. When tol is specified, the
-- selected algorithm sets some relevant solver-specific tolerance(s)
-- equal to tol.
[paramsTol] :: Params a -> Maybe Double
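-- A hedged sketch of customizing Params (assuming the Default instance used
-- by def in the examples above, plus ordinary record-update syntax; the
-- concrete values are illustrative): tighten the tolerance and install a
-- callback that prints each iterate without stopping early.
--
--   myParams :: Params [Double]
--   myParams = def
--     { paramsTol = Just 1e-9
--     , paramsCallback = Just (\x -> do { print x; return False })
--     }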
-- | Optimization result
data Result a
Result :: Bool -> String -> a -> Double -> Maybe a -> Maybe (Matrix Double) -> Maybe (Matrix Double) -> Statistics -> Result a
-- | Whether or not the optimizer exited successfully.
[resultSuccess] :: Result a -> Bool
-- | Description of the cause of the termination.
[resultMessage] :: Result a -> String
-- | Solution
[resultSolution] :: Result a -> a
-- | Value of the function at the solution.
[resultValue] :: Result a -> Double
-- | Gradient at the solution
[resultGrad] :: Result a -> Maybe a
-- | Hessian at the solution; may be an approximation.
[resultHessian] :: Result a -> Maybe (Matrix Double)
-- | Inverse of Hessian at the solution; may be an approximation.
[resultHessianInv] :: Result a -> Maybe (Matrix Double)
-- | Statistics of the optimization process
[resultStatistics] :: Result a -> Statistics
-- | Statistics of the optimization process
data Statistics
Statistics :: Int -> Int -> Int -> Int -> Statistics
-- | Total number of iterations.
[totalIters] :: Statistics -> Int
-- | Total number of function evaluations.
[funcEvals] :: Statistics -> Int
-- | Total number of gradient evaluations.
[gradEvals] :: Statistics -> Int
-- | Total number of Hessian evaluations.
[hessEvals] :: Statistics -> Int
-- | The bad things that can happen when you use the library.
data OptimizationException
UnsupportedProblem :: String -> OptimizationException
UnsupportedMethod :: Method -> OptimizationException
GradUnavailable :: OptimizationException
HessianUnavailable :: OptimizationException
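-- A hedged sketch of handling these errors (assuming OptimizationException
-- can be caught via Control.Exception, which also implies a Show instance;
-- everything else comes from the examples above). Newton needs a Hessian
-- that minimizeReverse does not provide, so this call is expected to fail:
--
--   import Control.Exception (try)
--
--   tryNewton :: IO ()
--   tryNewton = do
--     r <- try (minimizeReverse Newton def rosenbrock Nothing [] [-3,-4])
--            :: IO (Either OptimizationException (Result [Double]))
--     case r of
--       Left e       -> putStrLn ("optimization failed: " ++ show e)
--       Right result -> print (resultValue result)
--
--   rosenbrock :: Floating a => [a] -> a
--   rosenbrock [x, y] = (1 - x) ** 2 + 100 * (y - x ** 2) ** 2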
-- | A class for types with a default value.
class Default a
-- | The default value for this type.
def :: Default a => a
data AD s a
-- | Embed a constant
auto :: Mode t => Scalar t -> t
data Reverse s a
class Reifies (s :: k) a | s -> a
data Tape
-- | We only store partials in sorted order, so the map contained in a
-- partial will only contain partials with keys equal to or greater than
-- that of the map in which it was found. This should be key for
-- efficiently computing sparse Hessians. There are only (n + k - 1)
-- choose k distinct n-th partial derivatives of a function with k
-- inputs.
data Sparse a
instance Data.Traversable.Traversable f => Numeric.Optimization.IsProblem (Numeric.Optimization.AD.ProblemSparse f)
instance Data.Traversable.Traversable f => Numeric.Optimization.HasGrad (Numeric.Optimization.AD.ProblemSparse f)
instance Data.Traversable.Traversable f => Numeric.Optimization.HasHessian (Numeric.Optimization.AD.ProblemSparse f)
instance Data.Traversable.Traversable f => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.AD.ProblemSparse f))
instance Data.Traversable.Traversable f => Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.AD.ProblemSparse f))
instance Data.Traversable.Traversable f => Numeric.Optimization.IsProblem (Numeric.Optimization.AD.ProblemReverse f)
instance Data.Traversable.Traversable f => Numeric.Optimization.HasGrad (Numeric.Optimization.AD.ProblemReverse f)
instance Data.Traversable.Traversable f => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.AD.ProblemReverse f))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.AD.ProblemReverse f))