-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/
-- | Unified interface to various numerical optimization algorithms
--
-- Please see the README on GitHub at
-- https://github.com/msakai/nonlinear-optimization-ad/tree/master/numeric-optimization#readme
@package numeric-optimization
@version 0.1.0.0
-- | This module aims to provide a unified interface to various numerical
-- optimization algorithms, like scipy.optimize in Python.
--
-- In this module, you need to explicitly provide the function to
-- calculate the gradient, but you can use numeric-optimization-ad or
-- numeric-optimization-backprop to define it using automatic
-- differentiation.
module Numeric.Optimization
-- | Minimization of a scalar function of one or more variables.
--
-- This function is intended to provide functionality similar to Python's
-- scipy.optimize.minimize.
--
-- Example:
--
--
-- {-# LANGUAGE OverloadedLists #-}
--
-- import Data.Vector.Storable (Vector)
-- import Numeric.Optimization
--
-- main :: IO ()
-- main = do
--   result <- minimize LBFGS def (WithGrad rosenbrock rosenbrock') [-3,-4]
--   print (resultSuccess result)  -- True
--   print (resultSolution result)  -- [0.999999999009131,0.9999999981094296]
--   print (resultValue result)  -- 1.8129771632403013e-18
--
-- -- https://en.wikipedia.org/wiki/Rosenbrock_function
-- rosenbrock :: Vector Double -> Double
-- rosenbrock [x,y] = sq (1 - x) + 100 * sq (y - sq x)
--
-- rosenbrock' :: Vector Double -> Vector Double
-- rosenbrock' [x,y] =
--   [ 2 * (1 - x) * (-1) + 100 * 2 * (y - sq x) * (-2) * x
--   , 100 * 2 * (y - sq x)
--   ]
--
-- sq :: Floating a => a -> a
-- sq x = x ** 2
--
minimize :: forall prob. (IsProblem prob, Optionally (HasGrad prob), Optionally (HasHessian prob)) => Method -> Params (Vector Double) -> prob -> Vector Double -> IO (Result (Vector Double))
-- | Optimization problems
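--
-- A bare function of type Vector Double -> Double is already a problem
-- (see the instance list at the bottom of this module). Below is a
-- minimal sketch of a hand-written instance for a hypothetical problem
-- type, reusing sq from the minimize example above:
--
-- data MyProblem = MyProblem
--
-- instance IsProblem MyProblem where
--   func _ [x, y] = sq (1 - x) + 100 * sq (y - sq x)  -- pattern needs OverloadedLists
--   bounds _ = Nothing      -- unconstrained
--   constraints _ = []      -- no constraints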
class IsProblem prob
-- | Objective function
--
-- It is called fun in scipy.optimize.minimize.
func :: IsProblem prob => prob -> Vector Double -> Double
-- | Bounds
bounds :: IsProblem prob => prob -> Maybe (Vector (Double, Double))
-- | Constraints
constraints :: IsProblem prob => prob -> [Constraint]
-- | Optimization problem equipped with gradient information
class IsProblem prob => HasGrad prob
-- | Gradient of a function computed by func
--
-- It is called jac in scipy.optimize.minimize.
grad :: HasGrad prob => prob -> Vector Double -> Vector Double
-- | Pair of func and grad
grad' :: HasGrad prob => prob -> Vector Double -> (Double, Vector Double)
-- | Similar to grad', but uses destination-passing style for the
-- gradient vector.
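--
-- A usage sketch with a hypothetical helper; it assumes the vector types
-- come from Data.Vector.Storable and Data.Vector.Storable.Mutable:
--
-- import qualified Data.Vector.Storable as VS
-- import qualified Data.Vector.Storable.Mutable as VSM
--
-- evalFuncGrad :: HasGrad prob => prob -> VS.Vector Double -> IO (Double, VS.Vector Double)
-- evalFuncGrad prob x = do
--   g <- VSM.new (VS.length x)  -- pre-allocated destination
--   y <- grad'M prob x g        -- fills g with the gradient, returns the value
--   g' <- VS.freeze g
--   return (y, g')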
grad'M :: (HasGrad prob, PrimMonad m) => prob -> Vector Double -> MVector (PrimState m) Double -> m Double
-- | Optimization problem equipped with hessian information
class IsProblem prob => HasHessian prob
-- | Hessian of a function computed by func
--
-- It is called hess in scipy.optimize.minimize.
hessian :: HasHessian prob => prob -> Vector Double -> Matrix Double
-- | The product of the Hessian H of a function f at
-- x with a vector v.
--
-- It is called hessp in scipy.optimize.minimize.
--
-- See also
-- https://hackage.haskell.org/package/ad-4.5.4/docs/Numeric-AD.html#v:hessianProduct.
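--
-- When a full Hessian is available, hessianProduct prob x v should agree
-- with multiplying the Hessian matrix by v. A sketch, assuming the Matrix
-- type here is hmatrix's and using its matrix-vector product (#>):
--
-- hessianProductViaHessian :: HasHessian prob => prob -> Vector Double -> Vector Double -> Vector Double
-- hessianProductViaHessian prob x v = hessian prob x #> v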
hessianProduct :: HasHessian prob => prob -> Vector Double -> Vector Double -> Vector Double
-- | Type of constraint
--
-- Currently, no constraints are supported.
data Constraint
-- | Bounds for unconstrained problems, i.e. (-∞,+∞).
boundsUnconstrained :: Int -> Vector (Double, Double)
-- | Whether all lower bounds are -∞ and all upper bounds are +∞.
isUnconstainedBounds :: Vector (Double, Double) -> Bool
-- | Wrapper type for adding gradient function to a problem
data WithGrad prob
WithGrad :: prob -> (Vector Double -> Vector Double) -> WithGrad prob
-- | Wrapper type for adding hessian to a problem
data WithHessian prob
WithHessian :: prob -> (Vector Double -> Matrix Double) -> WithHessian prob
-- | Wrapper type for adding bounds to a problem
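--
-- For example, wrapping rosenbrock from the minimize example above (a
-- sketch; the Vector literal uses OverloadedLists):
--
-- boundedProblem :: WithBounds (Vector Double -> Double)
-- boundedProblem = WithBounds rosenbrock [(-10, 10), (-10, 10)]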
data WithBounds prob
WithBounds :: prob -> Vector (Double, Double) -> WithBounds prob
-- | Wrapper type for adding constraints to a problem
data WithConstraints prob
WithConstraints :: prob -> [Constraint] -> WithConstraints prob
-- | Selection of numerical optimization algorithms
data Method
-- | Conjugate gradient method based on Hager and Zhang [1].
--
-- The implementation is provided by the nonlinear-optimization package
-- [3], which is a binding to [2].
--
-- This method requires the gradient but does not require the Hessian.
--
-- * [1] Hager, W. W. and Zhang, H. A new conjugate gradient method with
--   guaranteed descent and an efficient line search. SIAM Journal on
--   Optimization, 16 (2005), 170-192.
--
-- * [2] CG_DESCENT, Hager and Zhang's C implementation of the method.
--
-- * [3] https://hackage.haskell.org/package/nonlinear-optimization
CGDescent :: Method
-- | Limited-memory BFGS (L-BFGS) algorithm [1]
--
-- The implementation is provided by the lbfgs package [2], which is a
-- binding of liblbfgs [3].
--
-- This method requires the gradient but does not require the Hessian.
--
-- * [1] Liu, D. C. and Nocedal, J. On the limited memory BFGS method for
--   large scale optimization. Mathematical Programming, 45 (1989),
--   503-528.
--
-- * [2] https://hackage.haskell.org/package/lbfgs
--
-- * [3] https://github.com/chokkan/liblbfgs
LBFGS :: Method
-- | Native implementation of the Newton method
--
-- This method requires both the gradient and the Hessian.
Newton :: Method
-- | Whether a Method is supported under the current environment.
isSupportedMethod :: Method -> Bool
-- | Parameters for optimization algorithms
--
-- TODO:
--
-- - How to pass algorithm-specific parameters?
-- - Separate callback from other more concrete serializable
--   parameters?
--
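-- Since Params has a Default instance (see below), a value is typically
-- built by record update on def. A sketch that sets a tolerance and a
-- callback printing each iterate without requesting termination:
--
-- myParams :: Params (Vector Double)
-- myParams = def
--   { paramsTol = Just 1e-6
--   , paramsCallback = Just $ \x -> do
--       print x
--       return False  -- False: keep iterating
--   }
--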
data Params a
Params :: Maybe (a -> IO Bool) -> Maybe Double -> Params a
-- | If the callback function returns True, execution of the algorithm
-- is terminated.
[paramsCallback] :: Params a -> Maybe (a -> IO Bool)
-- | Tolerance for termination. When tol is specified, the
-- selected algorithm sets some relevant solver-specific tolerance(s)
-- equal to tol.
[paramsTol] :: Params a -> Maybe Double
-- | Optimization result
data Result a
Result :: Bool -> String -> a -> Double -> Maybe a -> Maybe (Matrix Double) -> Maybe (Matrix Double) -> Statistics -> Result a
-- | Whether or not the optimizer exited successfully.
[resultSuccess] :: Result a -> Bool
-- | Description of the cause of the termination.
[resultMessage] :: Result a -> String
-- | Solution
[resultSolution] :: Result a -> a
-- | Value of the function at the solution.
[resultValue] :: Result a -> Double
-- | Gradient at the solution
[resultGrad] :: Result a -> Maybe a
-- | Hessian at the solution; may be an approximation.
[resultHessian] :: Result a -> Maybe (Matrix Double)
-- | Inverse of Hessian at the solution; may be an approximation.
[resultHessianInv] :: Result a -> Maybe (Matrix Double)
-- | Statistics of the optimization process
[resultStatistics] :: Result a -> Statistics
-- | Statistics of the optimization process
data Statistics
Statistics :: Int -> Int -> Int -> Int -> Statistics
-- | Total number of iterations.
[totalIters] :: Statistics -> Int
-- | Total number of function evaluations.
[funcEvals] :: Statistics -> Int
-- | Total number of gradient evaluations.
[gradEvals] :: Statistics -> Int
-- | Total number of hessian evaluations.
[hessEvals] :: Statistics -> Int
-- | The bad things that can happen when you use the library.
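--
-- Since OptimizationException has an Exception instance (see below), it
-- can be caught with Control.Exception. A sketch; Newton on a problem
-- without gradient information is expected to fail:
--
-- import Control.Exception (try)
--
-- r <- try (minimize Newton def rosenbrock [-3, -4])
-- case r of
--   Left e -> print (e :: OptimizationException)  -- e.g. GradUnavailable
--   Right result -> print (resultSolution result)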
data OptimizationException
UnsupportedProblem :: String -> OptimizationException
UnsupportedMethod :: Method -> OptimizationException
GradUnavailable :: OptimizationException
HessianUnavailable :: OptimizationException
-- | A class for types with a default value.
class Default a
-- | The default value for this type.
def :: Default a => a
-- | Optional constraint
class Optionally c
optionalDict :: Optionally c => Maybe (Dict c)
-- | Utility function to define Optionally instances
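--
-- A sketch for a hypothetical problem type MyProblem that has a HasGrad
-- instance but no HasHessian instance:
--
-- instance Optionally (HasGrad MyProblem) where
--   optionalDict = hasOptionalDict
--
-- instance Optionally (HasHessian MyProblem) where
--   optionalDict = Nothing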
hasOptionalDict :: c => Maybe (Dict c)
instance GHC.Enum.Bounded Numeric.Optimization.Method
instance GHC.Show.Show Numeric.Optimization.Method
instance GHC.Enum.Enum Numeric.Optimization.Method
instance GHC.Classes.Ord Numeric.Optimization.Method
instance GHC.Classes.Eq Numeric.Optimization.Method
instance GHC.Show.Show Numeric.Optimization.OptimizationException
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.IsProblem (Numeric.Optimization.WithConstraints prob)
instance Numeric.Optimization.HasGrad prob => Numeric.Optimization.HasGrad (Numeric.Optimization.WithConstraints prob)
instance Numeric.Optimization.HasHessian prob => Numeric.Optimization.HasHessian (Numeric.Optimization.WithConstraints prob)
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.WithConstraints prob))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.WithConstraints prob))
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.IsProblem (Numeric.Optimization.WithBounds prob)
instance Numeric.Optimization.HasGrad prob => Numeric.Optimization.HasGrad (Numeric.Optimization.WithBounds prob)
instance Numeric.Optimization.HasHessian prob => Numeric.Optimization.HasHessian (Numeric.Optimization.WithBounds prob)
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.WithBounds prob))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.WithBounds prob))
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.IsProblem (Numeric.Optimization.WithHessian prob)
instance Numeric.Optimization.HasGrad prob => Numeric.Optimization.HasGrad (Numeric.Optimization.WithHessian prob)
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.HasHessian (Numeric.Optimization.WithHessian prob)
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.WithHessian prob))
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.WithHessian prob))
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.IsProblem (Numeric.Optimization.WithGrad prob)
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.HasGrad (Numeric.Optimization.WithGrad prob)
instance Numeric.Optimization.HasHessian prob => Numeric.Optimization.HasHessian (Numeric.Optimization.WithGrad prob)
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.WithGrad prob))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.WithGrad prob))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Data.Vector.Storable.Vector GHC.Types.Double -> GHC.Types.Double))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Data.Vector.Storable.Vector GHC.Types.Double -> GHC.Types.Double))
instance Numeric.Optimization.IsProblem (Data.Vector.Storable.Vector GHC.Types.Double -> GHC.Types.Double)
instance GHC.Exception.Type.Exception Numeric.Optimization.OptimizationException
instance GHC.Base.Functor Numeric.Optimization.Result
instance Data.Default.Class.Default (Numeric.Optimization.Params a)
instance Data.Functor.Contravariant.Contravariant Numeric.Optimization.Params