-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/
-- | Unified interface to various numerical optimization algorithms
--
-- Please see the README on GitHub at
-- https://github.com/msakai/nonlinear-optimization-ad/tree/master/numeric-optimization#readme
@package numeric-optimization
@version 0.1.1.0
-- | This module aims to provide a unified interface to various numerical
-- optimization algorithms, like scipy.optimize in Python.
--
-- In this module, you need to explicitly provide the function to
-- calculate the gradient, but you can use numeric-optimization-ad
-- or numeric-optimization-backprop to define it using automatic
-- differentiation.
module Numeric.Optimization
-- | Minimization of scalar function of one or more variables.
--
-- This function is intended to provide functionality similar to Python's
-- scipy.optimize.minimize.
--
-- Example:
--
--
-- {-# LANGUAGE OverloadedLists #-}
--
-- import Data.Vector.Storable (Vector)
-- import Numeric.Optimization
--
-- main :: IO ()
-- main = do
--   result <- minimize LBFGS def (WithGrad rosenbrock rosenbrock') [-3,-4]
--   print (resultSuccess result)  -- True
--   print (resultSolution result)  -- [0.999999999009131,0.9999999981094296]
--   print (resultValue result)  -- 1.8129771632403013e-18
--
-- -- https://en.wikipedia.org/wiki/Rosenbrock_function
-- rosenbrock :: Vector Double -> Double
-- rosenbrock [x,y] = sq (1 - x) + 100 * sq (y - sq x)
--
-- rosenbrock' :: Vector Double -> Vector Double
-- rosenbrock' [x,y] =
--   [ 2 * (1 - x) * (-1) + 100 * 2 * (y - sq x) * (-2) * x
--   , 100 * 2 * (y - sq x)
--   ]
--
-- sq :: Floating a => a -> a
-- sq x = x ** 2
--
minimize :: forall prob. (IsProblem prob, Optionally (HasGrad prob), Optionally (HasHessian prob)) => Method -> Params (Vector Double) -> prob -> Vector Double -> IO (Result (Vector Double))
-- | Optimization problems
class IsProblem prob
-- | Objective function
--
-- It is called fun in scipy.optimize.minimize.
func :: IsProblem prob => prob -> Vector Double -> Double
-- | Bounds
bounds :: IsProblem prob => prob -> Maybe (Vector (Double, Double))
-- | Constraints
constraints :: IsProblem prob => prob -> [Constraint]
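--
-- As a minimal sketch (not taken from the package's own documentation),
-- a custom unconstrained problem type might look like this; the Sphere
-- name and the objective are illustrative assumptions:
--
-- import qualified Data.Vector.Storable as VS
--
-- data Sphere = Sphere
--
-- instance IsProblem Sphere where
--   func _ xs = VS.sum (VS.map (** 2) xs)  -- f(x) = sum of squares
--   bounds _ = Nothing                     -- unconstrained
--   constraints _ = []                     -- no constraints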
-- | Optimization problem equipped with gradient information
class IsProblem prob => HasGrad prob
-- | Gradient of a function computed by func
--
-- It is called jac in scipy.optimize.minimize.
grad :: HasGrad prob => prob -> Vector Double -> Vector Double
-- | Pair of func and grad
grad' :: HasGrad prob => prob -> Vector Double -> (Double, Vector Double)
-- | Similar to grad', but destination-passing style is used for the
-- gradient vector.
grad'M :: (HasGrad prob, PrimMonad m) => prob -> Vector Double -> MVector (PrimState m) Double -> m Double
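--
-- Continuing the hypothetical Sphere example above, a HasGrad instance
-- can be written by hand; this sketch assumes grad'M has a usable
-- default in terms of grad:
--
-- instance HasGrad Sphere where
--   grad _ xs = VS.map (2 *) xs   -- gradient of the sum of squares is 2x
--   grad' prob xs = (func prob xs, grad prob xs)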
-- | Optimization problem equipped with hessian information
class IsProblem prob => HasHessian prob
-- | Hessian of a function computed by func
--
-- It is called hess in scipy.optimize.minimize.
hessian :: HasHessian prob => prob -> Vector Double -> Matrix Double
-- | The product of the Hessian H of a function f at
-- x with a vector v.
--
-- It is called hessp in scipy.optimize.minimize.
--
-- See also
-- https://hackage.haskell.org/package/ad-4.5.4/docs/Numeric-AD.html#v:hessianProduct.
hessianProduct :: HasHessian prob => prob -> Vector Double -> Vector Double -> Vector Double
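--
-- A sketch for the hypothetical Sphere example, assuming Matrix Double
-- here is hmatrix's matrix type (imported qualified as LA):
--
-- import qualified Numeric.LinearAlgebra as LA
--
-- instance HasHessian Sphere where
--   hessian _ xs = LA.scale 2 (LA.ident (VS.length xs))  -- H = 2I
--   hessianProduct _ _ v = VS.map (2 *) v                -- H v = 2 v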
-- | Type of constraint
--
-- Currently, no constraints are supported.
data Constraint
-- | Bounds for unconstrained problems, i.e. (-∞,+∞).
boundsUnconstrained :: Int -> Vector (Double, Double)
-- | Whether all lower bounds are -∞ and all upper bounds are +∞.
isUnconstainedBounds :: Vector (Double, Double) -> Bool
-- | Wrapper type for adding gradient function to a problem
data WithGrad prob
WithGrad :: prob -> (Vector Double -> Vector Double) -> WithGrad prob
-- | Wrapper type for adding hessian to a problem
data WithHessian prob
WithHessian :: prob -> (Vector Double -> Matrix Double) -> WithHessian prob
-- | Wrapper type for adding bounds to a problem
data WithBounds prob
WithBounds :: prob -> Vector (Double, Double) -> WithBounds prob
-- | Wrapper type for adding constraints to a problem
data WithConstraints prob
WithConstraints :: prob -> [Constraint] -> WithConstraints prob
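--
-- These wrappers nest. A sketch, reusing rosenbrock and rosenbrock'
-- from the example at the top of this module and restricting both
-- variables to the (arbitrarily chosen) interval [-4, 4]; building the
-- bounds vector with fromList is an assumption of this sketch:
--
-- import qualified Data.Vector as V
--
-- problem :: WithBounds (WithGrad (Vector Double -> Double))
-- problem = WithBounds (WithGrad rosenbrock rosenbrock')
--                      (V.fromList [(-4, 4), (-4, 4)])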
-- | Selection of numerical optimization algorithms
data Method
-- | Conjugate gradient method based on Hager and Zhang [1].
--
-- The implementation is provided by the nonlinear-optimization package
-- [3], which is a binding to [2].
--
-- This method requires the gradient but does not require the hessian.
--
-- - [1] W. W. Hager and H. Zhang. A new conjugate gradient method
--   with guaranteed descent and an efficient line search (2005),
--   SIAM Journal on Optimization, Vol 16, Num. 1, pp. 170-192.
-- - [2] Hager and Zhang's CG_DESCENT C library
-- - [3] https://hackage.haskell.org/package/nonlinear-optimization
--
CGDescent :: Method
-- | Limited memory BFGS (L-BFGS) algorithm [1]
--
-- The implementation is provided by the lbfgs package [2], which is a
-- binding of liblbfgs [3].
--
-- This method requires the gradient but does not require the hessian.
--
-- - [1] J. Nocedal. Updating Quasi-Newton Matrices with Limited
--   Storage (1980), Mathematics of Computation, Vol 35, Num. 151,
--   pp. 773-782.
-- - [2] https://hackage.haskell.org/package/lbfgs
-- - [3] https://github.com/chokkan/liblbfgs
--
LBFGS :: Method
-- | Limited memory BFGS algorithm with bound constraints (L-BFGS-B)
-- [1][2][3]
--
-- The implementation is provided by the l-bfgs-b package [4], which is
-- a binding to the L-BFGS-B Fortran code [5].
--
-- This method requires the gradient but does not require the hessian.
--
-- - [1] R. H. Byrd, P. Lu and J. Nocedal. A Limited Memory
--   Algorithm for Bound Constrained Optimization (1995), SIAM Journal
--   on Scientific and Statistical Computing, Vol 16, Num. 5,
--   pp. 1190-1208.
-- - [2] C. Zhu, R. H. Byrd and J. Nocedal. L-BFGS-B: Algorithm 778:
--   L-BFGS-B, FORTRAN routines for large scale bound constrained
--   optimization (1997), ACM Transactions on Mathematical Software,
--   Vol 23, Num. 4, pp. 550-560.
-- - [3] J. L. Morales and J. Nocedal. L-BFGS-B: Remark on Algorithm
--   778: L-BFGS-B, FORTRAN routines for large scale bound constrained
--   optimization (2011), ACM Transactions on Mathematical Software,
--   Vol 38, Num. 7, pp. 1-4.
-- - [4] https://hackage.haskell.org/package/l-bfgs-b
-- - [5] http://users.iems.northwestern.edu/~nocedal/lbfgsb.html
--
LBFGSB :: Method
-- | Naïve implementation of the Newton method in Haskell
--
-- This method requires both the gradient and the hessian.
Newton :: Method
-- | Whether a Method is supported under the current environment.
isSupportedMethod :: Method -> Bool
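--
-- Since not every method need be available in every build of the
-- package, portable code can pick a fallback at runtime (a sketch; the
-- preference order is arbitrary):
--
-- chooseMethod :: Method
-- chooseMethod
--   | isSupportedMethod CGDescent = CGDescent
--   | otherwise = LBFGS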
-- | Parameters for optimization algorithms
--
-- TODO:
--
-- - Better way to pass algorithm-specific parameters?
-- - Separate paramsCallback from other more concrete
--   serializable parameters?
--
data Params a
Params :: Maybe (a -> IO Bool) -> Maybe Double -> Maybe Double -> Maybe Double -> Maybe Int -> Maybe Int -> Maybe Int -> Params a
-- | If callback function returns True, the algorithm execution is
-- terminated.
[paramsCallback] :: Params a -> Maybe (a -> IO Bool)
-- | Tolerance for termination. When tol is specified, the
-- selected algorithm sets some relevant solver-specific tolerance(s)
-- equal to tol.
--
-- If specified, this value is used as the default for paramsFTol and
-- paramsGTol.
[paramsTol] :: Params a -> Maybe Double
-- | LBFGS stops iteration when the delta-based convergence test (see
-- paramsPast) is enabled and the following condition is met:
--
-- \[ \frac{f' - f}{f} < \mathrm{paramsFTol} \]
--
-- where f' is the objective value of past
-- (paramsPast) iterations ago, and f is the objective
-- value of the current iteration. The default value is 1e-5.
--
-- LBFGSB stops iteration when the following condition is met:
--
-- \[ \frac{f^k - f^{k+1}}{\max\{|f^k|, |f^{k+1}|, 1\}} \le \mathrm{paramsFTol} \]
--
-- The default value is 1e7 * (epsilon :: Double) =
-- 2.220446049250313e-9.
[paramsFTol] :: Params a -> Maybe Double
-- | LBFGSB stops iteration when \( \max_i |\mathit{pg}_i| \le \mathrm{paramsGTol} \),
-- where \( \mathit{pg}_i \) is the i-th component of the projected gradient.
[paramsGTol] :: Params a -> Maybe Double
-- | Maximum number of iterations.
--
-- Currently only LBFGSB, CGDescent, and Newton use
-- this.
[paramsMaxIters] :: Params a -> Maybe Int
-- | Distance for delta-based convergence test in LBFGS
--
-- This parameter determines the distance, in iterations, to compute the
-- rate of decrease of the objective function. If the value of this
-- parameter is Nothing, the library does not perform the
-- delta-based convergence test. The default value is Nothing.
[paramsPast] :: Params a -> Maybe Int
-- | The maximum number of variable metric corrections used in
-- LBFGSB to define the limited memory matrix.
[paramsMaxCorrections] :: Params a -> Maybe Int
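--
-- Since Params has a Default instance (def below), individual fields
-- can be overridden with record-update syntax. A sketch with arbitrary
-- illustrative values:
--
-- myParams :: Params (Vector Double)
-- myParams = def
--   { paramsTol = Just 1e-8
--   , paramsMaxIters = Just 200
--   , paramsCallback = Just $ \x -> do
--       print x          -- trace the current iterate
--       return False     -- False = keep iterating; True would terminate
--   }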
-- | Optimization result
data Result a
Result :: Bool -> String -> a -> Double -> Maybe a -> Maybe (Matrix Double) -> Maybe (Matrix Double) -> Statistics -> Result a
-- | Whether or not the optimizer exited successfully.
[resultSuccess] :: Result a -> Bool
-- | Description of the cause of the termination.
[resultMessage] :: Result a -> String
-- | Solution
[resultSolution] :: Result a -> a
-- | Value of the function at the solution.
[resultValue] :: Result a -> Double
-- | Gradient at the solution
[resultGrad] :: Result a -> Maybe a
-- | Hessian at the solution; may be an approximation.
[resultHessian] :: Result a -> Maybe (Matrix Double)
-- | Inverse of Hessian at the solution; may be an approximation.
[resultHessianInv] :: Result a -> Maybe (Matrix Double)
-- | Statistics of the optimization process
[resultStatistics] :: Result a -> Statistics
-- | Statistics of the optimization process
data Statistics
Statistics :: Int -> Int -> Int -> Int -> Int -> Statistics
-- | Total number of iterations.
[totalIters] :: Statistics -> Int
-- | Total number of function evaluations.
[funcEvals] :: Statistics -> Int
-- | Total number of gradient evaluations.
[gradEvals] :: Statistics -> Int
-- | Total number of hessian evaluations.
[hessianEvals] :: Statistics -> Int
-- | Total number of hessian evaluations.
--
-- Deprecated: Use hessianEvals instead.
[hessEvals] :: Statistics -> Int
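--
-- A sketch of summarizing an optimization run with these records:
--
-- summarize :: Result (Vector Double) -> IO ()
-- summarize r = do
--   putStrLn (resultMessage r)
--   print (resultValue r)
--   print (totalIters (resultStatistics r))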
-- | The bad things that can happen when you use the library.
data OptimizationException
UnsupportedProblem :: String -> OptimizationException
UnsupportedMethod :: Method -> OptimizationException
GradUnavailable :: OptimizationException
HessianUnavailable :: OptimizationException
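--
-- OptimizationException has an Exception instance (see the instance
-- list below), so failures such as passing a gradient-less problem to a
-- method that needs one can be caught with Control.Exception. A sketch,
-- reusing rosenbrock from the example at the top of this module:
--
-- import Control.Exception (try)
--
-- r <- try (minimize LBFGS def rosenbrock [-3,-4])
-- case r of
--   Left GradUnavailable -> putStrLn "this method needs a gradient"
--   Left e               -> print (e :: OptimizationException)
--   Right result         -> print (resultValue result)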
-- | A class for types with a default value.
class Default a
-- | The default value for this type.
def :: Default a => a
-- | Optional constraint
class Optionally c
optionalDict :: Optionally c => Maybe (Dict c)
-- | Utility function to define Optionally instances
hasOptionalDict :: c => Maybe (Dict c)
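--
-- A sketch of declaring, for the hypothetical Sphere type above, that a
-- gradient is available but a Hessian is not:
--
-- instance Optionally (HasGrad Sphere) where
--   optionalDict = hasOptionalDict
--
-- instance Optionally (HasHessian Sphere) where
--   optionalDict = Nothing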
instance GHC.Enum.Bounded Numeric.Optimization.Method
instance GHC.Show.Show Numeric.Optimization.Method
instance GHC.Enum.Enum Numeric.Optimization.Method
instance GHC.Classes.Ord Numeric.Optimization.Method
instance GHC.Classes.Eq Numeric.Optimization.Method
instance GHC.Show.Show Numeric.Optimization.Statistics
instance GHC.Show.Show a => GHC.Show.Show (Numeric.Optimization.Result a)
instance GHC.Classes.Eq Numeric.Optimization.OptimizationException
instance GHC.Show.Show Numeric.Optimization.OptimizationException
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.IsProblem (Numeric.Optimization.WithConstraints prob)
instance Numeric.Optimization.HasGrad prob => Numeric.Optimization.HasGrad (Numeric.Optimization.WithConstraints prob)
instance Numeric.Optimization.HasHessian prob => Numeric.Optimization.HasHessian (Numeric.Optimization.WithConstraints prob)
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.WithConstraints prob))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.WithConstraints prob))
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.IsProblem (Numeric.Optimization.WithBounds prob)
instance Numeric.Optimization.HasGrad prob => Numeric.Optimization.HasGrad (Numeric.Optimization.WithBounds prob)
instance Numeric.Optimization.HasHessian prob => Numeric.Optimization.HasHessian (Numeric.Optimization.WithBounds prob)
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.WithBounds prob))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.WithBounds prob))
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.IsProblem (Numeric.Optimization.WithHessian prob)
instance Numeric.Optimization.HasGrad prob => Numeric.Optimization.HasGrad (Numeric.Optimization.WithHessian prob)
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.HasHessian (Numeric.Optimization.WithHessian prob)
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.WithHessian prob))
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.WithHessian prob))
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.IsProblem (Numeric.Optimization.WithGrad prob)
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.HasGrad (Numeric.Optimization.WithGrad prob)
instance Numeric.Optimization.HasHessian prob => Numeric.Optimization.HasHessian (Numeric.Optimization.WithGrad prob)
instance Numeric.Optimization.IsProblem prob => Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Numeric.Optimization.WithGrad prob))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian prob) => Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Numeric.Optimization.WithGrad prob))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasGrad (Data.Vector.Storable.Vector GHC.Types.Double -> GHC.Types.Double))
instance Numeric.Optimization.Optionally (Numeric.Optimization.HasHessian (Data.Vector.Storable.Vector GHC.Types.Double -> GHC.Types.Double))
instance Numeric.Optimization.IsProblem (Data.Vector.Storable.Vector GHC.Types.Double -> GHC.Types.Double)
instance GHC.Exception.Type.Exception Numeric.Optimization.OptimizationException
instance GHC.Base.Functor Numeric.Optimization.Result
instance Data.Default.Class.Default (Numeric.Optimization.Params a)
instance Data.Functor.Contravariant.Contravariant Numeric.Optimization.Params