-- Hoogle documentation, generated by Haddock -- See Hoogle, http://www.haskell.org/hoogle/ -- | Various iterative algorithms for optimization of nonlinear functions. -- -- This library implements numerical algorithms to optimize nonlinear -- functions. Optimization means that we try to find a minimum of the -- function. Currently all algorithms guarantee only that local minima -- will be found, not global ones. -- -- Almost any continuously differentiable function f : R^n -> -- R may be optimized by this library. Any further restrictions are -- listed in the modules that need them. -- -- We use the vector package to represent vectors and matrices, -- although it would be possible to use something like hmatrix -- easily. -- -- Currently only CG_DESCENT method is implemented. @package nonlinear-optimization @version 0.3.11 -- | This module implements the algorithms described by Hager and Zhang -- [1]. We use bindings to CG_DESCENT library by the same -- authors, version 3.0 from 18/05/2008 [2]. The library code is also -- licensed under the terms of the GPL. -- --
-- Q_k = 1 + (qdecay)Q_{k-1}, Q_0 = 0
-- C_k = C_{k-1} + (|f_k| - C_{k-1})/Q_k
--
--
-- Defaults to 0.7.
[qdecay] :: Parameters -> Double
-- | Stop rules that define when the iterations should end. Defaults to
-- DefaultStopRule 0.
[stopRules] :: Parameters -> StopRules
-- | How to calculate the estimated error in the function value. Defaults
-- to RelativeEpsilon 1e-6.
[estimateError] :: Parameters -> EstimateError
-- | When to attempt quadratic interpolation in line search. If
-- Nothing then never try a quadratic interpolation step. If
-- Just cutoff, then attempt quadratic interpolation in line
-- search when |f_{k+1} - f_k| / f_k <= cutoff. Defaults to
-- Just 1e-12.
[quadraticStep] :: Parameters -> Maybe Double
-- | If Just tol, then always check that f_{k+1} - f_k <=
-- tol * C_k. Otherwise, if Nothing then no checking of
-- function values is done. Defaults to Nothing.
[debugTol] :: Parameters -> Maybe Double
-- | If Just step, then use step as the initial step of
-- the line search. Otherwise, if Nothing then the initial step
-- is programmatically calculated. Defaults to Nothing.
[initialStep] :: Parameters -> Maybe Double
-- | Defines the maximum number of iterations. The process is aborted when
-- maxItersFac * n iterations are done, where n is the
-- number of dimensions. Defaults to infinity.
[maxItersFac] :: Parameters -> Double
-- | Maximum number of times the bracketing interval grows or shrinks in
-- the line search. Defaults to 50.
[nexpand] :: Parameters -> CInt
-- | Maximum number of secant iterations in line search. Defaults to
-- 50.
[nsecant] :: Parameters -> CInt
-- | Restart the conjugate gradient method after restartFac * n
-- iterations. Defaults to 1.
[restartFac] :: Parameters -> Double
-- | Stop when -alpha * dphi0, the estimated change in function
-- value, is less than funcEpsilon * |f|. Defaults to
-- 0.
[funcEpsilon] :: Parameters -> Double
-- | After encountering NaN while calculating the step length,
-- growth factor when searching for a bracketing interval. Defaults to
-- 1.3.
[nanRho] :: Parameters -> Double
-- | Technical parameters which you probably should not touch.
[techParameters] :: Parameters -> TechParameters
-- | How verbose we should be.
data Verbose
-- | Do not output anything to stdout, which most of the time is
-- good.
Quiet :: Verbose
-- | Print what work is being done on each iteration.
Verbose :: Verbose
-- | Print information about every step, may be useful for troubleshooting.
VeryVerbose :: Verbose
-- | Line search methods that may be used.
data LineSearch
-- | Use approximate Wolfe line search.
ApproximateWolfe :: LineSearch
-- | Use ordinary Wolfe line search, switch to approximate Wolfe when
--
--
-- |f_{k+1} - f_k| < AWolfeFac * C_k
--
--
-- where C_k is the average size of cost and AWolfeFac
-- is the parameter to this constructor.
AutoSwitch :: Double -> LineSearch
-- | Stop rules used to decide when to stop iterating.
data StopRules
-- | DefaultStopRule stop_fac stops when
--
-- -- |g_k|_infty <= max(grad_tol, |g_0|_infty * stop_fac) ---- -- where |g_i|_infty is the maximum absolute component of the -- gradient at the i-th step. DefaultStopRule :: Double -> StopRules -- | AlternativeStopRule stops when -- --
-- |g_k|_infty <= grad_tol * (1 + |f_k|) --AlternativeStopRule :: StopRules -- | How to calculate the estimated error in the function value. data EstimateError -- | AbsoluteEpsilon eps estimates the error as eps. AbsoluteEpsilon :: Double -> EstimateError -- | RelativeEpsilon eps estimates the error as eps * -- C_k. RelativeEpsilon :: Double -> EstimateError -- | Technical parameters which you probably should not touch. You should -- read the papers of CG_DESCENT to understand how you can tune -- these parameters. data TechParameters TechParameters :: Double -> Double -> Double -> Double -> Double -> Double -> Double -> Double -> TechParameters -- | Wolfe line search parameter. Defaults to 0.1. [techDelta] :: TechParameters -> Double -- | Wolfe line search parameter. Defaults to 0.9. [techSigma] :: TechParameters -> Double -- | Decay factor for bracket interval width. Defaults to 0.66. [techGamma] :: TechParameters -> Double -- | Growth factor when searching for initial bracketing interval. Defaults -- to 5. [techRho] :: TechParameters -> Double -- | Lower bound for the conjugate gradient update parameter -- beta_k is techEta * ||d||_2. Defaults to -- 0.01. [techEta] :: TechParameters -> Double -- | Factor used in starting guess for iteration 1. Defaults to -- 0.01. [techPsi0] :: TechParameters -> Double -- | In performing a QuadStep, we evaluate the function at psi1 * -- previous step. Defaults to 0.1. [techPsi1] :: TechParameters -> Double -- | When starting a new CG iteration, our initial guess for the line -- search stepsize is psi2 * previous step. Defaults to -- 2. 
[techPsi2] :: TechParameters -> Double instance GHC.Read.Read Numeric.Optimization.Algorithms.HagerZhang05.Parameters instance GHC.Show.Show Numeric.Optimization.Algorithms.HagerZhang05.Parameters instance GHC.Classes.Ord Numeric.Optimization.Algorithms.HagerZhang05.Parameters instance GHC.Classes.Eq Numeric.Optimization.Algorithms.HagerZhang05.Parameters instance GHC.Read.Read Numeric.Optimization.Algorithms.HagerZhang05.EstimateError instance GHC.Show.Show Numeric.Optimization.Algorithms.HagerZhang05.EstimateError instance GHC.Classes.Ord Numeric.Optimization.Algorithms.HagerZhang05.EstimateError instance GHC.Classes.Eq Numeric.Optimization.Algorithms.HagerZhang05.EstimateError instance GHC.Read.Read Numeric.Optimization.Algorithms.HagerZhang05.StopRules instance GHC.Show.Show Numeric.Optimization.Algorithms.HagerZhang05.StopRules instance GHC.Classes.Ord Numeric.Optimization.Algorithms.HagerZhang05.StopRules instance GHC.Classes.Eq Numeric.Optimization.Algorithms.HagerZhang05.StopRules instance GHC.Read.Read Numeric.Optimization.Algorithms.HagerZhang05.LineSearch instance GHC.Show.Show Numeric.Optimization.Algorithms.HagerZhang05.LineSearch instance GHC.Classes.Ord Numeric.Optimization.Algorithms.HagerZhang05.LineSearch instance GHC.Classes.Eq Numeric.Optimization.Algorithms.HagerZhang05.LineSearch instance GHC.Enum.Enum Numeric.Optimization.Algorithms.HagerZhang05.Verbose instance GHC.Read.Read Numeric.Optimization.Algorithms.HagerZhang05.Verbose instance GHC.Show.Show Numeric.Optimization.Algorithms.HagerZhang05.Verbose instance GHC.Classes.Ord Numeric.Optimization.Algorithms.HagerZhang05.Verbose instance GHC.Classes.Eq Numeric.Optimization.Algorithms.HagerZhang05.Verbose instance GHC.Read.Read Numeric.Optimization.Algorithms.HagerZhang05.TechParameters instance GHC.Show.Show Numeric.Optimization.Algorithms.HagerZhang05.TechParameters instance GHC.Classes.Ord Numeric.Optimization.Algorithms.HagerZhang05.TechParameters instance GHC.Classes.Eq 
Numeric.Optimization.Algorithms.HagerZhang05.TechParameters instance GHC.Read.Read Numeric.Optimization.Algorithms.HagerZhang05.Statistics instance GHC.Show.Show Numeric.Optimization.Algorithms.HagerZhang05.Statistics instance GHC.Classes.Ord Numeric.Optimization.Algorithms.HagerZhang05.Statistics instance GHC.Classes.Eq Numeric.Optimization.Algorithms.HagerZhang05.Statistics instance GHC.Enum.Enum Numeric.Optimization.Algorithms.HagerZhang05.Result instance GHC.Read.Read Numeric.Optimization.Algorithms.HagerZhang05.Result instance GHC.Show.Show Numeric.Optimization.Algorithms.HagerZhang05.Result instance GHC.Classes.Ord Numeric.Optimization.Algorithms.HagerZhang05.Result instance GHC.Classes.Eq Numeric.Optimization.Algorithms.HagerZhang05.Result instance Foreign.Storable.Storable Numeric.Optimization.Algorithms.HagerZhang05.Parameters instance Foreign.Storable.Storable Numeric.Optimization.Algorithms.HagerZhang05.Statistics