module Numeric.AD.Lagrangian.Internal where
import Numeric.Optimization.Algorithms.HagerZhang05
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Storable as S
import Numeric.AD
import GHC.IO (unsafePerformIO)
import Numeric.AD.Types
import Numeric.AD.Internal.Classes
import Numeric.LinearAlgebra.Algorithms
import qualified Data.Packed.Vector as V
import qualified Data.Packed.Matrix as M
import Numeric.AD.Internal.Tower
-- | Pair a constraint function with its target value: @g <=> c@ states
--   the equality constraint @g args == c@.
infixr 1 <=>
(<=>) :: ([a] -> a) -> a -> Constraint a
(<=>) g c = (g, c)

-- | An equality constraint: a function of the argument vector together
--   with the value it is required to equal.
type Constraint a = ([a] -> a, a)
type AD2 s r a = AD s (AD r Double)
-- | Numerically minimize an objective subject to equality constraints,
--   using the Hager-Zhang conjugate-gradient method.
--
--   The constrained problem is turned into an unconstrained one by
--   minimizing the squared gradient of the Lagrangian over the original
--   arguments and the Lagrange multipliers jointly (its zeros are the
--   Lagrangian's stationary points).
--
--   Returns @Right (solution, multipliers)@ on convergence, or
--   @Left (result, statistics)@ from the optimizer otherwise.
minimize :: Double
         -- ^ Convergence tolerance handed to 'optimize'
         -> (forall s r. (Mode s, Mode r) => [AD2 s r Double] -> AD2 s r Double)
         -- ^ Objective function to minimize
         -> (forall s r. (Mode s, Mode r) => [Constraint (AD2 s r Double)] )
         -- ^ Equality constraints on the objective's arguments
         -> Int
         -- ^ Number of arguments the objective takes
         -> Either (Result, Statistics) (S.Vector Double, S.Vector Double)
minimize tolerance toMin constraints argCount = result where
    -- Unconstrained surrogate objective over args ++ multipliers.
    obj argsAndLams =
        squaredGrad (lagrangian toMin constraints argCount) argsAndLams
    -- The annotation only pins down the ambiguous element type of the
    -- polymorphic constraint list so 'length' can be applied to it.
    constraintCount = length (constraints :: [Constraint (AD Tower (AD Tower Double))])
    -- Start every coordinate (arguments and multipliers) at 1.0.
    guess = U.replicate (argCount + constraintCount) (1.0 :: Double)
    -- NOTE(review): 'optimize' runs in IO; unsafePerformIO here assumes
    -- it is deterministic and externally side-effect free for these
    -- inputs -- confirm.
    result = case unsafePerformIO $
                    optimize
                        (defaultParameters { printFinal = False })
                        tolerance
                        guess
                        (VFunction (lowerFU obj . U.toList))
                        (VGradient (U.fromList . grad obj . U.toList))
                        Nothing of
        -- 'ToleranceStatisfied' is the library's actual (misspelled)
        -- constructor name; do not "fix" it here.
        (vs, ToleranceStatisfied, _) -> Right (S.take argCount vs,
                                               S.drop argCount vs)
        (_, x, y) -> Left (x, y)
-- | Numerically maximize an objective subject to equality constraints.
--   Implemented by minimizing the negated objective; see 'minimize' for
--   the meaning of the arguments and of the result.
maximize :: Double
         -> (forall s r. (Mode s, Mode r) => [AD2 s r Double] -> AD2 s r Double)
         -> (forall s r. (Mode s, Mode r) => [Constraint (AD2 s r Double)] )
         -> Int
         -> Either (Result, Statistics) (S.Vector Double, S.Vector Double)
-- The negation stays inline: binding @negate1 . f@ in a 'where' clause
-- would lose the rank-2 polymorphism 'minimize' requires.
maximize eps f gs n = minimize eps (negate1 . f) gs n
-- | Build the Lagrangian
--   @L(x, lambda) = f(x) + sum_i lambda_i * (g_i(x) - c_i)@
--   for an objective @f@ and equality constraints @g_i(x) = c_i@.
--
--   The final list carries the original arguments followed by the
--   Lagrange multipliers: the first @argCount@ entries are @x@, the
--   remainder are the @lambda_i@ (one per constraint).
lagrangian :: Num a
           => ([a] -> a)
           -- ^ Objective @f@
           -> [([a] -> a, a)]
           -- ^ Constraints @(g_i, c_i)@ (the 'Constraint' alias, spelled out)
           -> Int
           -- ^ Number of objective arguments
           -> [a]
           -- ^ Arguments followed by multipliers
           -> a
lagrangian f constraints argCount argsAndLams = result where
    (args, lams) = splitAt argCount argsAndLams
    -- Each constraint contributes its residual g args - c.  The original
    -- code wrote @g args c@, applying the constraint's numeric result to
    -- @c@ -- an infinite-type error that never forms a residual.
    appliedConstraints = fmap (\(g, c) -> g args - c) constraints
    result = f args + (sum . zipWith (*) lams $ appliedConstraints)
-- | The squared Euclidean norm of the gradient of @f@ at @vs@.
--   Zero exactly at the stationary points of @f@.
squaredGrad :: Num a
            => (forall s. Mode s => [AD s a] -> AD s a)
            -> [a] -> a
squaredGrad f vs = sum [d * d | d <- grad f vs]
-- | Heuristic check that a candidate point is a strict local minimum of
--   the squared-gradient surrogate objective: the Hessian there must be
--   positive definite (all eigenvalues strictly positive).
feasible :: (forall s r. (Mode s, Mode r) => [AD2 s r Double] -> AD2 s r Double)
-- ^ Objective that was minimized
    -> (forall s r. (Mode s, Mode r) => [Constraint (AD2 s r Double)] )
-- ^ Constraints used to form the Lagrangian
    -> [Double]
-- ^ Candidate point to test
    -> Bool
feasible toMin constraints points = result where
    -- NOTE(review): 'length points' is used as the argument count, so the
    -- multiplier tail of 'argsAndLams' is empty here and the constraint
    -- terms of the Lagrangian drop out -- confirm whether 'points' was
    -- meant to include the Lagrange multipliers as well.
    obj argsAndLams =
        squaredGrad (lagrangian toMin constraints $ length points) argsAndLams
    hessianMatrix = M.fromLists . hessian obj $ points
    -- Positive definite iff every eigenvalue of the (symmetric) Hessian
    -- is strictly positive.
    result = all (>0) . V.toList . eigenvaluesSH $ hessianMatrix