module Numeric.AD.Lagrangian.Internal where
import Numeric.Optimization.Algorithms.HagerZhang05
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Storable as S
import Numeric.AD
import GHC.IO (unsafePerformIO)
import Numeric.AD.Types
import Numeric.AD.Internal.Classes
import Numeric.LinearAlgebra.Algorithms
import qualified Data.Packed.Vector as V
import qualified Data.Packed.Matrix as M
import Numeric.AD.Internal.Tower
-- | Build an equality constraint @g(x) = c@ from a constraint function
-- and the constant it must equal. The function is wrapped in 'FU' so its
-- rank-2 polymorphism survives being stored in a list of 'Constraint's.
infixr 1 <=>
(<=>) :: (forall s r. (Mode s, Mode r) => [AD2 s r a] -> AD2 s r a) -> a -> Constraint a
(<=>) constraintFn target = (FU constraintFn, target)
-- | An equality constraint: the constraint function paired with the
-- value it must equal. Construct with '<=>'.
type Constraint a = (FU a, a)
-- | An 'AD' value wrapped in two independent modes, so the same
-- expression can be differentiated twice (e.g. taking the gradient of a
-- squared gradient, as 'squaredGrad' composed with 'grad' does).
type AD2 s r a = AD s (AD r a)
-- | Rank-2 wrapper for a many-variable function polymorphic in both AD
-- modes. Needed so such functions can be stored in data structures
-- (lists of 'Constraint's) without losing their polymorphism.
newtype FU a = FU {unFU :: forall s r. (Mode s, Mode r) => [AD2 s r a] -> AD2 s r a}
-- | Minimize an objective subject to equality constraints.
--
-- The constrained problem is turned into an unconstrained one: the
-- Hager–Zhang conjugate-gradient solver drives the squared norm of the
-- gradient of the Lagrangian to zero, which finds a stationary point of
-- the Lagrangian (use 'feasible' to check it is actually a minimum).
--
-- On success returns 'Right' (solution vector, Lagrange multipliers);
-- otherwise 'Left' with the solver's result code and statistics.
minimize :: Double -- ^ solver tolerance
         -> (forall s r. (Mode s, Mode r) => [AD2 s r Double] -> AD2 s r Double) -- ^ objective
         -> [Constraint Double] -- ^ equality constraints built with '<=>'
         -> Int -- ^ number of arguments the objective takes
         -> Either (Result, Statistics) (S.Vector Double, S.Vector Double)
minimize tolerance toMin constraints argCount = result where
    -- Unconstrained surrogate objective: |∇L(x, λ)|², zero exactly at
    -- stationary points of the Lagrangian. Kept as a function binding so
    -- it generalizes and can be used by both 'lowerFU' and 'grad'.
    obj argsAndLams =
        squaredGrad (lagrangian toMin constraints argCount) argsAndLams
    constraintCount = length constraints
    -- Starting point: all ones, for the arguments and the multipliers.
    guess = U.replicate (argCount + constraintCount) (1.0 :: Double)
    -- NOTE(review): unsafePerformIO presents a pure interface; this
    -- assumes 'optimize' is externally pure (no observable effects
    -- beyond the suppressed printing) — confirm.
    result = case unsafePerformIO $
                 optimize
                     (defaultParameters { printFinal = False })
                     tolerance
                     guess
                     -- Function and gradient callbacks, converting
                     -- between unboxed vectors and lists for AD.
                     (VFunction (lowerFU obj . U.toList))
                     (VGradient (U.fromList . grad obj . U.toList))
                     Nothing of
        -- "ToleranceStatisfied" (sic) is the constructor name actually
        -- exported by the nonlinear-optimization package.
        (vs, ToleranceStatisfied, _) -> Right (S.take argCount vs,
                                               S.drop argCount vs)
        (_, x, y) -> Left (x, y)
-- | Maximize an objective subject to equality constraints.
--
-- Implemented by minimizing the negated objective; see 'minimize' for
-- the meaning of the arguments and of the result.
maximize :: Double
         -> (forall s r. (Mode s, Mode r) => [AD2 s r Double] -> AD2 s r Double)
         -> [Constraint Double]
         -> Int
         -> Either (Result, Statistics) (S.Vector Double, S.Vector Double)
maximize tolerance toMax constraints argCount =
    minimize tolerance (\xs -> negate1 (toMax xs)) constraints argCount
-- | Assemble the Lagrangian
--
-- > L(x, λ) = f(x) + Σ λ_i · (g_i(x) − c_i)
--
-- from an objective @f@ and equality constraints @g_i(x) = c_i@. The
-- argument list carries the @argCount@ primal variables followed by one
-- Lagrange multiplier per constraint.
lagrangian :: (Num a, Mode s, Mode r)
    => (forall s r. (Mode s, Mode r) => [AD2 s r a] -> AD2 s r a)
    -> [Constraint a]
    -> Int
    -> [AD2 s r a]
    -> AD2 s r a
lagrangian f constraints argCount argsAndLams = result where
    -- First argCount entries are the variables, the rest the multipliers.
    (args, lams) = splitAt argCount argsAndLams
    -- Each constraint contributes its residual g_i(x) − c_i, with the
    -- constant c lifted through both AD layers via (auto . auto).
    -- BUG FIX: previously this read "g args (auto . auto) c", applying
    -- the AD2 result of g to further arguments instead of subtracting
    -- the lifted constant — it neither typechecked nor matched the
    -- Lagrangian's definition.
    appliedConstraints = fmap (\(FU g, c) -> g args - (auto . auto $ c)) constraints
    result = f args + (sum . zipWith (*) lams $ appliedConstraints)
-- | Squared Euclidean norm of the gradient of @f@ evaluated at @vs@:
-- @Σ (∂f/∂x_i)²@. Zero exactly at stationary points of @f@.
squaredGrad :: Num a
    => (forall s. Mode s => [AD s a] -> AD s a)
    -> [a] -> a
squaredGrad f vs = sum [partial * partial | partial <- grad f vs]
-- | Second-order check on a candidate solution: 'True' when the Hessian
-- of the squared-gradient surrogate objective is positive definite
-- (all eigenvalues strictly positive) at the given point, i.e. the
-- point is a strict local minimum of the surrogate.
feasible :: (forall s r. (Mode s, Mode r) => [AD2 s r Double] -> AD2 s r Double)
    -> [Constraint Double]
    -> [Double]
    -> Bool
feasible toMin constraints points = positiveDefinite where
    -- Same surrogate objective as 'minimize': |∇L|². Kept as a function
    -- binding so it stays polymorphic enough for 'hessian'.
    obj candidate =
        squaredGrad (lagrangian toMin constraints $ length points) candidate
    -- eigenvaluesSH exploits that a Hessian is symmetric.
    eigenvalues = V.toList . eigenvaluesSH . M.fromLists $ hessian obj points
    positiveDefinite = all (> 0) eigenvalues