{-# LANGUAGE RankNTypes #-}
module MaxEnt.Internal where
import Numeric.Optimization.Algorithms.HagerZhang05
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Storable as S
import Numeric.AD
import GHC.IO (unsafePerformIO)
import Data.Traversable
import Numeric.AD.Types
import Numeric.AD.Internal.Classes
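-- A note on the imports above (provenance, not behaviour):
-- 'Numeric.Optimization.Algorithms.HagerZhang05' comes from the
-- nonlinear-optimization package (the CG_DESCENT bindings, whose 'Result'
-- type really does spell its success constructor 'ToleranceStatisfied'),
-- while 'Numeric.AD', 'Numeric.AD.Types' and 'Numeric.AD.Internal.Classes'
-- come from the ad package; the latter two modules only exist in older ad
-- releases (the 3.x series), so this module presumably targets one of those.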
sumMap :: Num b => (a -> b) -> [a] -> b
sumMap f = sum . map f
sumWith :: Num c => (a -> b -> c) -> [a] -> [b] -> c
sumWith f xs = sum . zipWith f xs
pOfK :: Floating a => [a] -> [ExpectationFunction a] -> [a] -> Int -> a
pOfK values fs ls k = exp (negate . sumWith (\l f -> l * f (values !! k)) ls $ fs) /
partitionFunc values fs ls
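-- 'pOfK' is the Gibbs form of the maximum-entropy distribution:
--
--   p_k = exp (- sum_j lambda_j * f_j x_k) / Z(lambda)
--
-- where Z is 'partitionFunc' evaluated at the same lambdas.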
pOfKLinear :: Floating a => [[a]] -> [a] -> Int -> a
pOfKLinear matrix ls k =
exp (dot (matrix !! k) ls) /
partitionFuncLinear matrix ls
probs :: (Floating b)
=> [b]
-> [ExpectationFunction b]
-> [b]
-> [b]
probs values fs ls = map (pOfK values fs ls) [0 .. length values - 1]
partitionFunc :: Floating a
=> [a]
-> [ExpectationFunction a]
-> [a]
-> a
partitionFunc values fs ls =
    -- Z(lambda) = sum over x of exp (- sum_j lambda_j * f_j x), i.e. the
    -- normaliser matching the numerator used in 'pOfK'.
    sum [ exp (negate . sumWith (\l f -> l * f x) ls $ fs) | x <- values ]
objectiveFunc :: Floating a
=> [a]
-> [ExpectationFunction a]
-> [a]
-> [a]
-> a
objectiveFunc values fs moments ls = log (partitionFunc values fs ls)
+ sumWith (*) ls moments
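-- 'objectiveFunc' is the convex dual of the entropy maximization problem:
-- minimizing  log Z(lambda) + sum_j lambda_j * mu_j  over the lambdas yields
-- the multipliers of the distribution whose expectations match the supplied
-- moments.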
dot :: Num a => [a] -> [a] -> a
dot x y = sum . zipWith (*) x $ y
partitionFuncLinear :: Floating a
=> [[a]]
-> [a]
-> a
partitionFuncLinear matrix ws = sum $ [ exp (dot as ws) |
as <- matrix]
objectiveFuncLinear :: Floating a
=> [a]
-> [[a]]
-> [a]
-> [a]
-> a
-- Dual objective for the linear formulation: log Z(lambda) - lambda . mu.
-- The sign differs from 'objectiveFunc' because 'pOfKLinear' exponentiates
-- the dot product with a positive sign.
objectiveFuncLinear values as moments ls =
    log (partitionFuncLinear as ls) - dot ls moments
linProbs :: (Floating b)
=> [[b]]
-> [b]
-> [b]
linProbs matrix ls =
    map (pOfKLinear matrix ls) [0 .. length matrix - 1]
entropy :: Floating a => [a] -> a
entropy = negate . sumMap (\p -> p * log p)
type GeneralConstraint a = [a] -> [a] -> a
lagrangian :: Floating a
=> ([a], [GeneralConstraint a], [a])
-> [a]
-> a
lagrangian (values, fs, constants) lamsAndProbs = result where
result = entropy ps + (sum $ zipWith (*) lams constraints)
    constraints = zipWith (-) appliedConstraints constants
appliedConstraints = map (\f -> f values ps) fs
ps = take (length values) lamsAndProbs
lams = drop (length values) lamsAndProbs
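-- The Lagrangian above is  H(p) + sum_j lambda_j * (<f_j>_p - c_j),  with the
-- probabilities and multipliers packed into a single list: the first
-- 'length values' entries are the p's, the rest are the lambdas.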
squaredGrad :: Num a
=> (forall s. Mode s => [AD s a] -> AD s a) -> [a] -> a
squaredGrad f vs = sumMap (\x -> x*x) (grad f vs)
generalObjectiveFunc :: Floating a => (forall b. Floating b =>
([b], [GeneralConstraint b], [b]))
-> [a]
-> a
generalObjectiveFunc params lamsAndProbs =
squaredGrad lang lamsAndProbs where
lang :: Floating a => (forall s. Mode s => [AD s a] -> AD s a)
lang = lagrangian params
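-- Rather than solving the stationarity conditions directly,
-- 'generalObjectiveFunc' minimises the squared norm of the Lagrangian's
-- gradient; any point where this objective reaches zero is a stationary
-- point of 'lagrangian' and hence a candidate maximum-entropy solution.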
toFunction :: (forall a. Floating a => [a] -> a) -> Function Simple
toFunction f = VFunction (f . U.toList)
toGradient :: (forall a. Floating a => [a] -> a) -> Gradient Simple
toGradient f = VGradient (U.fromList . grad f . U.toList)
toDoubleF :: (forall a. Floating a => [a] -> a) -> [Double] -> Double
toDoubleF f x = f x
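-- These adapters bridge the rank-2 polymorphic objectives to the interface
-- expected by 'optimize': 'toFunction' evaluates the objective on a plain
-- unboxed vector, while 'toGradient' differentiates the same objective with
-- 'grad' from Numeric.AD, so callers never have to write a gradient by hand.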
type Constraint a = (ExpectationFunction a, a)
type ExpectationFunction a = (a -> a)
constraint :: Floating a => ExpectationFunction a -> a -> Constraint a
constraint = (,)
average :: Floating a => a -> Constraint a
average m = constraint id m
variance :: Floating a => a -> Constraint a
variance sigma = constraint (^(2 :: Int)) sigma
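-- Usage sketch (illustrative, not part of the exported API): a constraint is
-- just an expectation function paired with its target value, so
--
-- > meanIsHalf :: Floating a => Constraint a
-- > meanIsHalf = average 0.5
--
-- fixes E[x] = 0.5.  Note that 'variance', as written, pairs (^2) with its
-- argument, i.e. it fixes the second moment E[x^2]; that equals the variance
-- only when the mean is zero.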
generalMaxent :: (forall a. Floating a => ([a], [(GeneralConstraint a, a)]))
-> Either (Result, Statistics) [Double]
generalMaxent params = result where
obj :: Floating a => [a] -> a
obj = generalObjectiveFunc objInput
values :: Floating a => [a]
values = fst params
constraints :: Floating a => [(GeneralConstraint a, a)]
constraints = snd params
fsmoments :: Floating a => ([GeneralConstraint a], [a])
fsmoments = unzip constraints
sumToOne :: Floating a => GeneralConstraint a
sumToOne _ xs = sumMap id xs
gcons' :: Floating a => [GeneralConstraint a]
gcons' = fst fsmoments
gcons :: Floating a => [GeneralConstraint a]
gcons = sumToOne : gcons'
moments :: Floating a => [a]
moments = 1 : snd fsmoments
objInput :: Floating b => ([b], [GeneralConstraint b], [b])
objInput = (values, gcons, moments)
fs :: [[Double] -> [Double] -> Double]
fs = fst fsmoments
guess = U.fromList $ replicate
                -- one entry per probability (one per value) plus one Lagrange
                -- multiplier per constraint in 'gcons' (the user constraints
                -- and 'sumToOne'); 'lagrangian' splits the vector that way
                (length (values :: [Double]) + length fs + 1) (1.0 :: Double)
result = case unsafePerformIO (optimize defaultParameters 0.00001 guess
(toFunction obj)
(toGradient obj)
Nothing) of
(vs, ToleranceStatisfied, _) -> Right $ (S.toList vs)
(_, x, y) -> Left (x, y)
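-- A minimal usage sketch (assumptions flagged): 'meanConstraint' and
-- 'exampleGeneral' below are hypothetical names, not part of this module.
-- If the optimizer converges, the returned list holds the probabilities
-- followed by the Lagrange multipliers, in the layout used by 'lagrangian'.
--
-- > meanConstraint :: Floating a => GeneralConstraint a
-- > meanConstraint vals ps = sumWith (*) vals ps
-- >
-- > exampleGeneral :: Either (Result, Statistics) [Double]
-- > exampleGeneral = generalMaxent ([1, 2, 3], [(meanConstraint, 1.5)])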
maxentLinear :: (forall a. Floating a => ([a], ([[a]], [a])))
-> Either (Result, Statistics) [Double]
maxentLinear params = result where
obj :: Floating a => [a] -> a
obj = uncurry (objectiveFuncLinear values) fsmoments
values :: Floating a => [a]
values = fst params
constraints :: Floating a => ([[a]], [a])
constraints = snd params
fsmoments :: Floating a => ([[a]], [a])
fsmoments = constraints
fs :: [[Double]]
fs = fst fsmoments
guess = U.fromList $ replicate
                -- one Lagrange multiplier per linear constraint, i.e. one per
                -- supplied moment (the matrix rows range over values)
                (length (snd fsmoments :: [Double])) (2.0 :: Double)
result = case unsafePerformIO (optimize defaultParameters 0.00001 guess
(toFunction obj)
(toGradient obj)
Nothing) of
(vs, ToleranceStatisfied, _) -> Right $ linProbs fs (S.toList vs)
(_, x, y) -> Left (x, y)
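-- A minimal usage sketch (the name 'exampleLinear' is illustrative only).
-- In this module's indexing, each matrix row corresponds to one value and
-- each column to one constraint, so a single mean-style constraint over
-- three values looks like
--
-- > exampleLinear :: Either (Result, Statistics) [Double]
-- > exampleLinear = maxentLinear ([1, 2, 3], ([[1], [2], [3]], [1.5]))
--
-- which, if the optimizer converges, returns a three-element distribution
-- whose mean over the outcomes 1, 2, 3 is approximately 1.5.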
maxent :: (forall a. Floating a => ([a], [Constraint a]))
-> Either (Result, Statistics) [Double]
maxent params = result where
obj :: Floating a => [a] -> a
obj = uncurry (objectiveFunc values) fsmoments
values :: Floating a => [a]
values = fst params
constraints :: Floating a => [(ExpectationFunction a, a)]
constraints = snd params
fsmoments :: Floating a => ([ExpectationFunction a], [a])
fsmoments = unzip constraints
fs :: [Double -> Double]
fs = fst fsmoments
guess = U.fromList $ replicate
(length fs) (1.0 :: Double)
result = case unsafePerformIO (optimize defaultParameters 0.00001 guess
(toFunction obj)
(toGradient obj)
Nothing) of
(vs, ToleranceStatisfied, _) -> Right $ probs values fs (S.toList vs)
(_, x, y) -> Left (x, y)
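-- A minimal usage sketch (the name 'exampleMean' is illustrative only):
-- recover the maximum-entropy distribution over {1, 2, 3} with mean 1.5.
--
-- > exampleMean :: Either (Result, Statistics) [Double]
-- > exampleMean = maxent ([1, 2, 3], [average 1.5])
--
-- A 'Right' result is the list of probabilities for 1, 2 and 3, in order.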