-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/

-- | Automatic Differentiation
--
-- Forward-, reverse- and mixed- mode automatic differentiation
-- combinators with a common API.
--
-- Type-level "branding" is used to both prevent the end user from
-- confusing infinitesimals and to limit unsafe access to the
-- implementation details of each Mode.
--
-- Each mode has a separate module full of combinators.

--
-- instance Lifted $t
--
-- given supplied instances for
--
-- instance Lifted $t => Primal $t where ...
-- instance Lifted $t => Jacobian $t where ...
--
-- The seemingly redundant Lifted $t constraints are
-- caused by Template Haskell staging restrictions.
deriveLifted :: ([Q Pred] -> [Q Pred]) -> Q Type -> Q [Dec]

-- | deriveNumeric f g provides the following instances:
--
-- instance ('Lifted' $f, 'Num' a, 'Enum' a) => 'Enum' ($g a)
-- instance ('Lifted' $f, 'Num' a, 'Eq' a) => 'Eq' ($g a)
-- instance ('Lifted' $f, 'Num' a, 'Ord' a) => 'Ord' ($g a)
-- instance ('Lifted' $f, 'Num' a, 'Bounded' a) => 'Bounded' ($g a)
--
--
--
-- instance ('Lifted' $f, 'Show' a) => 'Show' ($g a)
-- instance ('Lifted' $f, 'Num' a) => 'Num' ($g a)
-- instance ('Lifted' $f, 'Fractional' a) => 'Fractional' ($g a)
-- instance ('Lifted' $f, 'Floating' a) => 'Floating' ($g a)
-- instance ('Lifted' $f, 'RealFloat' a) => 'RealFloat' ($g a)
-- instance ('Lifted' $f, 'RealFrac' a) => 'RealFrac' ($g a)
-- instance ('Lifted' $f, 'Real' a) => 'Real' ($g a)
--
deriveNumeric :: ([Q Pred] -> [Q Pred]) -> Q Type -> Q [Dec]
class Lifted t
showsPrec1 :: (Lifted t, Num a, Show a) => Int -> t a -> ShowS
(==!) :: (Lifted t, Num a, Eq a) => t a -> t a -> Bool
compare1 :: (Lifted t, Num a, Ord a) => t a -> t a -> Ordering
fromInteger1 :: (Lifted t, Num a) => Integer -> t a
(+!, *!, -!) :: (Lifted t, Num a) => t a -> t a -> t a
negate1, signum1, abs1 :: (Lifted t, Num a) => t a -> t a
(/!) :: (Lifted t, Fractional a) => t a -> t a -> t a
recip1 :: (Lifted t, Fractional a) => t a -> t a
fromRational1 :: (Lifted t, Fractional a) => Rational -> t a
toRational1 :: (Lifted t, Real a) => t a -> Rational
pi1 :: (Lifted t, Floating a) => t a
exp1, sqrt1, log1 :: (Lifted t, Floating a) => t a -> t a
(**!, logBase1) :: (Lifted t, Floating a) => t a -> t a -> t a
sin1, atan1, acos1, asin1, tan1, cos1 :: (Lifted t, Floating a) => t a -> t a
sinh1, atanh1, acosh1, asinh1, tanh1, cosh1 :: (Lifted t, Floating a) => t a -> t a
properFraction1 :: (Lifted t, RealFrac a, Integral b) => t a -> (b, t a)
truncate1, floor1, ceiling1, round1 :: (Lifted t, RealFrac a, Integral b) => t a -> b
floatRadix1 :: (Lifted t, RealFloat a) => t a -> Integer
floatDigits1 :: (Lifted t, RealFloat a) => t a -> Int
floatRange1 :: (Lifted t, RealFloat a) => t a -> (Int, Int)
decodeFloat1 :: (Lifted t, RealFloat a) => t a -> (Integer, Int)
encodeFloat1 :: (Lifted t, RealFloat a) => Integer -> Int -> t a
exponent1 :: (Lifted t, RealFloat a) => t a -> Int
significand1 :: (Lifted t, RealFloat a) => t a -> t a
scaleFloat1 :: (Lifted t, RealFloat a) => Int -> t a -> t a
isNaN1, isIEEE1, isNegativeZero1, isDenormalized1, isInfinite1 :: (Lifted t, RealFloat a) => t a -> Bool
atan21 :: (Lifted t, RealFloat a) => t a -> t a -> t a
succ1, pred1 :: (Lifted t, Num a, Enum a) => t a -> t a
toEnum1 :: (Lifted t, Num a, Enum a) => Int -> t a
fromEnum1 :: (Lifted t, Num a, Enum a) => t a -> Int
enumFrom1 :: (Lifted t, Num a, Enum a) => t a -> [t a]
enumFromThen1 :: (Lifted t, Num a, Enum a) => t a -> t a -> [t a]
enumFromTo1 :: (Lifted t, Num a, Enum a) => t a -> t a -> [t a]
enumFromThenTo1 :: (Lifted t, Num a, Enum a) => t a -> t a -> t a -> [t a]
minBound1 :: (Lifted t, Num a, Bounded a) => t a
maxBound1 :: (Lifted t, Num a, Bounded a) => t a
class Iso a b
iso :: Iso a b => f a -> f b
osi :: Iso a b => f b -> f a
instance Iso a a
-- | Reverse-Mode Automatic Differentiation using a single tape.
--
-- This version uses Data.Reflection to update a single tape.
--
-- This is asymptotically faster than using Reverse, which is
-- forced to reify and topologically sort the graph, but it is less
-- friendly to the use of sparks.
module Numeric.AD.Internal.Var
-- | Used to mark variables for inspection during the reverse pass
class Primal v => Var v
var :: Var v => a -> Int -> v a
varId :: Var v => v a -> Int
bind :: (Traversable f, Var v) => f a -> (f (v a), (Int, Int))
unbind :: (Functor f, Var v) => f (v a) -> Array Int a -> f a
unbindMap :: (Functor f, Var v, Num a) => f (v a) -> IntMap a -> f a
unbindWith :: (Functor f, Var v, Num a) => (a -> b -> c) -> f (v a) -> Array Int b -> f c
unbindMapWithDefault :: (Functor f, Var v, Num a) => b -> (a -> b -> c) -> f (v a) -> IntMap b -> f c
data Variable a
Variable :: a -> {-# UNPACK #-} !Int -> Variable a
vary :: Var f => Variable a -> f a
instance Primal Variable
instance Var Variable
instance Monad S
instance Var f => Var (AD f)
module Numeric.AD.Internal.Tower
-- | Tower is an AD Mode that calculates a tangent tower by
-- forward AD, and provides fast diffsUU, diffsUF
newtype Tower a
Tower :: [a] -> Tower a
getTower :: Tower a -> [a]
zeroPad :: Num a => [a] -> [a]
zeroPadF :: (Functor f, Num a) => [f a] -> [f a]
transposePadF :: (Foldable f, Functor f) => a -> f [a] -> [f a]
d :: Num a => [a] -> a
d' :: Num a => [a] -> (a, a)
withD :: (a, a) -> AD Tower a
tangents :: Tower a -> Tower a
bundle :: a -> Tower a -> Tower a
apply :: Num a => (AD Tower a -> b) -> a -> b
getADTower :: AD Tower a -> [a]
tower :: [a] -> AD Tower a
instance Lifted Tower
instance Typeable1 Tower
instance Data a => Data (Tower a)
instance Lifted Tower => Jacobian Tower
instance Lifted Tower => Mode Tower
instance Primal Tower
instance Show a => Show (Tower a)
module Numeric.AD.Internal.Sparse
newtype Index
Index :: (IntMap Int) -> Index
emptyIndex :: Index
addToIndex :: Int -> Index -> Index
indices :: Index -> [Int]
-- | We only store partials in sorted order, so the map contained in a
-- partial will only contain partials with equal or greater keys to that
-- of the map in which it was found. This should be key for efficiently
-- computing sparse Hessians. There are only (n + k - 1) choose k
-- distinct nth partial derivatives of a function with k inputs.
data Sparse a
Sparse :: !a -> (IntMap (Sparse a)) -> Sparse a
Zero :: Sparse a
apply :: (Traversable f, Num a) => (f (AD Sparse a) -> b) -> f a -> b
vars :: (Traversable f, Num a) => f a -> f (AD Sparse a)
d :: (Traversable f, Num a) => f b -> AD Sparse a -> f a
d' :: (Traversable f, Num a) => f a -> AD Sparse a -> (a, f a)
ds :: (Traversable f, Num a) => f b -> AD Sparse a -> Cofree f a
skeleton :: Traversable f => f a -> f Int
spartial :: Num a => [Int] -> Sparse a -> Maybe a
partial :: Num a => [Int] -> Sparse a -> a
vgrad :: Grad i o o' a => i -> o
vgrad' :: Grad i o o' a => i -> o'
vgrads :: Grads i o a => i -> o
class Num a => Grad i o o' a | i -> a o o', o -> a i o', o' -> a i o
pack :: Grad i o o' a => i -> [AD Sparse a] -> AD Sparse a
unpack :: Grad i o o' a => ([a] -> [a]) -> o
unpack' :: Grad i o o' a => ([a] -> (a, [a])) -> o'
class Num a => Grads i o a | i -> a o, o -> a i
packs :: Grads i o a => i -> [AD Sparse a] -> AD Sparse a
unpacks :: Grads i o a => ([a] -> Cofree [] a) -> o
instance Grads i o a => Grads (AD Sparse a -> i) (a -> o) a
instance Num a => Grads (AD Sparse a) (Cofree [] a) a
instance Grad i o o' a => Grad (AD Sparse a -> i) (a -> o) (a -> o') a
instance Num a => Grad (AD Sparse a) [a] (a, [a]) a
instance Lifted Sparse
instance Typeable1 Sparse
instance Show a => Show (Sparse a)
instance Data a => Data (Sparse a)
instance Lifted Sparse => Jacobian Sparse
instance Lifted Sparse => Mode Sparse
instance Primal Sparse
-- | Variadic combinators for sparse forward mode automatic
-- differentiation.
--
-- Unfortunately, variadicity comes at the expense of being able to use
-- quantification to avoid sensitivity confusion, so be careful when
-- counting the number of lifts you use when taking the gradient
-- of a function that takes gradients!
module Numeric.AD.Variadic.Sparse
class Num a => Grad i o o' a | i -> a o o', o -> a i o', o' -> a i o
vgrad :: Grad i o o' a => i -> o
vgrad' :: Grad i o o' a => i -> o'
class Num a => Grads i o a | i -> a o, o -> a i
vgrads :: Grads i o a => i -> o
-- | Reverse-Mode Automatic Differentiation using a single tape.
--
-- This version uses Data.Reflection to find and update the tape
--
-- This is asymptotically faster than using Reverse, which is
-- forced to reify and topologically sort the graph, but it requires a
-- fairly expensive rendezvous during construction.
module Numeric.AD.Internal.Chain
data Chain s a
Zero :: Chain s a
Lift :: a -> Chain s a
Chain :: {-# UNPACK #-} !Int -> a -> Chain s a
newtype Tape
Tape :: IORef Head -> Tape
getTape :: Tape -> IORef Head
data Head
Head :: {-# UNPACK #-} !Int -> Cells -> Head
data Cells
Nil :: Cells
Unary :: {-# UNPACK #-} !Int -> a -> Cells -> Cells
Binary :: {-# UNPACK #-} !Int -> {-# UNPACK #-} !Int -> a -> a -> Cells -> Cells
-- | Construct a tape that starts with n variables.
reifyTape :: Int -> (forall s. Reifies s Tape => Proxy s -> r) -> r
-- | Extract the partials from the current chain for a given AD variable.
partials :: (Reifies s Tape, Num a) => AD (Chain s) a -> [a]
-- | Return an Array of partials given bounds for the
-- variable IDs.
partialArrayOf :: (Reifies s Tape, Num a) => Proxy s -> (Int, Int) -> AD (Chain s) a -> Array Int a
-- | Return an IntMap of sparse partials
partialMapOf :: (Reifies s Tape, Num a) => Proxy s -> AD (Chain s) a -> IntMap a
-- | Helper that extracts the derivative of a chain when the chain was
-- constructed with one variable.
derivativeOf :: (Reifies s Tape, Num a) => Proxy s -> AD (Chain s) a -> a
-- | Helper that extracts both the primal and derivative of a chain when
-- the chain was constructed with one variable.
derivativeOf' :: (Reifies s Tape, Num a) => Proxy s -> AD (Chain s) a -> (a, a)
instance Var (Chain s)
instance Reifies s Tape => Lifted (Chain s)
instance Typeable2 Chain
instance Show a => Show (Chain s a)
instance (Reifies s Tape, Lifted (Chain s)) => Jacobian (Chain s)
instance Primal (Chain s)
instance (Reifies s Tape, Lifted (Chain s)) => Mode (Chain s)
-- | Unsafe and often partial combinators intended for internal usage.
--
-- Handle with care.
module Numeric.AD.Internal.Forward
-- | Forward mode AD.
data Forward a
Forward :: !a -> a -> Forward a
Lift :: !a -> Forward a
Zero :: Forward a
-- | Calculate the tangent using forward mode AD.
tangent :: Num a => AD Forward a -> a
bundle :: a -> a -> AD Forward a
unbundle :: Num a => AD Forward a -> (a, a)
apply :: Num a => (AD Forward a -> b) -> a -> b
bind :: (Traversable f, Num a) => (f (AD Forward a) -> b) -> f a -> f b
bind' :: (Traversable f, Num a) => (f (AD Forward a) -> b) -> f a -> (b, f b)
bindWith :: (Traversable f, Num a) => (a -> b -> c) -> (f (AD Forward a) -> b) -> f a -> f c
bindWith' :: (Traversable f, Num a) => (a -> b -> c) -> (f (AD Forward a) -> b) -> f a -> (b, f c)
transposeWith :: (Functor f, Foldable f, Traversable g) => (b -> f a -> c) -> f (g a) -> g b -> g c
instance Lifted Forward
instance Typeable1 Forward
instance Show a => Show (Forward a)
instance Data a => Data (Forward a)
instance Lifted Forward => Jacobian Forward
instance Lifted Forward => Mode Forward
instance Primal Forward
-- | Reverse-Mode Automatic Differentiation implementation details
--
-- For reverse mode AD we use StableName to recover sharing
-- information from the tape to avoid combinatorial explosion, and thus
-- run asymptotically faster than it could without such sharing
-- information, but the use of side-effects contained herein is benign.
module Numeric.AD.Internal.Reverse
-- | Reverse is a Mode using reverse-mode automatic
-- differentiation that provides fast diffFU, diff2FU,
-- grad, grad2 and a fast jacobian when you
-- have a significantly smaller number of outputs than inputs.
newtype Reverse a
Reverse :: (Tape a (Reverse a)) -> Reverse a
-- | A Tape records the information needed back propagate from the
-- output to each input during Reverse Mode AD.
data Tape a t
Zero :: Tape a t
Lift :: !a -> Tape a t
Var :: !a -> {-# UNPACK #-} !Int -> Tape a t
Binary :: !a -> a -> a -> t -> t -> Tape a t
Unary :: !a -> a -> t -> Tape a t
-- | This returns a list of contributions to the partials. The variable ids
-- returned in the list are likely not unique!
partials :: Num a => AD Reverse a -> [(Int, a)]
-- | Return an Array of partials given bounds for the
-- variable IDs.
partialArray :: Num a => (Int, Int) -> AD Reverse a -> Array Int a
-- | Return an IntMap of sparse partials
partialMap :: Num a => AD Reverse a -> IntMap a
derivative :: Num a => AD Reverse a -> a
derivative' :: Num a => AD Reverse a -> (a, a)
vgrad :: Grad i o o' a => i -> o
vgrad' :: Grad i o o' a => i -> o'
class Num a => Grad i o o' a | i -> a o o', o -> a i o', o' -> a i o
pack :: Grad i o o' a => i -> [AD Reverse a] -> AD Reverse a
unpack :: Grad i o o' a => ([a] -> [a]) -> o
unpack' :: Grad i o o' a => ([a] -> (a, [a])) -> o'
instance Grad i o o' a => Grad (AD Reverse a -> i) (a -> o) (a -> o') a
instance Num a => Grad (AD Reverse a) [a] (a, [a]) a
instance Var Reverse
instance Monad S
instance Lifted Reverse
instance Typeable2 Tape
instance Typeable1 Reverse
instance (Show a, Show t) => Show (Tape a t)
instance (Data a, Data t) => Data (Tape a t)
instance Show a => Show (Reverse a)
instance Lifted Reverse => Jacobian Reverse
instance Primal Reverse
instance Lifted Reverse => Mode Reverse
instance MuRef (Reverse a)
-- | Variadic combinators for reverse-mode automatic differentiation.
--
-- Unfortunately, variadicity comes at the expense of being able to use
-- quantification to avoid sensitivity confusion, so be careful when
-- counting the number of lifts you use when taking the gradient
-- of a function that takes gradients!
module Numeric.AD.Variadic.Reverse
vgrad :: Grad i o o' a => i -> o
vgrad' :: Grad i o o' a => i -> o'
class Num a => Grad i o o' a | i -> a o o', o -> a i o', o' -> a i o
-- | Variadic combinators for variadic mixed-mode automatic
-- differentiation.
--
-- Unfortunately, variadicity comes at the expense of being able to use
-- quantification to avoid sensitivity confusion, so be careful when
-- counting the number of lifts you use when taking the gradient
-- of a function that takes gradients!
module Numeric.AD.Variadic
class Num a => Grad i o o' a | i -> a o o', o -> a i o', o' -> a i o
vgrad :: Grad i o o' a => i -> o
vgrad' :: Grad i o o' a => i -> o'
class Num a => Grads i o a | i -> a o, o -> a i
vgrads :: Grads i o a => i -> o
-- | Dense Forward AD. Useful when the result involves the majority of the
-- input elements. Do not use for hessian and beyond, since they
-- only contain a small number of unique nth derivatives --
-- (n + k - 1) choose k for functions of k
-- inputs rather than the k^n that would be generated by using
-- Dense, not to mention the redundant intermediate derivatives
-- that would be calculated over and over during that process!
--
-- Assumes all instances of f have the same number of elements.
--
-- NB: We don't need the full power of Traversable here, we could
-- get by with a notion of zippable that can plug in 0's for the missing
-- entries. This might allow for gradients where f has
-- exponentials like ((->) a)
module Numeric.AD.Internal.Dense
data Dense f a
Lift :: !a -> Dense f a
Dense :: !a -> (f a) -> Dense f a
Zero :: Dense f a
ds :: f a -> AD (Dense f) a -> f a
ds' :: Num a => f a -> AD (Dense f) a -> (a, f a)
vars :: (Traversable f, Num a) => f a -> f (AD (Dense f) a)
apply :: (Traversable f, Num a) => (f (AD (Dense f) a) -> b) -> f a -> b
instance Traversable f => Lifted (Dense f)
instance (Traversable f, Lifted (Dense f)) => Jacobian (Dense f)
instance (Traversable f, Lifted (Dense f)) => Mode (Dense f)
instance Primal (Dense f)
instance Show a => Show (Dense f a)
module Numeric.AD.Internal.Composition
-- | Functor composition, used to nest the use of jacobian and grad
newtype ComposeFunctor f g a
ComposeFunctor :: f (g a) -> ComposeFunctor f g a
decomposeFunctor :: ComposeFunctor f g a -> f (g a)
-- | The composition of two AD modes is an AD mode in its own right
newtype ComposeMode f g a
ComposeMode :: f (AD g a) -> ComposeMode f g a
runComposeMode :: ComposeMode f g a -> f (AD g a)
composeMode :: AD f (AD g a) -> AD (ComposeMode f g) a
decomposeMode :: AD (ComposeMode f g) a -> AD f (AD g a)
instance (Typeable1 f, Typeable1 g, Data (f (AD g a)), Data a) => Data (ComposeMode f g a)
instance (Typeable1 f, Typeable1 g, Typeable a) => Typeable (ComposeMode f g a)
instance (Typeable1 f, Typeable1 g) => Typeable1 (ComposeMode f g)
instance (Mode f, Mode g) => Lifted (ComposeMode f g)
instance (Mode f, Mode g) => Mode (ComposeMode f g)
instance (Primal f, Mode g, Primal g) => Primal (ComposeMode f g)
instance (Typeable1 f, Typeable1 g, Data (f (g a)), Data a) => Data (ComposeFunctor f g a)
instance (Typeable1 f, Typeable1 g) => Typeable1 (ComposeFunctor f g)
instance (Traversable f, Traversable g) => Traversable (ComposeFunctor f g)
instance (Foldable f, Foldable g) => Foldable (ComposeFunctor f g)
instance (Functor f, Functor g) => Functor (ComposeFunctor f g)
module Numeric.AD.Types
class Lifted t => Mode t where isKnownConstant _ = False isKnownZero _ = False a *^ b = lift a *! b a ^* b = a *! lift b a ^/ b = a ^* recip b zero = lift 0
isKnownConstant :: Mode t => t a -> Bool
isKnownZero :: (Mode t, Num a) => t a -> Bool
lift :: (Mode t, Num a) => a -> t a
(<+>) :: (Mode t, Num a) => t a -> t a -> t a
(*^) :: (Mode t, Num a) => a -> t a -> t a
(^*) :: (Mode t, Num a) => t a -> a -> t a
(^/) :: (Mode t, Fractional a) => t a -> a -> t a
(<**>) :: (Mode t, Floating a) => t a -> t a -> t a
zero :: (Mode t, Num a) => t a
-- | AD serves as a common wrapper for different Mode
-- instances, exposing a traditional numerical tower. Universal
-- quantification is used to limit the actions in user code to machinery
-- that will return the same answers under all AD modes, allowing us to
-- use modes interchangeably as both the type level "brand" and
-- dictionary, providing a common API.
newtype AD f a
AD :: f a -> AD f a
runAD :: AD f a -> f a
-- | A Jet is a tower of all (higher order) partial derivatives of a
-- function
--
-- At each step, a Jet f is wrapped in another layer
-- worth of f.
--
-- -- a :- f a :- f (f a) :- f (f (f a)) :- ... --data Jet f a (:-) :: a -> Jet f (f a) -> Jet f a -- | Take the head of a Jet. headJet :: Jet f a -> a -- | Take the tail of a Jet. tailJet :: Jet f a -> Jet f (f a) -- | Construct a Jet by unzipping the layers of a Cofree -- Comonad. jet :: Functor f => Cofree f a -> Jet f a -- | Evaluate a scalar-to-scalar function in the trivial identity AD mode. lowerUU :: (forall s. Mode s => AD s a -> AD s a) -> a -> a -- | Evaluate a scalar-to-nonscalar function in the trivial identity AD -- mode. lowerUF :: (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f a -- | Evaluate a nonscalar-to-scalar function in the trivial identity AD -- mode. lowerFU :: (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> a -- | Evaluate a nonscalar-to-nonscalar function in the trivial identity AD -- mode. lowerFF :: (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g a -- | Forward mode automatic differentiation module Numeric.AD.Mode.Forward -- | Compute the gradient of a function using forward mode AD. -- -- Note, this performs O(n) worse than grad for n -- inputs, in exchange for better space utilization. grad :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f a -- | Compute the gradient and answer to a function using forward mode AD. -- -- Note, this performs O(n) worse than grad' for n -- inputs, in exchange for better space utilization. grad' :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f a) -- | Compute the gradient of a function using forward mode AD and combine -- the result with the input using a user-specified function. -- -- Note, this performs O(n) worse than gradWith for -- n inputs, in exchange for better space utilization. gradWith :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. 
Mode s => f (AD s a) -> AD s a) -> f a -> f b -- | Compute the gradient of a function using forward mode AD and the -- answer, and combine the result with the input using a user-specified -- function. -- -- Note, this performs O(n) worse than gradWith' for -- n inputs, in exchange for better space utilization. gradWith' :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f b) -- | Compute the Jacobian using Forward mode AD. This must -- transpose the result, so jacobianT is faster and allows more -- result types. -- --
-- >>> jacobian (\[x,y] -> [y,x,x+y,x*y,exp x * sin y]) [pi,1] -- [[0.0,1.0],[1.0,0.0],[1.0,1.0],[1.0,3.141592653589793],[19.472221418841606,12.502969588876512]] --jacobian :: (Traversable f, Traversable g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f a) -- | Compute the Jacobian using Forward mode AD along with -- the actual answer. jacobian' :: (Traversable f, Traversable g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f a) -- | Compute the Jacobian using Forward mode AD and combine -- the output with the input. This must transpose the result, so -- jacobianWithT is faster, and allows more result types. jacobianWith :: (Traversable f, Traversable g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f b) -- | Compute the Jacobian using Forward mode AD combined with -- the input using a user specified function, along with the actual -- answer. jacobianWith' :: (Traversable f, Traversable g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f b) -- | A fast, simple, transposed Jacobian computed with forward-mode AD. jacobianT :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> f (g a) -- | A fast, simple, transposed Jacobian computed with Forward mode -- AD that combines the output with the input. jacobianWithT :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> f (g b) -- | Compute the product of a vector with the Hessian using -- forward-on-forward-mode AD. hessianProduct :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f (a, a) -> f a -- | Compute the gradient and hessian product using forward-on-forward-mode -- AD. hessianProduct' :: (Traversable f, Num a) => (forall s. 
Mode s => f (AD s a) -> AD s a) -> f (a, a) -> f (a, a) -- | The diff function calculates the first derivative of a -- scalar-to-scalar function by forward-mode AD -- --
-- >>> diff sin 0 -- 1.0 --diff :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> a -- | The diff' function calculates the result and first derivative -- of scalar-to-scalar function by Forward mode AD -- --
-- diff' sin == sin &&& cos -- diff' f = f &&& d f ---- --
-- >>> diff' sin 0 -- (0.0,1.0) ---- --
-- >>> diff' exp 0 -- (1.0,1.0) --diff' :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> (a, a) -- | The diffF function calculates the first derivatives of -- scalar-to-nonscalar function by Forward mode AD -- --
-- >>> diffF (\a -> [sin a, cos a]) 0 -- [1.0,-0.0] --diffF :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f a -- | The diffF' function calculates the result and first derivatives -- of a scalar-to-non-scalar function by Forward mode AD -- --
-- >>> diffF' (\a -> [sin a, cos a]) 0 -- [(0.0,1.0),(1.0,-0.0)] --diffF' :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f (a, a) -- | Compute the directional derivative of a function given a zipped up -- Functor of the input values and their derivatives du :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f (a, a) -> a -- | Compute the answer and directional derivative of a function given a -- zipped up Functor of the input values and their derivatives du' :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f (a, a) -> (a, a) -- | Compute a vector of directional derivatives for a function given a -- zipped up Functor of the input values and their derivatives. duF :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f (a, a) -> g a -- | Compute a vector of answers and directional derivatives for a function -- given a zipped up Functor of the input values and their -- derivatives. duF' :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f (a, a) -> g (a, a) -- | Higher order derivatives via a "dual number tower". module Numeric.AD.Mode.Tower taylor :: Fractional a => (forall s. Mode s => AD s a -> AD s a) -> a -> a -> [a] taylor0 :: Fractional a => (forall s. Mode s => AD s a -> AD s a) -> a -> a -> [a] maclaurin :: Fractional a => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] maclaurin0 :: Fractional a => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] diff :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> a diff' :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> (a, a) diffs :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] diffs0 :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] diffsF :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f [a] diffs0F :: (Functor f, Num a) => (forall s. 
Mode s => AD s a -> f (AD s a)) -> a -> f [a] du :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f (a, a) -> a du' :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f (a, a) -> (a, a) dus :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f [a] -> [a] dus0 :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f [a] -> [a] duF :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f (a, a) -> g a duF' :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f (a, a) -> g (a, a) dusF :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f [a] -> g [a] dus0F :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f [a] -> g [a] -- | Mixed-Mode Automatic Differentiation. -- -- For reverse mode AD we use StableName to recover sharing -- information from the tape to avoid combinatorial explosion, and thus -- run asymptotically faster than it could without such sharing -- information, but the use of side-effects contained herein is benign. module Numeric.AD.Mode.Reverse -- | The grad function calculates the gradient of a -- non-scalar-to-scalar function with Reverse AD in a single pass. -- --
-- >>> grad (\[x,y,z] -> x*y+z) [1,2,3] -- [2,1,1] --grad :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f a -- | The grad' function calculates the result and gradient of a -- non-scalar-to-scalar function with Reverse AD in a single pass. -- --
-- >>> grad' (\[x,y,z] -> 4*x*exp y+cos z) [1,2,3] -- (28.566231899122155,[29.5562243957226,29.5562243957226,-0.1411200080598672]) --grad' :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f a) -- | grad g f function calculates the gradient of a -- non-scalar-to-scalar function f with reverse-mode AD in a -- single pass. The gradient is combined element-wise with the argument -- using the function g. -- --
-- grad = gradWith (\_ dx -> dx) -- id = gradWith const --gradWith :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f b -- | grad' g f calculates the result and gradient of a -- non-scalar-to-scalar function f with Reverse AD in a -- single pass the gradient is combined element-wise with the argument -- using the function g. -- --
-- grad' == gradWith' (\_ dx -> dx) --gradWith' :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f b) -- | The jacobian function calculates the jacobian of a -- non-scalar-to-non-scalar function with reverse AD lazily in m -- passes for m outputs. -- --
-- >>> jacobian (\[x,y] -> [y,x,x*y]) [2,1] -- [[0,1],[1,0],[1,2]] ---- --
-- >>> jacobian (\[x,y] -> [exp y,cos x,x+y]) [1,2] -- [[0.0,7.38905609893065],[-0.8414709848078965,0.0],[1.0,1.0]] --jacobian :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f a) -- | The jacobian' function calculates both the result and the -- Jacobian of a nonscalar-to-nonscalar function, using m -- invocations of reverse AD, where m is the output -- dimensionality. Applying fmap snd to the result will recover -- the result of jacobian | An alias for gradF' -- -- ghci> jacobian' ([x,y] -> [y,x,x*y]) [2,1] -- [(1,[0,1]),(2,[1,0]),(2,[1,2])] jacobian' :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f a) -- | 'jacobianWith g f' calculates the Jacobian of a -- non-scalar-to-non-scalar function f with reverse AD lazily in -- m passes for m outputs. -- -- Instead of returning the Jacobian matrix, the elements of the matrix -- are combined with the input using the g. -- --
-- jacobian = jacobianWith (\_ dx -> dx) -- jacobianWith const = (\f x -> const x <$> f x) --jacobianWith :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f b) -- | jacobianWith g f' calculates both the result and the Jacobian -- of a nonscalar-to-nonscalar function f, using m -- invocations of reverse AD, where m is the output -- dimensionality. Applying fmap snd to the result will recover -- the result of jacobianWith -- -- Instead of returning the Jacobian matrix, the elements of the matrix -- are combined with the input using the g. -- --
-- jacobian' == jacobianWith' (\_ dx -> dx) --jacobianWith' :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f b) -- | Compute the hessian via the jacobian of the gradient. -- gradient is computed in reverse mode and then the jacobian is -- computed in reverse mode. -- -- However, since the grad f :: f a -> f a is square -- this is not as fast as using the forward-mode jacobian of a -- reverse mode gradient provided by hessian. -- --
-- >>> hessian (\[x,y] -> x*y) [1,2] -- [[0,1],[1,0]] --hessian :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f (f a) -- | Compute the order 3 Hessian tensor on a non-scalar-to-non-scalar -- function via the reverse-mode Jacobian of the reverse-mode Jacobian of -- the function. -- -- Less efficient than hessianF. -- --
-- >>> hessianF (\[x,y] -> [x*y,x+y,exp x*cos y]) [1,2] -- [[[0.0,1.0],[1.0,0.0]],[[0.0,0.0],[0.0,0.0]],[[-1.1312043837568135,-2.4717266720048188],[-2.4717266720048188,1.1312043837568135]]] --hessianF :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f (f a)) -- | Compute the derivative of a function. -- --
-- >>> diff sin 0 -- 1.0 ---- --
-- >>> cos 0 -- 1.0 --diff :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> a -- | The diff' function calculates the value and derivative, as a -- pair, of a scalar-to-scalar function. -- --
-- >>> diff' sin 0 -- (0.0,1.0) --diff' :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> (a, a) -- | Compute the derivatives of a function that returns a vector with -- regards to its single input. -- --
-- >>> diffF (\a -> [sin a, cos a]) 0 -- [1.0,0.0] --diffF :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f a -- | Compute the derivatives of a function that returns a vector with -- regards to its single input as well as the primal answer. -- --
-- >>> diffF' (\a -> [sin a, cos a]) 0 -- [(0.0,1.0),(1.0,0.0)] --diffF' :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f (a, a) vgrad :: Grad i o o' a => i -> o vgrad' :: Grad i o o' a => i -> o' class Num a => Grad i o o' a | i -> a o o', o -> a i o', o' -> a i o -- | Higher order derivatives via a "dual number tower". module Numeric.AD.Mode.Sparse grad :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f a grad' :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f a) gradWith :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f b gradWith' :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f b) grads :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> Cofree f a jacobian :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f a) jacobian' :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f a) jacobianWith :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f b) jacobianWith' :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f b) jacobians :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (Cofree f a) hessian :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f (f a) hessian' :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f (a, f a)) hessianF :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f (f a)) hessianF' :: (Traversable f, Functor g, Num a) => (forall s. 
Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f (a, f a)) vgrad :: Grad i o o' a => i -> o vgrads :: Grads i o a => i -> o class Num a => Grad i o o' a | i -> a o o', o -> a i o', o' -> a i o class Num a => Grads i o a | i -> a o, o -> a i module Numeric.AD.Newton -- | The findZero function finds a zero of a scalar function using -- Newton's method; its output is a stream of increasingly accurate -- results. (Modulo the usual caveats.) -- -- Examples: -- --
-- >>> take 10 $ findZero (\x->x^2-4) 1 -- [1.0,2.5,2.05,2.000609756097561,2.0000000929222947,2.000000000000002,2.0] ---- --
-- >>> import Data.Complex -- -- >>> last $ take 10 $ findZero ((+1).(^2)) (1 :+ 1) -- 0.0 :+ 1.0 --findZero :: (Fractional a, Eq a) => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] -- | The inverse function inverts a scalar function using Newton's -- method; its output is a stream of increasingly accurate results. -- (Modulo the usual caveats.) -- -- Example: -- --
-- >>> last $ take 10 $ inverse sqrt 1 (sqrt 10) -- 10.0 --inverse :: (Fractional a, Eq a) => (forall s. Mode s => AD s a -> AD s a) -> a -> a -> [a] -- | The fixedPoint function find a fixedpoint of a scalar function -- using Newton's method; its output is a stream of increasingly accurate -- results. (Modulo the usual caveats.) -- --
-- >>> last $ take 10 $ fixedPoint cos 1 -- 0.7390851332151607 --fixedPoint :: (Fractional a, Eq a) => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] -- | The extremum function finds an extremum of a scalar function -- using Newton's method; produces a stream of increasingly accurate -- results. (Modulo the usual caveats.) -- --
-- >>> last $ take 10 $ extremum cos 1 -- 0.0 --extremum :: (Fractional a, Eq a) => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] -- | The gradientDescent function performs a multivariate -- optimization, based on the naive-gradient-descent in the file -- stalingrad/examples/flow-tests/pre-saddle-1a.vlad from the -- VLAD compiler Stalingrad sources. Its output is a stream of -- increasingly accurate results. (Modulo the usual caveats.) -- -- It uses reverse mode automatic differentiation to compute the -- gradient. gradientDescent :: (Traversable f, Fractional a, Ord a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> [f a] gradientAscent :: (Traversable f, Fractional a, Ord a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> [f a] -- | Root finding using Halley's rational method (the second in the class -- of Householder methods). Assumes the function is three times -- continuously differentiable and converges cubically when progress can -- be made. module Numeric.AD.Halley -- | The findZero function finds a zero of a scalar function using -- Halley's method; its output is a stream of increasingly accurate -- results. (Modulo the usual caveats.) -- -- Examples: -- --
-- >>> take 10 $ findZero (\x->x^2-4) 1 -- [1.0,1.8571428571428572,1.9997967892704736,1.9999999999994755,2.0] ---- --
-- >>> import Data.Complex -- -- >>> last $ take 10 $ findZero ((+1).(^2)) (1 :+ 1) -- 0.0 :+ 1.0 --findZero :: (Fractional a, Eq a) => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] -- | The inverse function inverts a scalar function using Halley's -- method; its output is a stream of increasingly accurate results. -- (Modulo the usual caveats.) -- -- Note: the take 10 $ inverse sqrt 1 (sqrt 10) example that -- works for Newton's method fails with Halley's method because the -- preconditions do not hold! inverse :: (Fractional a, Eq a) => (forall s. Mode s => AD s a -> AD s a) -> a -> a -> [a] -- | The fixedPoint function find a fixedpoint of a scalar function -- using Halley's method; its output is a stream of increasingly accurate -- results. (Modulo the usual caveats.) -- --
-- >>> last $ take 10 $ fixedPoint cos 1 -- 0.7390851332151607 --fixedPoint :: (Fractional a, Eq a) => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] -- | The extremum function finds an extremum of a scalar function -- using Halley's method; produces a stream of increasingly accurate -- results. (Modulo the usual caveats.) -- --
-- >>> take 10 $ extremum cos 1 -- [1.0,0.29616942658570555,4.59979519460002e-3,1.6220740159042513e-8,0.0] --extremum :: (Fractional a, Eq a) => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] -- | Reverse Automatic Differentiation using Data.Reflection module Numeric.AD.Mode.Chain -- | The grad function calculates the gradient of a -- non-scalar-to-scalar function with reverse-mode AD in a single pass. -- --
-- >>> grad (\[x,y,z] -> x*y+z) [1,2,3] -- [2,1,1] --grad :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f a -- | The grad' function calculates the result and gradient of a -- non-scalar-to-scalar function with reverse-mode AD in a single pass. -- --
-- >>> grad' (\[x,y,z] -> x*y+z) [1,2,3] -- (5,[2,1,1]) --grad' :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f a) -- | grad g f function calculates the gradient of a -- non-scalar-to-scalar function f with reverse-mode AD in a -- single pass. The gradient is combined element-wise with the argument -- using the function g. -- --
-- grad == gradWith (\_ dx -> dx) -- id == gradWith const --gradWith :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f b -- | grad' g f calculates the result and gradient of a -- non-scalar-to-scalar function f with reverse-mode AD in a -- single pass the gradient is combined element-wise with the argument -- using the function g. -- --
-- grad' == gradWith' (\_ dx -> dx) --gradWith' :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f b) -- | The jacobian function calculates the jacobian of a -- non-scalar-to-non-scalar function with reverse AD lazily in m -- passes for m outputs. -- --
-- >>> jacobian (\[x,y] -> [y,x,x*y]) [2,1] -- [[0,1],[1,0],[1,2]] --jacobian :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f a) -- | The jacobian' function calculates both the result and the -- Jacobian of a nonscalar-to-nonscalar function, using m -- invocations of reverse AD, where m is the output -- dimensionality. Applying fmap snd to the result will recover -- the result of jacobian | An alias for gradF' -- --
-- >>> jacobian' (\[x,y] -> [y,x,x*y]) [2,1] -- [(1,[0,1]),(2,[1,0]),(2,[1,2])] --jacobian' :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f a) -- | 'jacobianWith g f' calculates the Jacobian of a -- non-scalar-to-non-scalar function f with reverse AD lazily in -- m passes for m outputs. -- -- Instead of returning the Jacobian matrix, the elements of the matrix -- are combined with the input using the g. -- --
-- jacobian == jacobianWith (\_ dx -> dx) -- jacobianWith const == (\f x -> const x <$> f x) --jacobianWith :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f b) -- | jacobianWith g f' calculates both the result and the Jacobian -- of a nonscalar-to-nonscalar function f, using m -- invocations of reverse AD, where m is the output -- dimensionality. Applying fmap snd to the result will recover -- the result of jacobianWith -- -- Instead of returning the Jacobian matrix, the elements of the matrix -- are combined with the input using the g. -- --
-- jacobian' == jacobianWith' (\_ dx -> dx) --jacobianWith' :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f b) -- | Compute the hessian via the jacobian of the gradient. gradient is -- computed in reverse mode and then the jacobian is computed in reverse -- mode. -- -- However, since the grad f :: f a -> f a is square -- this is not as fast as using the forward-mode Jacobian of a reverse -- mode gradient provided by hessian. -- --
-- >>> hessian (\[x,y] -> x*y) [1,2] -- [[0,1],[1,0]] --hessian :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f (f a) -- | Compute the order 3 Hessian tensor on a non-scalar-to-non-scalar -- function via the reverse-mode Jacobian of the reverse-mode Jacobian of -- the function. -- -- Less efficient than hessianF. -- --
-- >>> hessianF (\[x,y] -> [x*y,x+y,exp x*cos y]) [1,2] -- [[[0.0,1.0],[1.0,0.0]],[[0.0,0.0],[0.0,0.0]],[[-1.1312043837568135,-2.4717266720048188],[-2.4717266720048188,1.1312043837568135]]] --hessianF :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f (f a)) -- | Compute the derivative of a function. -- --
-- >>> diff sin 0 -- 1.0 --diff :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> a -- | The diff' function calculates the result and derivative, as a -- pair, of a scalar-to-scalar function. -- --
-- >>> diff' sin 0 -- (0.0,1.0) ---- --
-- >>> diff' exp 0 -- (1.0,1.0) --diff' :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> (a, a) -- | Compute the derivatives of each result of a scalar-to-vector function -- with regards to its input. -- --
-- >>> diffF (\a -> [sin a, cos a]) 0 -- [1.0,0.0] --diffF :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f a -- | Compute the derivatives of each result of a scalar-to-vector function -- with regards to its input along with the answer. -- --
-- >>> diffF' (\a -> [sin a, cos a]) 0 -- [(0.0,1.0),(1.0,0.0)] --diffF' :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f (a, a) -- | Mixed-Mode Automatic Differentiation. -- -- Each combinator exported from this module chooses an appropriate AD -- mode. The following basic operations are supported, modified as -- appropriate by the suffixes below: -- --
-- >>> grad (\[x,y,z] -> x*y+z) [1,2,3] -- [2,1,1] --grad :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f a -- | The grad' function calculates the result and gradient of a -- non-scalar-to-scalar function with Reverse AD in a single pass. -- --
-- >>> grad' (\[x,y,z] -> 4*x*exp y+cos z) [1,2,3] -- (28.566231899122155,[29.5562243957226,29.5562243957226,-0.1411200080598672]) --grad' :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f a) -- | grad g f function calculates the gradient of a -- non-scalar-to-scalar function f with reverse-mode AD in a -- single pass. The gradient is combined element-wise with the argument -- using the function g. -- --
-- grad = gradWith (\_ dx -> dx) -- id = gradWith const --gradWith :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f b -- | grad' g f calculates the result and gradient of a -- non-scalar-to-scalar function f with Reverse AD in a -- single pass the gradient is combined element-wise with the argument -- using the function g. -- --
-- grad' == gradWith' (_ dx -> dx) --gradWith' :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f b) grads :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> Cofree f a -- | Calculate the Jacobian of a non-scalar-to-non-scalar function, -- automatically choosing between forward and reverse mode AD based on -- the number of inputs and outputs. -- -- If you know the relative number of inputs and outputs, consider -- jacobian or jacobian. jacobian :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f a) -- | Calculate both the answer and Jacobian of a non-scalar-to-non-scalar -- function, automatically choosing between forward- and reverse- mode AD -- based on the relative, based on the number of inputs -- -- If you know the relative number of inputs and outputs, consider -- jacobian' or jacobian'. jacobian' :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f a) -- | jacobianWith g f calculates the Jacobian of a -- non-scalar-to-non-scalar function, automatically choosing between -- forward and reverse mode AD based on the number of inputs and outputs. -- -- The resulting Jacobian matrix is then recombined element-wise with the -- input using g. -- -- If you know the relative number of inputs and outputs, consider -- jacobianWith or jacobianWith. jacobianWith :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f b) -- | jacobianWith' g f calculates the answer and Jacobian -- of a non-scalar-to-non-scalar function, automatically choosing between -- sparse and reverse mode AD based on the number of inputs and outputs. -- -- The resulting Jacobian matrix is then recombined element-wise with the -- input using g. -- -- If you know the relative number of inputs and outputs, consider -- jacobianWith' or jacobianWith'. 
jacobianWith' :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f b) jacobians :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (Cofree f a) -- | A fast, simple, transposed Jacobian computed with forward-mode AD. jacobianT :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> f (g a) -- | A fast, simple, transposed Jacobian computed with Forward mode -- AD that combines the output with the input. jacobianWithT :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> f (g b) -- | Compute the Hessian via the Jacobian of the gradient. gradient is -- computed in reverse mode and then the Jacobian is computed in sparse -- (forward) mode. hessian :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f (f a) hessian' :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f (a, f a)) -- | Compute the order 3 Hessian tensor on a non-scalar-to-non-scalar -- function using Sparse or 'Sparse'-on-'Reverse' hessianF :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f (f a)) hessianF' :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f (a, f a)) -- | hessianProduct f wv computes the product of the -- hessian H of a non-scalar-to-scalar function f at -- w = fst $ wv with a vector v = snd $ -- wv using "Pearlmutter's method" from -- http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.29.6143, -- which states: -- --
-- H v = (d/dr) grad_w (w + r v) | r = 0 ---- -- Or in other words, we take the directional derivative of the gradient. -- The gradient is calculated in reverse mode, then the directional -- derivative is calculated in forward mode. hessianProduct :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f (a, a) -> f a -- | hessianProduct' f wv computes both the gradient of a -- non-scalar-to-scalar f at w = fst $ wv -- and the product of the hessian H at w with a vector -- v = snd $ wv using "Pearlmutter's method". The outputs -- are returned wrapped in the same functor. -- --
-- H v = (d/dr) grad_w (w + r v) | r = 0 ---- -- Or in other words, we return the gradient and the directional -- derivative of the gradient. The gradient is calculated in reverse -- mode, then the directional derivative is calculated in forward mode. hessianProduct' :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f (a, a) -> f (a, a) -- | The diff function calculates the first derivative of a -- scalar-to-scalar function by forward-mode AD -- --
-- >>> diff sin 0 -- 1.0 --diff :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> a -- | The diffF function calculates the first derivatives of -- a scalar-to-nonscalar function by Forward mode AD -- --
-- >>> diffF (\a -> [sin a, cos a]) 0 -- [1.0,-0.0] --diffF :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f a -- | The diff' function calculates the result and first derivative -- of a scalar-to-scalar function by Forward mode AD -- --
-- diff' sin == sin &&& cos -- diff' f = f &&& d f ---- --
-- >>> diff' sin 0 -- (0.0,1.0) ---- --
-- >>> diff' exp 0 -- (1.0,1.0) --diff' :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> (a, a) -- | The diffF' function calculates the result and first derivatives -- of a scalar-to-non-scalar function by Forward mode AD -- --
-- >>> diffF' (\a -> [sin a, cos a]) 0 -- [(0.0,1.0),(1.0,-0.0)] --diffF' :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f (a, a) diffs :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] diffsF :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f [a] diffs0 :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] diffs0F :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f [a] -- | Compute the directional derivative of a function given a zipped up -- Functor of the input values and their derivatives du :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f (a, a) -> a -- | Compute the answer and directional derivative of a function given a -- zipped up Functor of the input values and their derivatives du' :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f (a, a) -> (a, a) -- | Compute a vector of directional derivatives for a function given a -- zipped up Functor of the input values and their derivatives. duF :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f (a, a) -> g a -- | Compute a vector of answers and directional derivatives for a function -- given a zipped up Functor of the input values and their -- derivatives. duF' :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f (a, a) -> g (a, a) dus :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f [a] -> [a] dus0 :: (Functor f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f [a] -> [a] dusF :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f [a] -> g [a] dus0F :: (Functor f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f [a] -> g [a] taylor :: Fractional a => (forall s. Mode s => AD s a -> AD s a) -> a -> a -> [a] taylor0 :: Fractional a => (forall s. 
Mode s => AD s a -> AD s a) -> a -> a -> [a] maclaurin :: Fractional a => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] maclaurin0 :: Fractional a => (forall s. Mode s => AD s a -> AD s a) -> a -> [a] instance Eq Nat instance Ord Nat -- | Allows the choice of AD Mode to be specified at the term level -- for benchmarking or more complicated usage patterns. module Numeric.AD.Mode.Directed grad :: (Traversable f, Num a) => Direction -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f a grad' :: (Traversable f, Num a) => Direction -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f a) jacobian :: (Traversable f, Traversable g, Num a) => Direction -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f a) jacobian' :: (Traversable f, Traversable g, Num a) => Direction -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f a) diff :: Num a => Direction -> (forall s. Mode s => AD s a -> AD s a) -> a -> a diff' :: Num a => Direction -> (forall s. Mode s => AD s a -> AD s a) -> a -> (a, a) data Direction Forward :: Direction Reverse :: Direction Chain :: Direction Tower :: Direction Mixed :: Direction instance Show Direction instance Eq Direction instance Ord Direction instance Read Direction instance Bounded Direction instance Enum Direction instance Ix Direction