-- Hoogle documentation, generated by Haddock -- See Hoogle, http://www.haskell.org/hoogle/ -- | Heterogeneous automatic differentiation (backpropagation) -- -- Write your functions to compute your result, and the library will -- automatically generate functions to compute your gradient. -- -- Implements heterogeneous reverse-mode automatic differentiation, -- commonly known as "backpropagation". -- -- See README.md @package backprop @version 0.2.2.0 -- | Provides the Backprop typeclass, a class for values that can be -- used for backpropagation. -- -- This class replaces the old (version 0.1) API relying on Num. module Numeric.Backprop.Class -- | Class of values that can be backpropagated in general. -- -- For instances of Num, these methods can be given by -- zeroNum, addNum, and oneNum. There are also -- generic options given in Numeric.Backprop.Class for functors, -- IsList instances, and Generic instances. -- --
-- instance Backprop Double where -- zero = zeroNum -- add = addNum -- one = oneNum ---- -- If you leave the body of an instance declaration blank, GHC Generics -- will be used to derive instances if the type has a single constructor -- and each field is an instance of Backprop. -- -- To ensure that backpropagation works in a sound way, should obey the -- laws: -- --
-- Op '[Int, Bool] Double ---- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | A Prod of simple Haskell types. type Tuple = Prod * I newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a -- | Run the function that an Op encodes, to get the resulting -- output and also its gradient with respect to the inputs. -- --
-- >>> gradOp' (op2 (*)) (3 ::< 5 ::< Ø) -- (15, 5 ::< 3 ::< Ø) --runOp :: Num a => Op as a -> Tuple as -> (a, Tuple as) -- | Run the function that an Op encodes, to get the result. -- --
-- >>> evalOp (op2 (*)) (3 ::< 5 ::< Ø) -- 15 --evalOp :: Op as a -> Tuple as -> a -- | Run the function that an Op encodes, and get the gradient of -- the output with respect to the inputs. -- --
-- >>> gradOp (op2 (*)) (3 ::< 5 ::< Ø) -- 5 ::< 3 ::< Ø -- -- the gradient of x*y is (y, x) ---- --
-- gradOp o xs = gradOpWith o xs 1 --gradOp :: Num a => Op as a -> Tuple as -> Tuple as -- | Get the gradient function that an Op encodes, with a third -- argument expecting the total derivative of the result. -- -- See the module documentation for Numeric.Backprop.Op for more -- information. gradOpWith :: Op as a -> Tuple as -> a -> Tuple as -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
-- >>> runOp (op0 10) Ø -- (10, Ø) ---- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
-- >>> gradOp' (opConst 10) (1 ::< 2 ::< 3 ::< Ø) -- (10, 0 ::< 0 ::< 0 ::< Ø) --opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
-- idOp = opIso id id --idOp :: Op '[a] a -- | A version of opConst taking explicit Length, indicating -- the number of inputs and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use opConst. opConst' :: Every Num as => Length as -> a -> Op as a -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the derivative <math>, it would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that squares its input: -- --
-- square :: Num a => Op '[a] a -- square = op1 $ \x -> (x*x, \d -> 2 * d * x -- ) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
-- mul :: Num a => Op '[a, a] a -- mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d) -- ) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
-- >>> gradOp' opCoerce (Identity 5) :: (Int, Identity Int) -- (5, Identity 1) ---- --
-- opCoerce = opIso coerce coerce --opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
-- >>> gradOp' opTup (1 ::< 2 ::< 3 ::< Ø) -- (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø) --opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the two input values through an isomorphism. -- Useful for things like constructors. See opIso for caveats. opIso2 :: (a -> b -> c) -> (c -> (a, b)) -> Op '[a, b] c -- | An Op that runs the three input values through an isomorphism. -- Useful for things like constructors. See opIso for caveats. opIso3 :: (a -> b -> c -> d) -> (d -> (a, b, c)) -> Op '[a, b, c] d -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. 
gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b -- | Compose Ops together, like sequence for functions, or -- liftAN. -- -- That is, given an Op as b1, an Op as -- b2, and an Op as b3, it can compose them with an -- Op '[b1,b2,b3] c to create an Op as c. composeOp :: (Every Num as, Known Length as) => Prod (Op as) bs -> Op bs c -> Op as c -- | Convenient wrapper over composeOp for the case where the second -- function only takes one input, so the two Ops can be directly -- piped together, like for .. composeOp1 :: (Every Num as, Known Length as) => Op as b -> Op '[b] c -> Op as c -- | Convenient infix synonym for (flipped) composeOp1. Meant to be -- used just like .: -- --
-- f :: Op '[b] c -- g :: Op '[a,a] b -- -- f ~. g :: Op '[a, a] c --(~.) :: (Known Length as, Every Num as) => Op '[b] c -> Op as b -> Op as c infixr 9 ~. -- | A version of composeOp taking explicit Length, -- indicating the number of inputs expected and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use composeOp. composeOp' :: Every Num as => Length as -> Prod (Op as) bs -> Op bs c -> Op as c -- | A version of composeOp1 taking explicit Length, -- indicating the number of inputs expected and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use composeOp1. composeOp1' :: Every Num as => Length as -> Op as b -> Op '[b] c -> Op as c -- | Construct a two element Prod. Since the precedence of (:>) is -- higher than (:<), we can conveniently write lists like: -- --
-- >>> a :< b :> c ---- -- Which is identical to: -- --
-- >>> a :< b :< c :< Ø --infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * -- | Op for addition (+.) :: Num a => Op '[a, a] a -- | Op for subtraction (-.) :: Num a => Op '[a, a] a -- | Op for multiplication (*.) :: Num a => Op '[a, a] a -- | Op for negation negateOp :: Num a => Op '[a] a -- | Op for absolute value absOp :: Num a => Op '[a] a -- | Op for signum signumOp :: Num a => Op '[a] a -- | Op for division (/.) :: Fractional a => Op '[a, a] a -- | Op for multiplicative inverse recipOp :: Fractional a => Op '[a] a -- | Op for exp expOp :: Floating a => Op '[a] a -- | Op for the natural logarithm logOp :: Floating a => Op '[a] a -- | Op for square root sqrtOp :: Floating a => Op '[a] a -- | Op for exponentiation (**.) :: Floating a => Op '[a, a] a -- | Op for logBase logBaseOp :: Floating a => Op '[a, a] a -- | Op for sine sinOp :: Floating a => Op '[a] a -- | Op for cosine cosOp :: Floating a => Op '[a] a -- | Op for tangent tanOp :: Floating a => Op '[a] a -- | Op for arcsine asinOp :: Floating a => Op '[a] a -- | Op for arccosine acosOp :: Floating a => Op '[a] a -- | Op for arctangent atanOp :: Floating a => Op '[a] a -- | Op for hyperbolic sine sinhOp :: Floating a => Op '[a] a -- | Op for hyperbolic cosine coshOp :: Floating a => Op '[a] a -- | Op for hyperbolic tangent tanhOp :: Floating a => Op '[a] a -- | Op for hyperbolic arcsine asinhOp :: Floating a => Op '[a] a -- | Op for hyperbolic arccosine acoshOp :: Floating a => Op '[a] a -- | Op for hyperbolic arctangent atanhOp :: Floating a => Op '[a] a instance (Type.Class.Known.Known Data.Type.Length.Length as, Data.Type.Index.Every GHC.Num.Num as, GHC.Num.Num a) => GHC.Num.Num (Numeric.Backprop.Op.Op as a) instance (Type.Class.Known.Known Data.Type.Length.Length as, Data.Type.Index.Every GHC.Real.Fractional as, Data.Type.Index.Every 
GHC.Num.Num as, GHC.Real.Fractional a) => GHC.Real.Fractional (Numeric.Backprop.Op.Op as a) instance (Type.Class.Known.Known Data.Type.Length.Length as, Data.Type.Index.Every GHC.Float.Floating as, Data.Type.Index.Every GHC.Real.Fractional as, Data.Type.Index.Every GHC.Num.Num as, GHC.Float.Floating a) => GHC.Float.Floating (Numeric.Backprop.Op.Op as a) -- | Provides "explicit" versions of all of the functions in -- Numeric.Backprop. Instead of relying on a Backprop -- instance, allows you to manually provide zero, add, and -- one on a per-value basis. -- -- It is recommended you use Backprop or Num instead, -- unless your type has no Num instance, or you else you want to -- avoid defining orphan Backprop instances for external types. -- Can also be useful if mixing and matching styles. -- -- See Numeric.Backprop for fuller documentation on using these -- functions. module Numeric.Backprop.Explicit -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar): -- --
-- (^.) :: a -> Lens' a b -> b -- (^^.) :: BVar s a -> Lens' a b -> BVar s b ---- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- If you have control over your data type definitions, you can also use -- splitBV and joinBV to manipulate data types by easily -- extracting fields out of a BVar of data types and creating -- BVars of data types out of BVars of their fields. See -- Numeric.Backprop#hkd for a tutorial on this use pattern. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. See -- https://github.com/mstksg/backprop/wiki/Equipping-your-Library-with-Backprop -- for a detailed guide. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | Class of values that can be backpropagated in general. -- -- For instances of Num, these methods can be given by -- zeroNum, addNum, and oneNum. There are also -- generic options given in Numeric.Backprop.Class for functors, -- IsList instances, and Generic instances. -- --
-- instance Backprop Double where -- zero = zeroNum -- add = addNum -- one = oneNum ---- -- If you leave the body of an instance declaration blank, GHC Generics -- will be used to derive instances if the type has a single constructor -- and each field is an instance of Backprop. -- -- To ensure that backpropagation works in a sound way, should obey the -- laws: -- --
-- Op '[Int, Bool] Double ---- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
-- >>> runOp (op0 10) Ø -- (10, Ø) ---- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
-- >>> gradOp' (opConst 10) (1 ::< 2 ::< 3 ::< Ø) -- (10, 0 ::< 0 ::< 0 ::< Ø) --opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
-- idOp = opIso id id --idOp :: Op '[a] a -- | A version of opConst taking explicit Length, indicating -- the number of inputs and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use opConst. opConst' :: Every Num as => Length as -> a -> Op as a -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the derivative <math>, it would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that squares its input: -- --
-- square :: Num a => Op '[a] a -- square = op1 $ \x -> (x*x, \d -> 2 * d * x -- ) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
-- mul :: Num a => Op '[a, a] a -- mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d) -- ) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
-- >>> gradOp' opCoerce (Identity 5) :: (Int, Identity Int) -- (5, Identity 1) ---- --
-- opCoerce = opIso coerce coerce --opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
-- >>> gradOp' opTup (1 ::< 2 ::< 3 ::< Ø) -- (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø) --opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. 
gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | Construct a two element Prod. Since the precedence of (:>) is -- higher than (:<), we can conveniently write lists like: -- --
-- >>> a :< b :> c ---- -- Which is identical to: -- --
-- >>> a :< b :< c :< Ø --infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | A Prod of simple Haskell types. type Tuple = Prod * I -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a class Reifies k (s :: k) a | s -> a instance Numeric.Backprop.Explicit.BVGroup s '[] (GHC.Generics.K1 i a) (GHC.Generics.K1 i (Numeric.Backprop.Internal.BVar s a)) instance Numeric.Backprop.Explicit.BVGroup s as i o => Numeric.Backprop.Explicit.BVGroup s as (GHC.Generics.M1 p c i) (GHC.Generics.M1 p c o) instance Numeric.Backprop.Explicit.BVGroup s '[] GHC.Generics.V1 GHC.Generics.V1 instance Numeric.Backprop.Explicit.BVGroup s '[] GHC.Generics.U1 GHC.Generics.U1 instance (Data.Reflection.Reifies s Numeric.Backprop.Internal.W, Numeric.Backprop.Explicit.BVGroup s as i1 o1, Numeric.Backprop.Explicit.BVGroup s bs i2 o2, cs ~ (as Type.Family.List.++ bs), Type.Class.Known.Known Data.Type.Length.Length as) => Numeric.Backprop.Explicit.BVGroup s (i1 () : i2 () : cs) (i1 GHC.Generics.:*: i2) (o1 GHC.Generics.:*: o2) instance (Data.Reflection.Reifies s Numeric.Backprop.Internal.W, Numeric.Backprop.Explicit.BVGroup s as i1 o1, Numeric.Backprop.Explicit.BVGroup s bs i2 o2, cs ~ (as Type.Family.List.++ bs), Type.Class.Known.Known Data.Type.Length.Length as) => Numeric.Backprop.Explicit.BVGroup s (i1 () : i2 () : cs) (i1 GHC.Generics.:+: i2) (o1 GHC.Generics.:+: o2) -- | Provides the exact same API as Numeric.Backprop, except -- requiring Num instances for all types involved instead of -- Backprop instances. -- -- This was the original API of the library (for version 0.1). -- -- Num is strictly more powerful than Backprop, and is a -- stronger constraint on types than is necessary for proper -- backpropagating. 
In particular, fromInteger is a problem for -- many types, preventing useful backpropagation for lists, -- variable-length vectors (like Data.Vector) and variable-size -- matrices from linear algebra libraries like hmatrix and -- accelerate. -- -- However, this module might be useful in situations where you are -- working with external types with Num instances, and you want to -- avoid writing orphan instances for external types. -- -- If you have external types that are not Num instances, consider -- instead Numeric.Backprop.External. -- -- If you need a Num instance for tuples, you can use the -- canonical 2- and 3-tuples for the library in -- Numeric.Backprop.Tuple. If you need one for larger tuples, -- consider making a custom product type instead (making Num instances -- with something like -- <https://hackage.haskell.org/package/one-liner-instances -- one-liner-instances>). You can also use the orphan instances in the -- NumInstances package (in particular, -- Data.NumInstances.Tuple) if you are writing an application and -- do not have to worry about orphan instances. -- -- See Numeric.Backprop for fuller documentation on using these -- functions. module Numeric.Backprop.Num -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar): -- --
-- (^.) :: a -> Lens' a b -> b -- (^^.) :: BVar s a -> Lens' a b -> BVar s b ---- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- If you have control over your data type definitions, you can also use -- splitBV and joinBV to manipulate data types by easily -- extracting fields out of a BVar of data types and creating -- BVars of data types out of BVars of their fields. See -- Numeric.Backprop#hkd for a tutorial on this use pattern. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. See -- https://github.com/mstksg/backprop/wiki/Equipping-your-Library-with-Backprop -- for a detailed guide. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | backprop, but with Num constraints instead of -- Backprop constraints. -- -- See module documentation for Numeric.Backprop.Num for -- information on using this with tuples. backprop :: (Num a, Num b) => (forall s. 
Reifies s W => BVar s a -> BVar s b) -> a -> (b, a) -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents. -- -- Benchmarks show that this should have virtually no overhead over -- directly writing a a -> b. BVar is, in this -- situation, a zero-cost abstraction, performance-wise. -- -- See documentation of backprop for more information. evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> b -- | gradBP, but with Num constraints instead of -- Backprop constraints. gradBP :: (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a -- | backpropWith, but with Num constraints instead of -- Backprop constraints. -- -- See module documentation for Numeric.Backprop.Num for -- information on using this with tuples. backpropWith :: Num a => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b -> b) -> (b, a) -- | evalBP but with no arguments. Useful when everything is just -- given through constVar. evalBP0 :: (forall s. Reifies s W => BVar s a) -> a -- | backprop2, but with Num constraints instead of -- Backprop constraints. backprop2 :: (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (a, b)) -- | evalBP for a two-argument function. See backprop2 for -- notes. evalBP2 :: (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> c -- | gradBP2, but with Num constraints instead of -- Backprop constraints. gradBP2 :: (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b) -- | backpropWith2, but with Num constraints instead of -- Backprop constraints. backpropWith2 :: (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c -> c) -> (c, (a, b)) -- | backpropN, but with Num constraints instead of -- Backprop constraints. -- -- The Every Num as in the constraint says that -- every value in the type-level list as must have a Num -- instance. 
This means you can use, say, '[Double, Float, Int], -- but not '[Double, Bool, String]. -- -- If you stick to concrete, monomorphic usage of this (with -- specific types, typed into source code, known at compile-time), then -- Every Num as should be fulfilled automatically. backpropN :: (Every Num as, Known Length as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, Tuple as) -- | evalBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. evalBPN :: forall as b. () => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> b -- | gradBPN, but with Num constraints instead of -- Backprop constraints. gradBPN :: (Every Num as, Known Length as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> Tuple as -- | backpropWithN, but with Num constraints instead of -- Backprop constraints. -- -- See backpropN for information on the Every constraint. backpropWithN :: (Every Num as, Known Length as) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b -> b) -> (b, Tuple as) class EveryC k c as => Every k (c :: k -> Constraint) (as :: [k]) -- | Lift a value into a BVar representing a constant value. -- -- This value will not be considered an input, and its gradients will not -- be backpropagated. constVar :: a -> BVar s a -- | Shorter alias for constVar, inspired by the ad library. auto :: a -> BVar s a -- | Coerce a BVar contents. Useful for things like newtype -- wrappers. coerceVar :: Coercible a b => BVar s a -> BVar s b -- | ^^., but with Num constraints instead of -- Backprop constraints. (^^.) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Lens' b a -> BVar s a infixl 8 ^^. -- | .~~, but with Num constraints instead of -- Backprop constraints. (.~~) :: (Num a, Num b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b infixl 8 .~~ -- | ^^?, but with Num constraints instead of -- Backprop constraints. 
-- -- Note that many automatically-generated prisms by the lens -- package use tuples, which cannot work with this by default (because -- tuples do not have a Num instance). -- -- If you are writing an application or don't have to worry about orphan -- instances, you can pull in the orphan instances from -- NumInstances. Alternatively, you can chain those prisms with -- conversions to the anonymous canonical strict tuple types in -- Numeric.Backprop.Tuple, which do have Num instances. -- --
-- myPrism :: Prism' c (a, b) -- myPrism . iso tupT2 t2Tup :: Prism' c (T2 a b) --(^^?) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> Maybe (BVar s a) -- | ^^.., but with Num constraints instead of -- Backprop constraints. (^^..) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> [BVar s a] -- | ^^?!, but with Num constraints instead of -- Backprop constraints. -- -- Like ^^?!, is *UNSAFE*. (^^?!) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> BVar s a -- | viewVar, but with Num constraints instead of -- Backprop constraints. viewVar :: forall b a s. (Num a, Reifies s W) => Lens' b a -> BVar s b -> BVar s a -- | setVar, but with Num constraints instead of -- Backprop constraints. setVar :: forall a b s. (Num a, Num b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b -- | sequenceVar, but with Num constraints instead of -- Backprop constraints. sequenceVar :: (Traversable t, Num a, Reifies s W) => BVar s (t a) -> t (BVar s a) -- | collectVar, but with Num constraints instead of -- Backprop constraints. -- -- If you are using a list or vector, I recommend using -- vector-sized instead: it's a fixed-length vector type with a -- very appropriate Num instance! collectVar :: (Foldable t, Functor t, Num a, Num (t a), Reifies s W) => t (BVar s a) -> BVar s (t a) -- | previewVar, but with Num constraints instead of -- Backprop constraints. -- -- See documentation for ^^? for more information and important -- notes. previewVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> Maybe (BVar s a) -- | toListOfVar, but with Num constraints instead of -- Backprop constraints. toListOfVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> [BVar s a] -- | isoVar, but with Num constraints instead of -- Backprop constraints. 
isoVar :: (Num a, Num b, Reifies s W) => (a -> b) -> (b -> a) -> BVar s a -> BVar s b -- | isoVar, but with Num constraints instead of -- Backprop constraints. isoVar2 :: (Num a, Num b, Num c, Reifies s W) => (a -> b -> c) -> (c -> (a, b)) -> BVar s a -> BVar s b -> BVar s c -- | isoVar3, but with Num constraints instead of -- Backprop constraints. isoVar3 :: (Num a, Num b, Num c, Num d, Reifies s W) => (a -> b -> c -> d) -> (d -> (a, b, c)) -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | isoVarN, but with Num constraints instead of -- Backprop constraints. isoVarN :: (Every Num as, Known Length as, Num b, Reifies s W) => (Tuple as -> b) -> (b -> Tuple as) -> Prod (BVar s) as -> BVar s b -- | liftOp, but with Num constraints instead of -- Backprop constraints. liftOp :: (Every Num as, Known Length as, Num b, Reifies s W) => Op as b -> Prod (BVar s) as -> BVar s b -- | liftOp1, but with Num constraints instead of -- Backprop constraints. liftOp1 :: (Num a, Num b, Reifies s W) => Op '[a] b -> BVar s a -> BVar s b -- | liftOp2, but with Num constraints instead of -- Backprop constraints. liftOp2 :: (Num a, Num b, Num c, Reifies s W) => Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c -- | liftOp3, but with Num constraints instead of -- Backprop constraints. liftOp3 :: (Num a, Num b, Num c, Num d, Reifies s W) => Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
-- Op '[Int, Bool] Double ---- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op0, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
-- >>> gradOp' (op0 10) Ø -- (10, Ø) ---- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
-- >>> gradOp' (opConst 10) (1 ::< 2 ::< 3 ::< Ø) -- (10, 0 ::< 0 ::< 0 ::< Ø) --opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
-- idOp = opIso id id --idOp :: Op '[a] a -- | A version of opConst taking explicit Length, indicating -- the number of inputs and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use opConst. opConst' :: Every Num as => Length as -> a -> Op as a -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the derivative <math>, it would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that squares its input: -- --
-- square :: Num a => Op '[a] a -- square = op1 $ \x -> (x*x, \d -> 2 * d * x -- ) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
-- mul :: Num a => Op '[a, a] a -- mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d) -- ) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
-- >>> gradOp' opCoerce (Identity 5) :: (Int, Identity Int) -- (5, Identity 1) ---- --
-- opCoerce = opIso coerce coerce --opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
-- >>> gradOp' opTup (1 ::< 2 ::< 3 ::< Ø) -- (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø) --opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. 
gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | Construct a two element Prod. Since the precedence of (:>) is -- higher than (:<), we can conveniently write lists like: -- --
-- >>> a :< b :> c ---- -- Which is identical to: -- --
-- >>> a :< b :< c :< Ø --infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | A Prod of simple Haskell types. type Tuple = Prod * I -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a class Reifies k (s :: k) a | s -> a -- | Automatic differentiation and backpropagation. -- -- Main idea: Write a function computing what you want, and the library -- automatically provides the gradient of that function as well, for usage -- with gradient descent and other training methods. -- -- In more detail: instead of working directly with values to produce -- your result, you work with BVars containing those values. -- Working with these BVars is made smooth with the usage of -- lenses and other combinators, and libraries can offer operations on -- BVars instead of those on normal types directly. -- -- Then, you can use: -- --
-- evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> b) ---- -- to turn a BVar function into the function on actual values -- a -> b. This has virtually zero overhead over writing the -- actual function directly. -- -- Then, there's: -- --
-- gradBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> a) ---- -- to automatically get the gradient, as well, for a given input. -- -- See the README for more information and links to demonstrations -- and tutorials, or dive straight in by reading the docs for -- BVar. -- -- If you are writing a library, see -- https://github.com/mstksg/backprop/wiki/Equipping-your-Library-with-Backprop -- for a guide for equipping your library with backpropagatable operations. -- -- In the original version 0.1, this module required Num instances -- for methods instead of Backprop instances. This interface is -- still available in Numeric.Backprop.Num, which has the same API -- as this module, except with Num constraints on all values -- instead of Backprop constraints. -- -- See Prelude.Backprop.Explicit for a version allowing you to -- provide zero, add, and one explicitly, which can -- be useful when attempting to avoid orphan instances or when mixing -- both Backprop and Num styles. module Numeric.Backprop -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar): -- --
-- (^.) :: a -> Lens' a b -> b -- (^^.) :: BVar s a -> Lens' a b -> BVar s b ---- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- If you have control over your data type definitions, you can also use -- splitBV and joinBV to manipulate data types by easily -- extracting fields out of a BVar of data types and creating -- BVars of data types out of BVars of their fields. See -- Numeric.Backprop#hkd for a tutorial on this use pattern. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. See -- https://github.com/mstksg/backprop/wiki/Equipping-your-Library-with-Backprop -- for a detailed guide. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | Class of values that can be backpropagated in general. -- -- For instances of Num, these methods can be given by -- zeroNum, addNum, and oneNum. There are also -- generic options given in Numeric.Backprop.Class for functors, -- IsList instances, and Generic instances. -- --
-- instance Backprop Double where -- zero = zeroNum -- add = addNum -- one = oneNum ---- -- If you leave the body of an instance declaration blank, GHC Generics -- will be used to derive instances if the type has a single constructor -- and each field is an instance of Backprop. -- -- To ensure that backpropagation works in a sound way, should obey the -- laws: -- --
-- x ^. myLens ---- -- would extract a piece of x :: b, specified by myLens :: -- Lens' b a. The result has type a. -- --
-- xVar ^^. myLens ---- -- would extract a piece out of xVar :: BVar s b (a -- BVar holding a b), specified by myLens :: Lens' b -- a. The result has type BVar s a (a BVar -- holding a a) -- -- This is the main way to pull out values from BVar of container -- types. -- -- If you have control of your data type definitions, consider using -- splitBV, which lets you break out BVars of values into -- BVars of their individual fields automatically without -- requiring lenses. -- -- WARNING: Do not use with any lenses that operate "numerically" -- on the contents (like multiplying). (^^.) :: forall b a s. (Backprop a, Reifies s W) => BVar s b -> Lens' b a -> BVar s a infixl 8 ^^. -- | An infix version of setVar, meant to evoke parallels to -- .~ from lens. -- -- With normal values, you can set something in a value with a -- lens: -- --
-- x & myLens .~ y ---- -- would "set" a part of x :: b, specified by myLens :: -- Lens' b a, to a new value y :: a. -- --
-- xVar & myLens .~~ yVar ---- -- would "set" a part of xVar :: BVar s b (a BVar -- holding a b), specified by myLens :: Lens' b -- a, to a new value given by yVar :: BVar s a. The -- result is a new (updated) value of type BVar s b. -- -- This is the main way to set values inside BVars of container -- types. (.~~) :: (Backprop a, Backprop b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b infixl 8 .~~ -- | An infix version of previewVar, meant to evoke parallels to -- ^? from lens. -- -- With normal values, you can (potentially) extract something from that -- value with a lens: -- --
-- x ^? myPrism ---- -- would (potentially) extract a piece of x :: b, specified by -- myPrism :: Traversal' b a. The result has type -- Maybe a. -- --
-- xVar ^^? myPrism ---- -- would (potentially) extract a piece out of xVar :: BVar s -- b (a BVar holding a b), specified by myPrism -- :: Prism' b a. The result has type Maybe (BVar -- s a) (Maybe a BVar holding a a). -- -- This is intended to be used with Prism's (which hits at most -- one target), but will actually work with any Traversal'. -- If the traversal hits more than one target, the first one found will -- be extracted. -- -- This can be used to "pattern match" on BVars, by using prisms -- on constructors. (^^?) :: forall b a s. (Backprop a, Reifies s W) => BVar s b -> Traversal' b a -> Maybe (BVar s a) -- | An infix version of toListOfVar, meant to evoke parallels to -- ^.. from lens. -- -- With normal values, you can extract all targets of a Traversal -- from that value with a: -- --
-- x ^.. myTraversal ---- -- would extract all targets inside of x :: b, specified by -- myTraversal :: Traversal' b a. The result has type -- [a]. -- --
-- xVar ^^.. myTraversal ---- -- would extract all targets inside of xVar :: BVar s b -- (a BVar holding a b), specified by myTraversal :: -- Traversal' b a. The result has type [BVar s a] (A -- list of BVars holding as). (^^..) :: forall b a s. (Backprop a, Reifies s W) => BVar s b -> Traversal' b a -> [BVar s a] -- | An *UNSAFE* version of previewVar assuming that it is there. -- -- Is undefined if the Traversal hits no targets. -- -- Is essentially ^^? with fromJust, or ^^.. with -- head. (^^?!) :: forall b a s. (Backprop a, Reifies s W) => BVar s b -> Traversal' b a -> BVar s a -- | Using a Lens', extract a value inside a BVar. -- Meant to evoke parallels to view from lens. -- -- If you have control of your data type definitions, consider using -- splitBV, which lets you break out BVars of values into -- BVars of their individual fields automatically without -- requiring lenses. -- -- See documentation for ^^. for more information. viewVar :: forall a b s. (Backprop a, Reifies s W) => Lens' b a -> BVar s b -> BVar s a -- | Using a Lens', set a value inside a BVar. Meant -- to evoke parallels to "set" from lens. -- -- See documentation for .~~ for more information. setVar :: (Backprop a, Backprop b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b -- | Extract all of the BVars out of a Traversable container -- of BVars. -- -- Note that this associates gradients in order of occurrence in the -- original data structure; the second item in the gradient is assumed to -- correspond with the second item in the input, etc.; this can cause -- unexpected behavior in Foldable instances that don't have a -- fixed number of items. sequenceVar :: (Traversable t, Backprop a, Reifies s W) => BVar s (t a) -> t (BVar s a) -- | Collect all of the BVars in a container into a BVar of -- that container's contents. 
 -- -- Note that this associates gradients in order of occurrence in the -- original data structure; the second item in the total derivative and -- gradient is assumed to correspond with the second item in the input, -- etc.; this can cause unexpected behavior in Foldable instances -- that don't have a fixed number of items. collectVar :: (Foldable t, Functor t, Backprop a, Backprop (t a), Reifies s W) => t (BVar s a) -> BVar s (t a) -- | Using a Traversal', extract a single value inside a -- BVar, if it exists. If more than one traversal target exists, -- returns the first. Meant to evoke parallels to preview from -- lens. Really only intended to be used with Prism's, or -- up-to-one target traversals. -- -- See documentation for ^^? for more information. previewVar :: forall b a s. (Backprop a, Reifies s W) => Traversal' b a -> BVar s b -> Maybe (BVar s a) -- | Using a Traversal', extract all targeted values inside a -- BVar. Meant to evoke parallels to toListOf from lens. -- -- See documentation for ^^.. for more information. toListOfVar :: forall b a s. (Backprop a, Reifies s W) => Traversal' b a -> BVar s b -> [BVar s a] -- | Useful pattern for constructing and deconstructing BVars of -- two-tuples. -- | Useful pattern for constructing and deconstructing BVars -- three-tuples. -- | Convert the value inside a BVar using a given isomorphism. -- Useful for things like constructors. -- -- If you have control of your data type definitions, consider using -- joinBV, which lets you use your data type constructors -- themselves to join together BVars as their fields. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. isoVar :: (Backprop a, Backprop b, Reifies s W) => (a -> b) -> (b -> a) -> BVar s a -> BVar s b -- | Convert the values inside two BVars using a given isomorphism. -- Useful for things like constructors. 
See isoVar for caveats. -- -- If you have control of your data type definitions, consider using -- joinBV, which lets you use your data type constructors -- themselves to join together BVars as their fields. isoVar2 :: (Backprop a, Backprop b, Backprop c, Reifies s W) => (a -> b -> c) -> (c -> (a, b)) -> BVar s a -> BVar s b -> BVar s c -- | Convert the values inside three BVars using a given -- isomorphism. Useful for things like constructors. See isoVar -- for caveats. isoVar3 :: (Backprop a, Backprop b, Backprop c, Backprop d, Reifies s W) => (a -> b -> c -> d) -> (d -> (a, b, c)) -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | Convert the values inside a tuple of BVars using a given -- isomorphism. Useful for things like constructors. See isoVar -- for caveats. -- -- If you have control of your data type definitions, consider using -- joinBV, which lets you use your data type constructors -- themselves to join together BVars as their fields. isoVarN :: (Every Backprop as, Known Length as, Backprop b, Reifies s W) => (Tuple as -> b) -> (b -> Tuple as) -> Prod (BVar s) as -> BVar s b -- | Lift an Op with an arbitrary number of inputs to a function on -- the appropriate number of BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information, and -- Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. liftOp :: (Every Backprop as, Known Length as, Backprop b, Reifies s W) => Op as b -> Prod (BVar s) as -> BVar s b -- | Lift an Op with a single input to be a function on a single -- BVar. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. 
liftOp1 :: (Backprop a, Backprop b, Reifies s W) => Op '[a] b -> BVar s a -> BVar s b -- | Lift an Op with two inputs to be a function on a two -- BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp2 :: (Backprop a, Backprop b, Backprop c, Reifies s W) => Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c -- | Lift an Op with three inputs to be a function on a three -- BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp3 :: (Backprop a, Backprop b, Backprop c, Backprop d, Reifies s W) => Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | Split out a BVar of "higher-kinded data type", a la -- http://reasonablypolymorphic.com/blog/higher-kinded-data/ -- -- Lets you take BVar of a value into a separate BVar of -- every field of that value. -- -- See Numeric.Backprop#hkd for a tutorial on usage. -- -- This will work with all data types made with a single constructor, -- whose fields are all instances of Backprop, where the type -- itself has an instance of Backprop. The type also must derive -- Generic. splitBV :: (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Backprop (Rep (z f) ()), Every Backprop as, Known Length as, Reifies s W) => BVar s (z f) -> z (BVar s) -- | Split out a BVar of "higher-kinded data type", a la -- http://reasonablypolymorphic.com/blog/higher-kinded-data/ -- -- It lets you take a BVar of every field of a value, and join -- them into a BVar of that value. -- -- See Numeric.Backprop#hkd for a tutorial on usage. 
-- -- This will work with all data types made with a single constructor, -- whose fields are all instances of Backprop, where the type -- itself has an instance of Backprop. joinBV :: (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Backprop (z f), Every Backprop as, Known Length as, Reifies s W) => z (BVar s) -> BVar s (z f) -- | Helper class for generically "splitting" and "joining" BVars -- into constructors. See splitBV and joinBV. -- -- See Numeric.Backprop#hkd for a tutorial on how to use this. -- -- Instances should be available for types made with one constructor -- whose fields are all instances of Backprop, with a -- Generic instance. class BVGroup s as i o | o -> i, i -> as -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
-- Op '[Int, Bool] Double ---- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op0, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
-- >>> gradOp' (op0 10) Ø -- (10, Ø) ---- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
-- >>> gradOp' (opConst 10) (1 ::< 2 ::< 3 ::< Ø) -- (10, 0 ::< 0 ::< 0 ::< Ø) --opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
-- idOp = opIso id id --idOp :: Op '[a] a -- | A version of opConst taking explicit Length, indicating -- the number of inputs and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use opConst. opConst' :: Every Num as => Length as -> a -> Op as a -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the derivative <math>, it would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that squares its input: -- --
-- square :: Num a => Op '[a] a -- square = op1 $ \x -> (x*x, \d -> 2 * d * x -- ) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
-- mul :: Num a => Op '[a, a] a -- mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d) -- ) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
-- >>> gradOp' opCoerce (Identity 5) :: (Int, Identity Int) -- (5, Identity 1) ---- --
-- opCoerce = opIso coerce coerce --opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
-- >>> gradOp' opTup (1 ::< 2 ::< 3 ::< Ø) -- (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø) --opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. 
gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | Construct a two element Prod. Since the precedence of (:>) is -- higher than (:<), we can conveniently write lists like: -- --
-- >>> a :< b :> c ---- -- Which is identical to: -- --
-- >>> a :< b :< c :< Ø --infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | A Prod of simple Haskell types. type Tuple = Prod * I -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a class Reifies k (s :: k) a | s -> a -- | Provides "explicit" versions of all of the functions in -- Prelude.Backprop. Instead of relying on a Backprop -- instance, allows you to manually provide zero, add, and -- one on a per-value basis. module Prelude.Backprop.Explicit -- | sum, but taking explicit add and zero. sum :: (Foldable t, Functor t, Num a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | product, but taking explicit add and zero. product :: (Foldable t, Functor t, Fractional a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | length, but taking explicit add and zero. length :: (Foldable t, Num b, Reifies s W) => AddFunc (t a) -> ZeroFunc (t a) -> ZeroFunc b -> BVar s (t a) -> BVar s b -- | minimum, but taking explicit add and zero. minimum :: (Foldable t, Functor t, Ord a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | maximum, but taking explicit add and zero. maximum :: (Foldable t, Functor t, Ord a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | traverse, but taking explicit add and zero. traverse :: (Traversable t, Applicative f, Foldable f, Reifies s W) => AddFunc a -> AddFunc b -> AddFunc (t b) -> ZeroFunc a -> ZeroFunc b -> ZeroFunc (t b) -> ZeroFunc (f (t b)) -> (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | toList, but taking explicit add and zero. toList :: (Traversable t, Reifies s W) => AddFunc a -> ZeroFunc a -> BVar s (t a) -> [BVar s a] -- | mapAccumL, but taking explicit add and zero. 
mapAccumL :: (Traversable t, Reifies s W) => AddFunc b -> AddFunc c -> ZeroFunc b -> ZeroFunc c -> ZeroFunc (t c) -> (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | mapAccumR, but taking explicit add and zero. mapAccumR :: (Traversable t, Reifies s W) => AddFunc b -> AddFunc c -> ZeroFunc b -> ZeroFunc c -> ZeroFunc (t c) -> (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | fmap, but taking explicit add and zero. fmap :: (Traversable f, Reifies s W) => AddFunc a -> AddFunc b -> ZeroFunc a -> ZeroFunc b -> ZeroFunc (f b) -> (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | pure, but taking explicit add and zero. pure :: (Foldable t, Applicative t, Reifies s W) => AddFunc a -> ZeroFunc a -> ZeroFunc (t a) -> BVar s a -> BVar s (t a) -- | liftA2, but taking explicit add and zero. liftA2 :: (Traversable f, Applicative f, Reifies s W) => AddFunc a -> AddFunc b -> AddFunc c -> ZeroFunc a -> ZeroFunc b -> ZeroFunc c -> ZeroFunc (f c) -> (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | liftA3, but taking explicit add and zero. liftA3 :: (Traversable f, Applicative f, Reifies s W) => AddFunc a -> AddFunc b -> AddFunc c -> AddFunc d -> ZeroFunc a -> ZeroFunc b -> ZeroFunc c -> ZeroFunc d -> ZeroFunc (f d) -> (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | fromIntegral, but taking explicit add and zero. fromIntegral :: (Integral a, Integral b, Reifies s W) => AddFunc a -> ZeroFunc b -> BVar s a -> BVar s b -- | realToFrac, but taking explicit add and zero. realToFrac :: (Fractional a, Real a, Fractional b, Real b, Reifies s W) => AddFunc a -> ZeroFunc b -> BVar s a -> BVar s b -- | Coerce items inside a BVar. coerce :: Coercible a b => BVar s a -> BVar s b -- | Some lifted versions of common functions found in Prelude (or -- base in general). 
-- -- This module is intended to be a catch-all one, so feel free to suggest -- other functions or submit a PR if you think one would make sense. -- -- See Prelude.Backprop.Num for a version with Num -- constraints instead of Backprop constraints, and -- Prelude.Backprop.Explicit for a version allowing you to provide -- zero, add, and one explicitly. module Prelude.Backprop -- | Lifted sum. More efficient than going through toList. sum :: (Foldable t, Functor t, Backprop (t a), Backprop a, Num a, Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted product. More efficient than going through -- toList. product :: (Foldable t, Functor t, Backprop (t a), Backprop a, Fractional a, Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted length. More efficient than going through toList. length :: (Foldable t, Backprop (t a), Backprop b, Num b, Reifies s W) => BVar s (t a) -> BVar s b -- | Lifted minimum. Undefined for situations where minimum -- would be undefined. More efficient than going through toList. minimum :: (Foldable t, Functor t, Backprop a, Ord a, Backprop (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted maximum. Undefined for situations where maximum -- would be undefined. More efficient than going through toList. maximum :: (Foldable t, Functor t, Backprop a, Ord a, Backprop (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted traverse. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Functors. traverse :: (Traversable t, Applicative f, Foldable f, Backprop a, Backprop b, Backprop (f (t b)), Backprop (t b), Reifies s W) => (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | Lifted version of toList. Takes a BVar of a -- Traversable of items and returns a list of BVars for -- each item. 
-- -- You can use this to implement "lifted" versions of Foldable -- methods like foldr, foldl', etc.; however, sum, -- product, length, minimum, and maximum have -- more efficient implementations than simply minimum . -- toList. toList :: (Traversable t, Backprop a, Reifies s W) => BVar s (t a) -> [BVar s a] -- | Lifted version of mapAccumL. mapAccumL :: (Traversable t, Backprop b, Backprop c, Backprop (t c), Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | Lifted version of mapAccumR. mapAccumR :: (Traversable t, Backprop b, Backprop c, Backprop (t c), Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | Lifted fmap. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Functors. fmap :: (Traversable f, Backprop a, Backprop b, Backprop (f b), Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | Alias for fmap. (<$>) :: (Traversable f, Backprop a, Backprop b, Backprop (f b), Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | Lifted pure. pure :: (Foldable t, Applicative t, Backprop (t a), Backprop a, Reifies s W) => BVar s a -> BVar s (t a) -- | Lifted liftA2. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Applicatives. liftA2 :: (Traversable f, Applicative f, Backprop a, Backprop b, Backprop c, Backprop (f c), Reifies s W) => (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | Lifted liftA3. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Applicatives. liftA3 :: (Traversable f, Applicative f, Backprop a, Backprop b, Backprop c, Backprop d, Backprop (f d), Reifies s W) => (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | Lifted conversion between two Integral instances. 
fromIntegral :: (Backprop a, Integral a, Backprop b, Integral b, Reifies s W) => BVar s a -> BVar s b -- | Lifted conversion between two Fractional and Real -- instances. realToFrac :: (Backprop a, Fractional a, Real a, Backprop b, Fractional b, Real b, Reifies s W) => BVar s a -> BVar s b -- | Coerce items inside a BVar. coerce :: Coercible a b => BVar s a -> BVar s b -- | Provides the exact same API as Prelude.Backprop, except -- requiring Num instances for all types involved instead of -- Backprop instances. module Prelude.Backprop.Num -- | sum, but with Num constraints instead of -- Backprop constraints. sum :: (Foldable t, Functor t, Num (t a), Num a, Reifies s W) => BVar s (t a) -> BVar s a -- | product, but with Num constraints instead of -- Backprop constraints. product :: (Foldable t, Functor t, Num (t a), Fractional a, Reifies s W) => BVar s (t a) -> BVar s a -- | length, but with Num constraints instead of -- Backprop constraints. length :: (Foldable t, Num (t a), Num b, Reifies s W) => BVar s (t a) -> BVar s b -- | minimum, but with Num constraints instead of -- Backprop constraints. minimum :: (Foldable t, Functor t, Num a, Ord a, Num (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | maximum, but with Num constraints instead of -- Backprop constraints. maximum :: (Foldable t, Functor t, Num a, Ord a, Num (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | traverse, but with Num constraints instead of -- Backprop constraints. traverse :: (Traversable t, Applicative f, Foldable f, Num a, Num b, Num (f (t b)), Num (t b), Reifies s W) => (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | toList, but with Num constraints instead of -- Backprop constraints. toList :: (Traversable t, Num a, Reifies s W) => BVar s (t a) -> [BVar s a] -- | mapAccumL, but with Num constraints instead of -- Backprop constraints. 
mapAccumL :: (Traversable t, Num b, Num c, Num (t c), Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | mapAccumR, but with Num constraints instead of -- Backprop constraints. mapAccumR :: (Traversable t, Num b, Num c, Num (t c), Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | fmap, but with Num constraints instead of -- Backprop constraints. fmap :: (Traversable f, Num a, Num b, Num (f b), Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | Alias for fmap. (<$>) :: (Traversable f, Num a, Num b, Num (f b), Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | pure, but with Num constraints instead of -- Backprop constraints. pure :: (Foldable t, Applicative t, Num (t a), Num a, Reifies s W) => BVar s a -> BVar s (t a) -- | liftA2, but with Num constraints instead of -- Backprop constraints. liftA2 :: (Traversable f, Applicative f, Num a, Num b, Num c, Num (f c), Reifies s W) => (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | liftA3, but with Num constraints instead of -- Backprop constraints. liftA3 :: (Traversable f, Applicative f, Num a, Num b, Num c, Num d, Num (f d), Reifies s W) => (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | fromIntegral, but with Num constraints instead of -- Backprop constraints. fromIntegral :: (Integral a, Integral b, Reifies s W) => BVar s a -> BVar s b -- | realToFrac, but with Num constraints instead of -- Backprop constraints. realToFrac :: (Fractional a, Real a, Fractional b, Real b, Reifies s W) => BVar s a -> BVar s b -- | Coerce items inside a BVar. coerce :: Coercible a b => BVar s a -> BVar s b