-- Hoogle documentation, generated by Haddock -- See Hoogle, http://www.haskell.org/hoogle/ -- | Heterogeneous automatic differentiation (backpropagation) -- -- Write your functions to compute your result, and the library will -- automatically generate functions to compute your gradient. -- -- Implements heterogeneous reverse-mode automatic differentiation, -- commonly known as "backpropagation". -- -- See README.md @package backprop @version 0.1.5.2 -- | Provides the Op type and combinators, which represent -- differentiable functions/operations on values, and are used internally -- by the library to perform back-propagation. -- -- Users of the library can ignore this module for the most part. Library -- authors defining backpropagatable primitives for their functions are -- recommended to simply use op0, op1, op2, -- op3, which are re-exported in Numeric.Backprop. However, -- authors who want more options in defining their primitive functions -- might find some of these functions useful. -- -- Note that if your entire function is a single non-branching -- composition of functions, Op and its utility functions alone -- are sufficient to differentiate/backprop. However, this happens rarely -- in practice. module Numeric.Backprop.Op -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
-- Op '[Int, Bool] Double ---- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool \(2\), then, -- mathematically, it is akin to a: -- -- \( f : \mathbb{Z} \times 2 \rightarrow \mathbb{R} \) -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op0, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | A Prod of simple Haskell types. type Tuple = Prod * I newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a -- | Run the function that an Op encodes, to get the resulting -- output and also its gradient with respect to the inputs. -- --
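-- For instance, a minimal sketch of building and running an Op by hand,
-- using only this module's exports (the name mulOp is purely for
-- illustration, mirroring the mul example further down):
--
-- import Numeric.Backprop.Op
--
-- mulOp :: Num a => Op '[a, a] a
-- mulOp = op2 $ \x y -> (x*y, \d -> (d*y, x*d))
--
-- >>> runOp mulOp (3 ::< 5 ::< Ø)
-- (15, 5 ::< 3 ::< Ø)
-- >>> evalOp mulOp (3 ::< 5 ::< Ø)
-- 15
--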
-- >>> runOp (op2 (*)) (3 ::< 5 ::< Ø) -- (15, 5 ::< 3 ::< Ø) --runOp :: Num a => Op as a -> Tuple as -> (a, Tuple as) -- | Run the function that an Op encodes, to get the result. -- --
-- >>> evalOp (op2 (*)) (3 ::< 5 ::< Ø) -- 15 --evalOp :: Op as a -> Tuple as -> a -- | Run the function that an Op encodes, and get the gradient of -- the output with respect to the inputs. -- --
-- >>> gradOp (op2 (*)) (3 ::< 5 ::< Ø) -- 5 ::< 3 ::< Ø -- -- the gradient of x*y is (y, x) ---- --
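-- This is the special case of gradOpWith (documented below) where the
-- total derivative of the result is 1. With a different total
-- derivative, the gradient is scaled accordingly; for instance,
-- continuing the example above:
--
-- >>> gradOpWith (op2 (*)) (3 ::< 5 ::< Ø) 2
-- 10 ::< 6 ::< Ø
--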
-- gradOp o xs = gradOpWith o xs 1 --gradOp :: Num a => Op as a -> Tuple as -> Tuple as -- | Get the gradient function that an Op encodes, with a third -- argument expecting the total derivative of the result. -- -- See the module documentation for Numeric.Backprop.Op for more -- information. gradOpWith :: Op as a -> Tuple as -> a -> Tuple as -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
-- >>> runOp (op0 10) Ø -- (10, Ø) ---- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
-- >>> runOp (opConst 10) (1 ::< 2 ::< 3 ::< Ø) -- (10, 0 ::< 0 ::< 0 ::< Ø) --opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
-- idOp = opIso id id --idOp :: Op '[a] a -- | A version of opConst taking explicit Length, indicating -- the number of inputs and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use opConst. opConst' :: Every Num as => Length as -> a -> Op as a -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and returning the derivative of the input. -- -- If we have -- -- \( y = f(x) \) -- -- then the derivative, \( \frac{dy}{dx} \), would be: -- -- \( \frac{dy}{dx} = f'(x) \) -- -- If our Op represents \(f\), then the second item in the -- resulting tuple should be a function that takes \( \frac{\partial L}{\partial y} \) (the total derivative of -- the final result with respect to \(y\)) and -- returns \( \frac{\partial L}{\partial x} = \frac{\partial L}{\partial y} \frac{dy}{dx} \). -- -- As an example, here is an Op that squares its input: -- --
-- square :: Num a => Op '[a] a -- square = op1 $ \x -> (x*x, \d -> 2 * d * x) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and returning the derivative of the inputs. -- -- If we have -- -- \( z = f(x, y) \) -- -- then the gradient \( \nabla z \) would be: -- -- \( \nabla z = \left( \frac{\partial z}{\partial x}, \frac{\partial z}{\partial y} \right) \) -- -- If our Op represents \(f\), then the second item in the -- resulting tuple should be a function that takes \( \frac{\partial L}{\partial z} \) and -- returns \( \left( \frac{\partial L}{\partial x}, \frac{\partial L}{\partial y} \right) = \left( \frac{\partial L}{\partial z} \frac{\partial z}{\partial x}, \frac{\partial L}{\partial z} \frac{\partial z}{\partial y} \right) \). -- -- As an example, here is an Op that multiplies its inputs: -- --
-- mul :: Num a => Op '[a, a] a -- mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d)) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
-- >>> runOp opCoerce (Identity 5 ::< Ø) :: (Int, Tuple '[Identity Int]) -- (5, Identity 1 ::< Ø) ---- --
-- opCoerce = opIso coerce coerce --opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
-- >>> runOp opTup (1 ::< 2 ::< 3 ::< Ø) -- (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø) --opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the two input values through an isomorphism. -- Useful for things like constructors. See opIso for caveats. opIso2 :: (a -> b -> c) -> (c -> (a, b)) -> Op '[a, b] c -- | An Op that runs the three input values through an isomorphism. -- Useful for things like constructors. See opIso for caveats. opIso3 :: (a -> b -> c -> d) -> (d -> (a, b, c)) -> Op '[a, b, c] d -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b -- | Compose Ops together, like sequence for functions, or -- liftAN. -- -- That is, given an Op as b1, an Op as -- b2, and an Op as b3, it can compose them with an -- Op '[b1,b2,b3] c to create an Op as c. composeOp :: (Every Num as, Known Length as) => Prod (Op as) bs -> Op bs c -> Op as c -- | Convenient wrapper over composeOp for the case where the second -- function only takes one input, so the two Ops can be directly -- piped together, like for .. composeOp1 :: (Every Num as, Known Length as) => Op as b -> Op '[b] c -> Op as c -- | Convenient infix synonym for (flipped) composeOp1. Meant to be -- used just like .: -- --
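-- For instance, a small sketch composing two of the arithmetic Ops
-- documented further down in this module (negateOp and *.), so that the
-- composite computes \x y -> negate (x * y); the name negMul is purely
-- for illustration:
--
-- negMul :: Num a => Op '[a, a] a
-- negMul = negateOp ~. (*.)
--
-- runOp negMul (3 ::< 5 ::< Ø) gives a result of -15 with gradient
-- (-5, -3).
--
-- In general:
--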
-- f :: Op '[b] c -- g :: Op '[a,a] b -- -- f ~. g :: Op '[a, a] c --(~.) :: (Known Length as, Every Num as) => Op '[b] c -> Op as b -> Op as c infixr 9 ~. -- | A version of composeOp taking explicit Length, -- indicating the number of inputs expected and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use composeOp. composeOp' :: Every Num as => Length as -> Prod (Op as) bs -> Op bs c -> Op as c -- | A version of composeOp1 taking explicit Length, -- indicating the number of inputs expected and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use composeOp1. composeOp1' :: Every Num as => Length as -> Op as b -> Op '[b] c -> Op as c -- | Construct a two element Prod. Since the precedence of (:>) is -- higher than (:<), we can conveniently write lists like: -- --
-- >>> a :< b :> c ---- -- Which is identical to: -- --
-- >>> a :< b :< c :< Ø --infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * -- | Op for addition (+.) :: Num a => Op '[a, a] a -- | Op for subtraction (-.) :: Num a => Op '[a, a] a -- | Op for multiplication (*.) :: Num a => Op '[a, a] a -- | Op for negation negateOp :: Num a => Op '[a] a -- | Op for absolute value absOp :: Num a => Op '[a] a -- | Op for signum signumOp :: Num a => Op '[a] a -- | Op for division (/.) :: Fractional a => Op '[a, a] a -- | Op for multiplicative inverse recipOp :: Fractional a => Op '[a] a -- | Op for exp expOp :: Floating a => Op '[a] a -- | Op for the natural logarithm logOp :: Floating a => Op '[a] a -- | Op for square root sqrtOp :: Floating a => Op '[a] a -- | Op for exponentiation (**.) :: Floating a => Op '[a, a] a -- | Op for logBase logBaseOp :: Floating a => Op '[a, a] a -- | Op for sine sinOp :: Floating a => Op '[a] a -- | Op for cosine cosOp :: Floating a => Op '[a] a -- | Op for tangent tanOp :: Floating a => Op '[a] a -- | Op for arcsine asinOp :: Floating a => Op '[a] a -- | Op for arccosine acosOp :: Floating a => Op '[a] a -- | Op for arctangent atanOp :: Floating a => Op '[a] a -- | Op for hyperbolic sine sinhOp :: Floating a => Op '[a] a -- | Op for hyperbolic cosine coshOp :: Floating a => Op '[a] a -- | Op for hyperbolic tangent tanhOp :: Floating a => Op '[a] a -- | Op for hyperbolic arcsine asinhOp :: Floating a => Op '[a] a -- | Op for hyperbolic arccosine acoshOp :: Floating a => Op '[a] a -- | Op for hyperbolic arctangent atanhOp :: Floating a => Op '[a] a instance (Type.Class.Known.Known [*] (Data.Type.Length.Length *) as, Data.Type.Index.Every * GHC.Num.Num as, GHC.Num.Num a) => GHC.Num.Num (Numeric.Backprop.Op.Op as a) instance (Type.Class.Known.Known [*] (Data.Type.Length.Length *) as, Data.Type.Index.Every * GHC.Real.Fractional as, Data.Type.Index.Every * GHC.Num.Num as, GHC.Real.Fractional a) => GHC.Real.Fractional (Numeric.Backprop.Op.Op as a) instance (Type.Class.Known.Known [*] (Data.Type.Length.Length *) as, Data.Type.Index.Every * GHC.Float.Floating as, Data.Type.Index.Every * GHC.Real.Fractional as, Data.Type.Index.Every * GHC.Num.Num as, GHC.Float.Floating a) => GHC.Float.Floating (Numeric.Backprop.Op.Op as a) -- | Automatic differentation and backpropagation. -- -- Main idea: Write a function computing what you want, and the library -- automatically provies the gradient of that function as well, for usage -- with gradient descent and other training methods. -- -- In more detail: instead of working directly with values to produce -- your result, you work with BVars containing those values. -- Working with these BVars is made smooth with the usage of -- lenses and other combinators, and libraries can offer operatons on -- BVars instead of those on normal types directly. -- -- Then, you can use: -- --
-- evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> b) ---- -- to turn a BVar function into the function on actual values -- a -> b. This has virtually zero overhead over writing the -- actual function directly. -- -- Then, there's: -- --
-- gradBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> a) ---- -- to automatically get the gradient, as well, for a given input. -- -- See the README for more information and links to demonstrations -- and tutorials, or dive straight in by reading the docs for -- BVar. module Numeric.Backprop -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar); a comparison with the plain lens -- operators follows the short sketch below. -- --
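-- A minimal end-to-end sketch (the function f here is hypothetical,
-- written using only the Num instance of BVar):
--
-- import Numeric.Backprop
--
-- f :: Reifies s W => BVar s Double -> BVar s Double
-- f x = x * x + 3
--
-- >>> evalBP f 2
-- 7.0
-- >>> gradBP f 2
-- 4.0
-- >>> backprop f 2
-- (7.0,4.0)
--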
-- (^.) :: a -> Lens' a b -> b -- (^^.) :: BVar s a -> Lens' a b -> BVar s b ---- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents, also computing its -- gradient a as well. -- -- The Rank-N type forall s. Reifies s W => ... -- is used to ensure that BVars do not leak out of the context -- (similar to how it is used in Control.Monad.ST), and also as a -- reference to an ephemeral Wengert tape used to track the graph of -- references. -- -- Note that every type involved has to be an instance of Num. -- This is because gradients all need to be "summable" (which is -- implemented using sum and +), and we also need to able -- to generate gradients of 1 and 0. Really, only + and -- fromInteger methods are used from the Num typeclass. -- -- This might change in the future, to allow easier integration with -- tuples (which typically do not have a Num instance), and -- potentially make types easier to use (by only requiring +, 0, -- and 1, and not the rest of the Num class). -- -- See the README for a more detailed discussion on this issue. -- -- If you need a Num instance for tuples, you can use the -- canonical 2- and 3-tuples for the library in -- Numeric.Backprop.Tuple. If you need one for larger tuples, -- consider making a custom product type instead (making Num instances -- with something like one-liner-instances). You can also use the -- orphan instances in the NumInstances package (in particular, -- Data.NumInstances.Tuple) if you are writing an application and -- do not have to worry about orphan instances. backprop :: forall a b. (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, a) -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents. -- -- Benchmarks show that this should have virtually no overhead over -- directly writing a a -> b. BVar is, in this -- situation, a zero-cost abstraction, performance-wise. -- -- Has a nice advantage over using backprop in that it doesn't -- require Num constraints on the input and output. -- -- See documentation of backprop for more information. evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> b -- | Take a function BVar s a -> BVar s b, -- interpreted as a function a -> b, and compute its gradient -- with respect to its input. 
-- -- The resulting a -> a tells how the input (and its -- components) affects the output. Positive numbers indicate that the -- result will vary in the same direction as any adjustment in the input. -- Negative numbers indicate that the result will vary in the opposite -- direction as any adjustment in the input. Larger numbers indicate a -- greater sensitivity of change, and small numbers indicate lower -- sensitivity. -- -- See documentation of backprop for more information. gradBP :: forall a b. (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a -- | backprop for a two-argument function. -- -- Not strictly necessary, because you can always uncurry a function by -- passing in all of the argument inside a data type, or use T2. -- However, this could potentially be more performant. -- -- For 3 and more arguments, consider using backpropN. backprop2 :: forall a b c. (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (a, b)) -- | evalBP for a two-argument function. See backprop2 for -- notes. evalBP2 :: (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> c -- | gradBP for a two-argument function. See backprop2 for -- notes. gradBP2 :: (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b) -- | backprop generalized to multiple inputs of different types. -- See the Numeric.Backprop.Op#prod for a mini-tutorial on -- heterogeneous lists. -- -- Not strictly necessary, because you can always uncurry a function by -- passing in all of the inputs in a data type containing all of the -- arguments or a tuple from Numeric.Backprop.Tuple. You could -- also pass in a giant tuple with NumInstances. However, this can -- be convenient if you don't want to make a custom larger tuple type or -- pull in orphan instances. This could potentially also be more -- performant. -- -- A Prod (BVar s) '[Double, Float, Double], for -- instance, is a tuple of BVar s Double, -- BVar s Float, and BVar s -- Double, and can be pattern matched on using :< -- (cons) and 'Ø' (nil). -- -- Tuples can be built and pattern matched on using ::< (cons) -- and 'Ø' (nil), as well. -- -- The Every Num as in the constraint says that -- every value in the type-level list as must have a Num -- instance. This means you can use, say, '[Double, Float, Int], -- but not '[Double, Bool, String]. -- -- If you stick to concerete, monomorphic usage of this (with -- specific types, typed into source code, known at compile-time), then -- Every Num as should be fulfilled automatically. backpropN :: forall as b. (Every Num as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, Tuple as) -- | evalBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. evalBPN :: forall as b. () => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> b -- | gradBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. gradBPN :: forall as b. (Every Num as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> Tuple as class EveryC k c as => Every k (c :: k -> Constraint) (as :: [k]) -- | Lift a value into a BVar representing a constant value. -- -- This value will not be considered an input, and its gradients will not -- be backpropagated. constVar :: a -> BVar s a -- | Coerce a BVar contents. Useful for things like newtype -- wrappers. 
coerceVar :: Coercible a b => BVar s a -> BVar s b -- | An infix version of viewVar, meant to evoke parallels to -- ^. from lens. -- -- With normal values, you can extract something from that value with a -- lens: -- --
-- x ^. myLens ---- -- would extract a piece of x :: b, specified by myLens :: -- Lens' b a. The result has type a. -- --
-- xVar ^^. myLens ---- -- would extract a piece out of xVar :: BVar s b (a -- BVar holding a b), specified by myLens :: Lens' b -- a. The result has type BVar s a (a BVar -- holding an a). -- -- This is the main way to pull out values from BVar of container -- types. -- -- WARNING: Do not use with any lenses that operate "numerically" -- on the contents (like multiplying). (^^.) :: forall a b s. (Reifies s W, Num a) => BVar s b -> Lens' b a -> BVar s a infixl 8 ^^. -- | An infix version of setVar, meant to evoke parallels to -- .~ from lens. -- -- With normal values, you can set something in a value with a lens: -- --
-- x & myLens .~ y ---- -- would "set" a part of x :: b, specified by myLens :: -- Lens' b a, to a new value y :: a. -- --
-- xVar & myLens .~~ yVar ---- -- would "set" a part of xVar :: BVar s b (a BVar -- holding a b), specified by myLens :: Lens' b a, -- to a new value given by yVar :: BVar s a. The -- result is a new (updated) value of type BVar s b. -- -- This is the main way to set values inside BVars of container -- types. (.~~) :: forall a b s. (Reifies s W, Num a, Num b) => Lens' b a -> BVar s a -> BVar s b -> BVar s b infixl 8 .~~ -- | An infix version of previewVar, meant to evoke parallels to -- ^? from lens. -- -- With normal values, you can (potentially) extract something from that -- value with a lens: -- --
-- x ^? myPrism ---- -- would (potentially) extract a piece of x :: b, specified by -- myPrism :: Traversal' b a. The result has type -- Maybe a. -- --
-- xVar ^^? myPrism ---- -- would (potentially) extract a piece out of xVar :: BVar s -- b (a BVar holding a b), specified by myPrism -- :: Prism' b a. The result has type Maybe (BVar -- s a) (Maybe a BVar holding a a). -- -- This is intended to be used with Prism's (which hits at most -- one target), but will actually work with any Traversal'. -- If the traversal hits more than one target, the first one found will -- be extracted. -- -- This can be used to "pattern match" on BVars, by using prisms -- on constructors. -- -- Note that many automatically-generated prisms by the lens -- package use tuples, which cannot normally be backpropagated (because -- they do not have a Num instance). -- -- If you are writing an application or don't have to worry about orphan -- instances, you can pull in the orphan instances from -- NumInstances. Alternatively, you can chain those prisms with -- conversions to the anonymous canonical strict tuple types in -- Numeric.Backprop.Tuple, which do have Num instances. -- --
-- myPrism :: Prism' c (a, b) -- myPrism . iso tupT2 t2Tup :: Prism' c (T2 a b) --(^^?) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> Maybe (BVar s a) -- | An infix version of toListOfVar, meant to evoke parallels to -- ^.. from lens. -- -- With normal values, you can extract all targets of a Traversal -- from that value with a: -- --
-- x ^.. myTraversal ---- -- would extract all targets inside of x :: b, specified by -- myTraversal :: Traversal' b a. The result has type -- [a]. -- --
-- xVar ^^.. myTraversal ---- -- would extract all targets inside of xVar :: BVar s b -- (a BVar holding a b), specified by myTraversal :: -- Traversal' b a. The result has type [BVar s a] (A -- list of BVars holding as). (^^..) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> [BVar s a] -- | Using a Lens', extract a value inside a BVar. -- Meant to evoke parallels to view from lens. -- -- See documentation for ^^. for more information. viewVar :: forall a b s. (Reifies s W, Num a) => Lens' b a -> BVar s b -> BVar s a -- | Using a Lens', set a value inside a BVar. Meant -- to evoke parallels to "set" from lens. -- -- See documentation for .~~ for more information. setVar :: forall a b s. (Reifies s W, Num a, Num b) => Lens' b a -> BVar s a -> BVar s b -> BVar s b -- | Extract all of the BVars out of a Traversable container -- of BVars. -- -- Note that this associates gradients in order of occurrence in the -- original data structure; the second item in the gradient is assumed to -- correspond with the second item in the input, etc.; this can cause -- unexpected behavior in Foldable instances that don't have a -- fixed number of items. sequenceVar :: forall t a s. (Reifies s W, Traversable t, Num a) => BVar s (t a) -> t (BVar s a) -- | Collect all of the BVars in a container into a BVar of -- that container's contents. -- -- Note that this associates gradients in order of occurrence in the -- original data structure; the second item in the total derivative and -- gradient is assumed to correspond with the second item in the input, -- etc.; this can cause unexpected behavior in Foldable instances -- that don't have a fixed number of items. -- -- Note that this requires t a to have a Num instance. If -- you are using a list, I recommend using vector-sized instead: -- it's a fixed-length vector type with a very appropriate Num -- instance! collectVar :: forall t a s. (Reifies s W, Foldable t, Functor t, Num (t a), Num a) => t (BVar s a) -> BVar s (t a) -- | Using a Traversal', extract a single value inside a -- BVar, if it exists. If more than one traversal target exists, -- returns te first. Meant to evoke parallels to preview from -- lens. Really only intended to be used wth Prism's, or -- up-to-one target traversals. -- -- See documentation for ^^? for more information. previewVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> Maybe (BVar s a) -- | Using a Traversal', extract all targeted values inside a -- BVar. Meant to evoke parallels to toListOf from lens. -- -- See documentation for ^^.. for more information. toListOfVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> [BVar s a] -- | Convert the value inside a BVar using a given isomorphism. -- Useful for things like constructors. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. isoVar :: (Num a, Num b, Reifies s W) => (a -> b) -> (b -> a) -> BVar s a -> BVar s b -- | Convert the values inside two BVars using a given isomorphism. -- Useful for things like constructors. See isoVar for caveats. isoVar2 :: (Num a, Num b, Num c, Reifies s W) => (a -> b -> c) -> (c -> (a, b)) -> BVar s a -> BVar s b -> BVar s c -- | Convert the values inside three BVars using a given -- isomorphism. Useful for things like constructors. See isoVar -- for caveats. 
isoVar3 :: (Num a, Num b, Num c, Num d, Reifies s W) => (a -> b -> c -> d) -> (d -> (a, b, c)) -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | Convert the values inside a tuple of BVars using a given -- isomorphism. Useful for things like constructors. See isoVar -- for caveats. isoVarN :: (Every Num as, Num b, Reifies s W) => (Tuple as -> b) -> (b -> Tuple as) -> Prod (BVar s) as -> BVar s b -- | Lift an Op with an arbitrary number of inputs to a function on -- the appropriate number of BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information, and -- Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. liftOp :: forall as b s. (Reifies s W, Num b, Every Num as) => Op as b -> Prod (BVar s) as -> BVar s b -- | Lift an Op with a single input to be a function on a single -- BVar. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp1 :: forall a b s. (Reifies s W, Num a, Num b) => Op '[a] b -> BVar s a -> BVar s b -- | Lift an Op with two inputs to be a function on a two -- BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp2 :: forall a b c s. (Reifies s W, Num a, Num b, Num c) => Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c -- | Lift an Op with three inputs to be a function on a three -- BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp3 :: forall a b c d s. (Reifies s W, Num a, Num b, Num c, Num d) => Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
-- Op '[Int, Bool] Double ---- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool \(2\), then, -- mathematically, it is akin to a: -- -- \( f : \mathbb{Z} \times 2 \rightarrow \mathbb{R} \) -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op0, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
-- >>> runOp (op0 10) Ø -- (10, Ø) ---- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
-- >>> runOp (opConst 10) (1 ::< 2 ::< 3 ::< Ø) -- (10, 0 ::< 0 ::< 0 ::< Ø) --opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
-- idOp = opIso id id --idOp :: Op '[a] a -- | A version of opConst taking explicit Length, indicating -- the number of inputs and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use opConst. opConst' :: Every Num as => Length as -> a -> Op as a -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and returning the derivative of the input. -- -- If we have -- -- \( y = f(x) \) -- -- then the derivative, \( \frac{dy}{dx} \), would be: -- -- \( \frac{dy}{dx} = f'(x) \) -- -- If our Op represents \(f\), then the second item in the -- resulting tuple should be a function that takes \( \frac{\partial L}{\partial y} \) (the total derivative of -- the final result with respect to \(y\)) and -- returns \( \frac{\partial L}{\partial x} = \frac{\partial L}{\partial y} \frac{dy}{dx} \). -- -- As an example, here is an Op that squares its input: -- --
-- square :: Num a => Op '[a] a -- square = op1 $ \x -> (x*x, \d -> 2 * d * x) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and returning the derivative of the inputs. -- -- If we have -- -- \( z = f(x, y) \) -- -- then the gradient \( \nabla z \) would be: -- -- \( \nabla z = \left( \frac{\partial z}{\partial x}, \frac{\partial z}{\partial y} \right) \) -- -- If our Op represents \(f\), then the second item in the -- resulting tuple should be a function that takes \( \frac{\partial L}{\partial z} \) and -- returns \( \left( \frac{\partial L}{\partial x}, \frac{\partial L}{\partial y} \right) = \left( \frac{\partial L}{\partial z} \frac{\partial z}{\partial x}, \frac{\partial L}{\partial z} \frac{\partial z}{\partial y} \right) \). -- -- As an example, here is an Op that multiplies its inputs: -- --
-- mul :: Num a => Op '[a, a] a -- mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d)) ---- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
-- >>> runOp opCoerce (Identity 5 ::< Ø) :: (Int, Tuple '[Identity Int]) -- (5, Identity 1 ::< Ø) ---- --
-- opCoerce = opIso coerce coerce --opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
-- >>> runOp opTup (1 ::< 2 ::< 3 ::< Ø) -- (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø) --opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | Construct a two element Prod. Since the precedence of (:>) is -- higher than (:<), we can conveniently write lists like: -- --
-- >>> a :< b :> c ---- -- Which is identical to: -- --
-- >>> a :< b :< c :< Ø --infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | A Prod of simple Haskell types. type Tuple = Prod * I -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a class Reifies k (s :: k) a | s -> a -- | Canonical strict tuples (and unit) with Num instances for usage -- with backprop. This is here to solve the problem of orphan -- instances in libraries and potential mismatched tuple types. -- -- If you are writing a library that needs to export BVars of -- tuples, consider using the tuples in this module so that your library -- can have easy interoperability with other libraries using -- backprop. -- -- Because of API decisions, backprop and gradBP only -- work with things with Num instances. However, this disallows -- default Prelude tuples (without orphan instances from -- packages like NumInstances). -- -- Until tuples have Num instances in base, this module is -- intended to be a workaround for situations where: -- -- This comes up often in cases where: -- --
-- _ + _ = T0 -- negate _ = T0 -- fromIntegral _ = T0 --data T0 T0 :: T0 -- | Strict 2-tuple with Num, Fractional, and Floating -- instances. data T2 a b T2 :: !a -> !b -> T2 a b -- | Convert to a Haskell tuple. -- -- Forms an isomorphism with tupT2. t2Tup :: T2 a b -> (a, b) -- | Convert from Haskell tuple. -- -- Forms an isomorphism with t2Tup. tupT2 :: (a, b) -> T2 a b -- | Uncurry a function to take in a T2 of its arguments uncurryT2 :: (a -> b -> c) -> T2 a b -> c -- | Curry a function taking a T2 of its arguments curryT2 :: (T2 a b -> c) -> a -> b -> c -- | Lens into the first field of a T2. Also exported as _1 -- from Lens.Micro. t2_1 :: Lens (T2 a b) (T2 a' b) a a' -- | Lens into the second field of a T2. Also exported as _2 -- from Lens.Micro. t2_2 :: Lens (T2 a b) (T2 a b') b b' -- | Strict 3-tuple with a Num, Fractional, and -- Floating instances. data T3 a b c T3 :: !a -> !b -> !c -> T3 a b c -- | Convert to a Haskell tuple. -- -- Forms an isomorphism with tupT3. t3Tup :: T3 a b c -> (a, b, c) -- | Convert from Haskell tuple. -- -- Forms an isomorphism with t3Tup. tupT3 :: (a, b, c) -> T3 a b c -- | Lens into the first field of a T3. Also exported as _1 -- from Lens.Micro. t3_1 :: Lens (T3 a b c) (T3 a' b c) a a' -- | Lens into the second field of a T3. Also exported as _2 -- from Lens.Micro. t3_2 :: Lens (T3 a b c) (T3 a b' c) b b' -- | Lens into the third field of a T3. Also exported as _3 -- from Lens.Micro. t3_3 :: Lens (T3 a b c) (T3 a b c') c c' -- | Uncurry a function to take in a T3 of its arguments uncurryT3 :: (a -> b -> c -> d) -> T3 a b c -> d -- | Curry a function taking a T3 of its arguments curryT3 :: (T3 a b c -> d) -> a -> b -> c -> d -- | Strict inductive N-tuple with a Num, Fractional, and -- Floating instances. -- -- It is basically "yet another HList", like the one found in -- Data.Type.Product and many other locations on the haskell -- ecosystem. Because it's inductively defined, it has O(n) random -- indexing, but is efficient for zipping and mapping and other -- sequential consumption patterns. -- -- It is provided because of its Num instance, making it useful -- for backproup. Will be obsolete when Product gets -- numerical instances. data T :: [Type] -> Type [TNil] :: T '[] [:&] :: !a -> !(T as) -> T (a : as) -- | Index into a T. -- -- O(i) indexT :: Index as a -> T as -> a -- | Extract a singleton T -- -- Forms an isomorphism with onlyT tOnly :: T '[a] -> a -- | A singleton T -- -- Forms an isomorphism with tOnly onlyT :: a -> T '[a] -- | Split a T. For splits known at compile-time, you can use -- known to derive the Length automatically. -- -- Forms an isomorphism with tAppend. tSplit :: Length as -> T (as ++ bs) -> (T as, T bs) -- | Append two Ts. -- -- Forms an isomorphism with tSplit. tAppend :: T as -> T bs -> T (as ++ bs) infixr 5 `tAppend` -- | Convert a T to a Tuple. -- -- Forms an isomorphism with prodT. tProd :: T as -> Tuple as -- | Convert a Tuple to a T. -- -- Forms an isomorphism with tProd. prodT :: Tuple as -> T as -- | Lens into a given index of a T. tIx :: Index as a -> Lens' (T as) a -- | Lens into the head of a T tHead :: Lens (T (a : as)) (T (b : as)) a b -- | Lens into the tail of a T tTail :: Lens (T (a : as)) (T (a : bs)) (T as) (T bs) -- | Lens into the initial portion of a T. For splits known at -- compile-time, you can use known to derive the Length -- automatically. tTake :: forall as bs cs. Length as -> Lens (T (as ++ bs)) (T (cs ++ bs)) (T as) (T cs) -- | Lens into the ending portion of a T. 
For splits known at -- compile-time, you can use known to derive the Length -- automatically. tDrop :: forall as bs cs. Length as -> Lens (T (as ++ bs)) (T (as ++ cs)) (T bs) (T cs) -- | Initialize a T with a Rank-N value. Mostly used internally, but -- provided in case useful. -- -- Must be used with TypeApplications to provide the Rank-N -- constraint. constT :: forall c as. ListC (c <$> as) => (forall a. c a => a) -> Length as -> T as -- | Map over a T with a Rank-N function. Mostly used internally, -- but provided in case useful. -- -- Must be used with TypeApplications to provide the Rank-N -- constraint. mapT :: forall c as. ListC (c <$> as) => (forall a. c a => a -> a) -> T as -> T as -- | Map over a T with a Rank-N function. Mostly used internally, -- but provided in case useful. -- -- Must be used with TypeApplications to provide the Rank-N -- constraint. zipT :: forall c as. ListC (c <$> as) => (forall a. c a => a -> a -> a) -> T as -> T as -> T as instance (Data.Data.Data c, Data.Data.Data b, Data.Data.Data a) => Data.Data.Data (Numeric.Backprop.Tuple.T3 a b c) instance GHC.Base.Functor (Numeric.Backprop.Tuple.T3 a b) instance GHC.Generics.Generic (Numeric.Backprop.Tuple.T3 a b c) instance (GHC.Classes.Ord c, GHC.Classes.Ord b, GHC.Classes.Ord a) => GHC.Classes.Ord (Numeric.Backprop.Tuple.T3 a b c) instance (GHC.Classes.Eq c, GHC.Classes.Eq b, GHC.Classes.Eq a) => GHC.Classes.Eq (Numeric.Backprop.Tuple.T3 a b c) instance (GHC.Read.Read c, GHC.Read.Read b, GHC.Read.Read a) => GHC.Read.Read (Numeric.Backprop.Tuple.T3 a b c) instance (GHC.Show.Show c, GHC.Show.Show b, GHC.Show.Show a) => GHC.Show.Show (Numeric.Backprop.Tuple.T3 a b c) instance (Data.Data.Data b, Data.Data.Data a) => Data.Data.Data (Numeric.Backprop.Tuple.T2 a b) instance GHC.Base.Functor (Numeric.Backprop.Tuple.T2 a) instance GHC.Generics.Generic (Numeric.Backprop.Tuple.T2 a b) instance (GHC.Classes.Ord b, GHC.Classes.Ord a) => GHC.Classes.Ord (Numeric.Backprop.Tuple.T2 a b) instance (GHC.Classes.Eq b, GHC.Classes.Eq a) => GHC.Classes.Eq (Numeric.Backprop.Tuple.T2 a b) instance (GHC.Read.Read b, GHC.Read.Read a) => GHC.Read.Read (Numeric.Backprop.Tuple.T2 a b) instance (GHC.Show.Show b, GHC.Show.Show a) => GHC.Show.Show (Numeric.Backprop.Tuple.T2 a b) instance Data.Data.Data Numeric.Backprop.Tuple.T0 instance GHC.Generics.Generic Numeric.Backprop.Tuple.T0 instance GHC.Classes.Ord Numeric.Backprop.Tuple.T0 instance GHC.Classes.Eq Numeric.Backprop.Tuple.T0 instance GHC.Read.Read Numeric.Backprop.Tuple.T0 instance GHC.Show.Show Numeric.Backprop.Tuple.T0 instance Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Show.Show as) => GHC.Show.Show (Numeric.Backprop.Tuple.T as) instance Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Classes.Eq as) => GHC.Classes.Eq (Numeric.Backprop.Tuple.T as) instance (Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Classes.Eq as), Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Classes.Ord as)) => GHC.Classes.Ord (Numeric.Backprop.Tuple.T as) instance Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint Control.DeepSeq.NFData as) => Control.DeepSeq.NFData (Numeric.Backprop.Tuple.T as) instance Lens.Micro.Internal.Field1 (Numeric.Backprop.Tuple.T ((':) * a as)) (Numeric.Backprop.Tuple.T ((':) * a as)) a a instance Lens.Micro.Internal.Field2 (Numeric.Backprop.Tuple.T ((':) * a ((':) * b as))) (Numeric.Backprop.Tuple.T ((':) * a ((':) * b as))) b b instance 
Lens.Micro.Internal.Field3 (Numeric.Backprop.Tuple.T ((':) * a ((':) * b ((':) * c as)))) (Numeric.Backprop.Tuple.T ((':) * a ((':) * b ((':) * c as)))) c c instance (Type.Class.Known.Known [*] (Data.Type.Length.Length *) as, Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Num.Num as)) => GHC.Num.Num (Numeric.Backprop.Tuple.T as) instance (Type.Class.Known.Known [*] (Data.Type.Length.Length *) as, Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Num.Num as), Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Real.Fractional as)) => GHC.Real.Fractional (Numeric.Backprop.Tuple.T as) instance (Type.Class.Known.Known [*] (Data.Type.Length.Length *) as, Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Num.Num as), Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Real.Fractional as), Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Float.Floating as)) => GHC.Float.Floating (Numeric.Backprop.Tuple.T as) instance Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint Data.Semigroup.Semigroup as) => Data.Semigroup.Semigroup (Numeric.Backprop.Tuple.T as) instance (Type.Class.Known.Known [*] (Data.Type.Length.Length *) as, Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint Data.Semigroup.Semigroup as), Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint GHC.Base.Monoid as)) => GHC.Base.Monoid (Numeric.Backprop.Tuple.T as) instance (Type.Class.Known.Known [*] (Data.Type.Length.Length *) as, Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint Data.Binary.Class.Binary as)) => Data.Binary.Class.Binary (Numeric.Backprop.Tuple.T as) instance (Type.Class.Known.Known [*] (Data.Type.Length.Length *) as, Type.Family.List.ListC ((Type.Family.List.<$>) * GHC.Types.Constraint System.Random.Random as)) => System.Random.Random (Numeric.Backprop.Tuple.T as) instance (Control.DeepSeq.NFData a, Control.DeepSeq.NFData b, Control.DeepSeq.NFData c) => Control.DeepSeq.NFData (Numeric.Backprop.Tuple.T3 a b c) instance (System.Random.Random a, System.Random.Random b, System.Random.Random c) => System.Random.Random (Numeric.Backprop.Tuple.T3 a b c) instance (Data.Binary.Class.Binary a, Data.Binary.Class.Binary b, Data.Binary.Class.Binary c) => Data.Binary.Class.Binary (Numeric.Backprop.Tuple.T3 a b c) instance Data.Bifunctor.Bifunctor (Numeric.Backprop.Tuple.T3 a) instance Lens.Micro.Internal.Field1 (Numeric.Backprop.Tuple.T3 a b c) (Numeric.Backprop.Tuple.T3 a' b c) a a' instance Lens.Micro.Internal.Field2 (Numeric.Backprop.Tuple.T3 a b c) (Numeric.Backprop.Tuple.T3 a b' c) b b' instance Lens.Micro.Internal.Field3 (Numeric.Backprop.Tuple.T3 a b c) (Numeric.Backprop.Tuple.T3 a b c') c c' instance (GHC.Num.Num a, GHC.Num.Num b, GHC.Num.Num c) => GHC.Num.Num (Numeric.Backprop.Tuple.T3 a b c) instance (GHC.Real.Fractional a, GHC.Real.Fractional b, GHC.Real.Fractional c) => GHC.Real.Fractional (Numeric.Backprop.Tuple.T3 a b c) instance (GHC.Float.Floating a, GHC.Float.Floating b, GHC.Float.Floating c) => GHC.Float.Floating (Numeric.Backprop.Tuple.T3 a b c) instance (Data.Semigroup.Semigroup a, Data.Semigroup.Semigroup b, Data.Semigroup.Semigroup c) => Data.Semigroup.Semigroup (Numeric.Backprop.Tuple.T3 a b c) instance (Data.Semigroup.Semigroup a, Data.Semigroup.Semigroup b, Data.Semigroup.Semigroup c, GHC.Base.Monoid a, GHC.Base.Monoid b, GHC.Base.Monoid c) => GHC.Base.Monoid (Numeric.Backprop.Tuple.T3 a b c) 
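-- The Num, Fractional, and Floating instances in this module are what
-- allow T0, T2, and T3 to be used directly as backprop inputs, outputs,
-- and gradients. A minimal sketch (the function f is hypothetical),
-- using the t2_1 and t2_2 lenses documented above:
--
-- import Numeric.Backprop
-- import Numeric.Backprop.Tuple
--
-- f :: Reifies s W => BVar s (T2 Double Double) -> BVar s Double
-- f xy = (xy ^^. t2_1) * (xy ^^. t2_2)
--
-- gradBP f (T2 3 5)   -- gradient of x*y is (y, x): T2 5.0 3.0
--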
instance (Control.DeepSeq.NFData a, Control.DeepSeq.NFData b) => Control.DeepSeq.NFData (Numeric.Backprop.Tuple.T2 a b) instance (System.Random.Random a, System.Random.Random b) => System.Random.Random (Numeric.Backprop.Tuple.T2 a b) instance (Data.Binary.Class.Binary a, Data.Binary.Class.Binary b) => Data.Binary.Class.Binary (Numeric.Backprop.Tuple.T2 a b) instance Data.Bifunctor.Bifunctor Numeric.Backprop.Tuple.T2 instance Lens.Micro.Internal.Field1 (Numeric.Backprop.Tuple.T2 a b) (Numeric.Backprop.Tuple.T2 a' b) a a' instance Lens.Micro.Internal.Field2 (Numeric.Backprop.Tuple.T2 a b) (Numeric.Backprop.Tuple.T2 a b') b b' instance (GHC.Num.Num a, GHC.Num.Num b) => GHC.Num.Num (Numeric.Backprop.Tuple.T2 a b) instance (GHC.Real.Fractional a, GHC.Real.Fractional b) => GHC.Real.Fractional (Numeric.Backprop.Tuple.T2 a b) instance (GHC.Float.Floating a, GHC.Float.Floating b) => GHC.Float.Floating (Numeric.Backprop.Tuple.T2 a b) instance (Data.Semigroup.Semigroup a, Data.Semigroup.Semigroup b) => Data.Semigroup.Semigroup (Numeric.Backprop.Tuple.T2 a b) instance (Data.Semigroup.Semigroup a, Data.Semigroup.Semigroup b, GHC.Base.Monoid a, GHC.Base.Monoid b) => GHC.Base.Monoid (Numeric.Backprop.Tuple.T2 a b) instance Control.DeepSeq.NFData Numeric.Backprop.Tuple.T0 instance System.Random.Random Numeric.Backprop.Tuple.T0 instance Data.Binary.Class.Binary Numeric.Backprop.Tuple.T0 instance GHC.Num.Num Numeric.Backprop.Tuple.T0 instance GHC.Real.Fractional Numeric.Backprop.Tuple.T0 instance GHC.Float.Floating Numeric.Backprop.Tuple.T0 instance Data.Semigroup.Semigroup Numeric.Backprop.Tuple.T0 instance GHC.Base.Monoid Numeric.Backprop.Tuple.T0 -- | Some lifted versions of common functions found in Prelude (or -- base in general). -- -- Intended to work with Functor Foldable -- Traversable instances with "fixed" number of items, i.e. -- vector-sized vectors. There might be unintended consequences -- when using it with instances where the number of items is not fixed. -- -- This module is intended to be a catch-all one, so feel free to suggest -- other functions or submit a PR if you think one would make sense. module Prelude.Backprop -- | Lifted sum sum :: forall t a s. (Foldable t, Functor t, Num (t a), Num a, Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted product product :: forall t a s. (Foldable t, Functor t, Num (t a), Fractional a, Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted length. length :: forall t a b s. (Foldable t, Num (t a), Num b, Reifies s W) => BVar s (t a) -> BVar s b -- | Lifted minimum. Undefined for situations where minimum -- would be undefined. minimum :: forall t a s. (Foldable t, Functor t, Num a, Ord a, Num (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted maximum. Undefined for situations where maximum -- would be undefined. maximum :: forall t a s. (Foldable t, Functor t, Num a, Ord a, Num (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted traverse. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Functors. -- -- Really intended only for Traversable and Applicative -- instances with fixed number of items; untintended consequences might -- arise when using it with containers with variable number of items. traverse :: forall t f a b s. (Traversable t, Applicative f, Foldable f, Num a, Num b, Num (f (t b)), Num (t b), Reifies s W) => (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | Lifted fmap. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Functors. 
-- -- Really intended only for Functor instances with fixed number of -- items; untintended consequences might arise when using it with -- containers with variable number of items. fmap :: forall f a b s. (Traversable f, Num a, Num b, Num (f b), Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | Alias for fmap. (<$>) :: forall f a b s. (Traversable f, Num a, Num b, Num (f b), Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | Lifted pure. Really intended only for Applicative -- instances with fixed number of items; untintended consequences might -- arise when using it with containers with variable number of items. pure :: forall t a s. (Foldable t, Applicative t, Num (t a), Num a, Reifies s W) => BVar s a -> BVar s (t a) -- | Lifted liftA2. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Applicatives. -- -- Really intended only for Traversable and Applicative -- instances with fixed number of items; untintended consequences might -- arise when using it with containers with variable number of items. liftA2 :: forall f a b c s. (Traversable f, Applicative f, Num a, Num b, Num c, Num (f c), Reifies s W) => (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | Lifted liftA3. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Applicatives. -- -- Really intended only for Traversable and Applicative -- instances with fixed number of items; untintended consequences might -- arise when using it with containers with variable number of items. liftA3 :: forall f a b c d s. (Traversable f, Applicative f, Num a, Num b, Num c, Num d, Num (f d), Reifies s W) => (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | Coerce items inside a BVar. coerce :: forall a b s. (Coercible a b, Num a, Num b, Reifies s W) => BVar s a -> BVar s b
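-- A minimal end-to-end sketch of the lifted functions in this module,
-- using a hypothetical fixed-size container Pair (defined here only for
-- illustration) so that the "fixed number of items" requirement is met:
--
-- {-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
--
-- import Numeric.Backprop
-- import qualified Prelude.Backprop as PB
--
-- -- A container with exactly two elements.
-- data Pair a = Pair a a
--   deriving (Show, Functor, Foldable, Traversable)
--
-- -- Element-wise Num instance, needed because the lifted functions
-- -- require Num (t a) for the container as a whole.
-- instance Num a => Num (Pair a) where
--   Pair a b + Pair c d = Pair (a + c) (b + d)
--   Pair a b - Pair c d = Pair (a - c) (b - d)
--   Pair a b * Pair c d = Pair (a * c) (b * d)
--   abs    (Pair a b)   = Pair (abs a) (abs b)
--   signum (Pair a b)   = Pair (signum a) (signum b)
--   fromInteger n       = Pair (fromInteger n) (fromInteger n)
--
-- -- Sum of squares of the elements, written with the lifted fmap and sum.
-- f :: Reifies s W => BVar s (Pair Double) -> BVar s Double
-- f = PB.sum . PB.fmap (\x -> x * x)
--
-- gradBP f (Pair 3 5)   -- gradient of x^2 + y^2 is (2x, 2y): Pair 6.0 10.0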