-- Hoogle documentation, generated by Haddock -- See Hoogle, http://www.haskell.org/hoogle/ -- | Heterogeneous automatic differentiation -- -- Write your functions to compute your result, and the library will -- automatically generate functions to compute your gradient. -- -- Implements heterogeneous reverse-mode automatic differentiation, -- commonly known as "backpropagation". -- -- See https://backprop.jle.im for the official introduction and -- documentation. @package backprop @version 0.2.6.2 -- | Provides the Backprop typeclass, a class for values that can be -- used for backpropagation. -- -- This class replaces the old (version 0.1) API relying on Num. module Numeric.Backprop.Class -- | Class of values that can be backpropagated in general. -- -- For instances of Num, these methods can be given by -- zeroNum, addNum, and oneNum. There are also -- generic options given in Numeric.Backprop.Class for functors, -- IsList instances, and Generic instances. -- --
--   instance Backprop Double where
--       zero = zeroNum
--       add = addNum
--       one = oneNum
--   
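-- A type with a single constructor whose fields are all instances of
-- Backprop can instead leave the instance body blank, as described below
-- (a sketch; Net is a hypothetical record type, using DeriveGeneric):
--
--   data Net = Net { weight :: Double, bias :: Double }
--     deriving Generic
--
--   instance Backprop Net
--   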
-- -- If you leave the body of an instance declaration blank, GHC Generics -- will be used to derive instances if the type has a single constructor -- and each field is an instance of Backprop. -- -- To ensure that backpropagation works in a sound way, should obey the -- laws: -- -- -- -- -- -- Also implies preservation of information, making zipWith -- (+) an illegal implementation for lists and vectors. -- -- This is only expected to be true up to potential "extra zeroes" in -- x and y in the result. -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- Note that not all values in the backpropagation process needs all of -- these methods: Only the "final result" needs one, for example. -- These are all grouped under one typeclass for convenience in defining -- instances, and also to talk about sensible laws. For fine-grained -- control, use the "explicit" versions of library functions (for -- example, in Numeric.Backprop.Explicit) instead of -- Backprop based ones. -- -- This typeclass replaces the reliance on Num of the previous API -- (v0.1). Num is strictly more powerful than Backprop, and -- is a stronger constraint on types than is necessary for proper -- backpropagating. In particular, fromInteger is a problem for -- many types, preventing useful backpropagation for lists, -- variable-length vectors (like Data.Vector) and variable-size -- matrices from linear algebra libraries like hmatrix and -- accelerate. class Backprop a -- | "Zero out" all components of a value. For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: Backprop a => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addIsList for a definition for instances of -- IsList. If left blank, will automatically be genericAdd, -- a pre-built definition for instances of Generic with one -- constructor whose fields are all themselves instances of -- Backprop. add :: Backprop a => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- As the library uses it, the most important law is: -- -- -- -- That is, one x is the gradient of the identity -- function with respect to its input. -- -- Ideally should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: Backprop a => a -> a -- | "Zero out" all components of a value. 
For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: (Backprop a, Generic a, GZero (Rep a)) => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addIsList for a definition for instances of -- IsList. If left blank, will automatically be genericAdd, -- a pre-built definition for instances of Generic with one -- constructor whose fields are all themselves instances of -- Backprop. add :: (Backprop a, Generic a, GAdd (Rep a)) => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- As the library uses it, the most important law is: -- -- -- -- That is, one x is the gradient of the identity -- function with respect to its input. -- -- Ideally should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: (Backprop a, Generic a, GOne (Rep a)) => a -> a -- | zero for instances of Num. -- -- Is lazy in its argument. zeroNum :: Num a => a -> a -- | add for instances of Num. addNum :: Num a => a -> a -> a -- | one for instances of Num. -- -- Is lazy in its argument. oneNum :: Num a => a -> a -- | zero for instances of Vector. zeroVec :: (Vector v a, Backprop a) => v a -> v a -- | add for instances of Vector. Automatically pads the end -- of the shorter vector with zeroes. addVec :: (Vector v a, Backprop a) => v a -> v a -> v a -- | one for instances of Vector. oneVec :: (Vector v a, Backprop a) => v a -> v a -- | zero for instances of Vector when the contained type is -- an instance of Num. Is potentially more performant than -- zeroVec when the vectors are larger. -- -- See NumVec for a Backprop instance for Vector -- instances that uses this for zero. zeroVecNum :: (Vector v a, Num a) => v a -> v a -- | one for instances of Vector when the contained type is -- an instance of Num. Is potentially more performant than -- oneVec when the vectors are larger. -- -- See NumVec for a Backprop instance for Vector -- instances that uses this for one. oneVecNum :: (Vector v a, Num a) => v a -> v a -- | zero for Functor instances. zeroFunctor :: (Functor f, Backprop a) => f a -> f a -- | add for instances of IsList. Automatically pads the end -- of the "shorter" value with zeroes. addIsList :: (IsList a, Backprop (Item a)) => a -> a -> a -- | add for types that are isomorphic to a list. 
Automatically pads -- the end of the "shorter" value with zeroes. addAsList :: Backprop b => (a -> [b]) -> ([b] -> a) -> a -> a -> a -- | one for instances of Functor. oneFunctor :: (Functor f, Backprop a) => f a -> f a -- | zero using GHC Generics; works if all fields are instances of -- Backprop. genericZero :: (Generic a, GZero (Rep a)) => a -> a -- | add using GHC Generics; works if all fields are instances of -- Backprop, but only for values with single constructors. genericAdd :: (Generic a, GAdd (Rep a)) => a -> a -> a -- | one using GHC Generics; works if all fields are instaces of -- Backprop. genericOne :: (Generic a, GOne (Rep a)) => a -> a -- | A newtype wrapper over an f a for Applicative -- f that gives a free Backprop instance (as well as -- Num etc. instances). -- -- Useful for performing backpropagation over functions that require some -- monadic context (like IO) to perform. newtype ABP f a ABP :: f a -> ABP f a [runABP] :: ABP f a -> f a -- | A newtype wrapper over an instance of Num that gives a free -- Backprop instance. -- -- Useful for things like DerivingVia, or for avoiding orphan -- instances. newtype NumBP a NumBP :: a -> NumBP a [runNumBP] :: NumBP a -> a -- | Newtype wrapper around a v a for Vector v a, -- that gives a more efficient Backprop instance for long -- vectors when a is an instance of Num. The normal -- Backprop instance for vectors will map zero or -- one over all items; this instance will completely ignore the -- contents of the original vector and instead produce a new vector of -- the same length, with all 0 or 1 using the -- Num instance of a (essentially using zeroVecNum -- and oneVecNum instead of zeroVec and oneVec). -- -- add is essentially the same as normal, but using + -- instead of the type's add. newtype NumVec v a NumVec :: v a -> NumVec v a [runNumVec] :: NumVec v a -> v a -- | Helper class for automatically deriving zero using GHC -- Generics. class GZero f -- | Helper class for automatically deriving add using GHC Generics. class GAdd f -- | Helper class for automatically deriving one using GHC Generics. 
class GOne f instance Data.Traversable.Traversable f => Data.Traversable.Traversable (Numeric.Backprop.Class.ABP f) instance Data.Foldable.Foldable f => Data.Foldable.Foldable (Numeric.Backprop.Class.ABP f) instance GHC.Base.MonadPlus f => GHC.Base.MonadPlus (Numeric.Backprop.Class.ABP f) instance GHC.Base.Alternative f => GHC.Base.Alternative (Numeric.Backprop.Class.ABP f) instance GHC.Base.Monad f => GHC.Base.Monad (Numeric.Backprop.Class.ABP f) instance GHC.Base.Applicative f => GHC.Base.Applicative (Numeric.Backprop.Class.ABP f) instance GHC.Base.Functor f => GHC.Base.Functor (Numeric.Backprop.Class.ABP f) instance GHC.Generics.Generic (Numeric.Backprop.Class.ABP f a) instance (Data.Typeable.Internal.Typeable f, Data.Typeable.Internal.Typeable a, Data.Data.Data (f a)) => Data.Data.Data (Numeric.Backprop.Class.ABP f a) instance GHC.Classes.Ord (f a) => GHC.Classes.Ord (Numeric.Backprop.Class.ABP f a) instance GHC.Classes.Eq (f a) => GHC.Classes.Eq (Numeric.Backprop.Class.ABP f a) instance GHC.Read.Read (f a) => GHC.Read.Read (Numeric.Backprop.Class.ABP f a) instance GHC.Show.Show (f a) => GHC.Show.Show (Numeric.Backprop.Class.ABP f a) instance Data.Traversable.Traversable v => Data.Traversable.Traversable (Numeric.Backprop.Class.NumVec v) instance Data.Foldable.Foldable v => Data.Foldable.Foldable (Numeric.Backprop.Class.NumVec v) instance GHC.Base.MonadPlus v => GHC.Base.MonadPlus (Numeric.Backprop.Class.NumVec v) instance GHC.Base.Alternative v => GHC.Base.Alternative (Numeric.Backprop.Class.NumVec v) instance GHC.Base.Monad v => GHC.Base.Monad (Numeric.Backprop.Class.NumVec v) instance GHC.Base.Applicative v => GHC.Base.Applicative (Numeric.Backprop.Class.NumVec v) instance GHC.Base.Functor v => GHC.Base.Functor (Numeric.Backprop.Class.NumVec v) instance GHC.Generics.Generic (Numeric.Backprop.Class.NumVec v a) instance (Data.Typeable.Internal.Typeable v, Data.Typeable.Internal.Typeable a, Data.Data.Data (v a)) => Data.Data.Data (Numeric.Backprop.Class.NumVec v a) instance GHC.Classes.Ord (v a) => GHC.Classes.Ord (Numeric.Backprop.Class.NumVec v a) instance GHC.Classes.Eq (v a) => GHC.Classes.Eq (Numeric.Backprop.Class.NumVec v a) instance GHC.Read.Read (v a) => GHC.Read.Read (Numeric.Backprop.Class.NumVec v a) instance GHC.Show.Show (v a) => GHC.Show.Show (Numeric.Backprop.Class.NumVec v a) instance GHC.Float.Floating a => GHC.Float.Floating (Numeric.Backprop.Class.NumBP a) instance GHC.Real.Fractional a => GHC.Real.Fractional (Numeric.Backprop.Class.NumBP a) instance GHC.Num.Num a => GHC.Num.Num (Numeric.Backprop.Class.NumBP a) instance Data.Traversable.Traversable Numeric.Backprop.Class.NumBP instance Data.Foldable.Foldable Numeric.Backprop.Class.NumBP instance GHC.Base.Functor Numeric.Backprop.Class.NumBP instance GHC.Generics.Generic (Numeric.Backprop.Class.NumBP a) instance Data.Data.Data a => Data.Data.Data (Numeric.Backprop.Class.NumBP a) instance GHC.Classes.Ord a => GHC.Classes.Ord (Numeric.Backprop.Class.NumBP a) instance GHC.Classes.Eq a => GHC.Classes.Eq (Numeric.Backprop.Class.NumBP a) instance GHC.Read.Read a => GHC.Read.Read (Numeric.Backprop.Class.NumBP a) instance GHC.Show.Show a => GHC.Show.Show (Numeric.Backprop.Class.NumBP a) instance GHC.Num.Num a => Numeric.Backprop.Class.Backprop (Numeric.Backprop.Class.NumBP a) instance (Data.Vector.Generic.Base.Vector v a, GHC.Num.Num a) => Numeric.Backprop.Class.Backprop (Numeric.Backprop.Class.NumVec v a) instance (GHC.Base.Applicative f, Numeric.Backprop.Class.Backprop a) => Numeric.Backprop.Class.Backprop 
(Numeric.Backprop.Class.ABP f a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.GZero (GHC.Generics.K1 i a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.GAdd (GHC.Generics.K1 i a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.GOne (GHC.Generics.K1 i a) instance Numeric.Backprop.Class.Backprop GHC.Types.Int instance Numeric.Backprop.Class.Backprop GHC.Integer.Type.Integer instance Numeric.Backprop.Class.Backprop GHC.Natural.Natural instance Numeric.Backprop.Class.Backprop GHC.Word.Word8 instance Numeric.Backprop.Class.Backprop GHC.Types.Word instance Numeric.Backprop.Class.Backprop GHC.Word.Word16 instance Numeric.Backprop.Class.Backprop GHC.Word.Word32 instance Numeric.Backprop.Class.Backprop GHC.Word.Word64 instance GHC.Real.Integral a => Numeric.Backprop.Class.Backprop (GHC.Real.Ratio a) instance GHC.Float.RealFloat a => Numeric.Backprop.Class.Backprop (Data.Complex.Complex a) instance Numeric.Backprop.Class.Backprop GHC.Types.Float instance Numeric.Backprop.Class.Backprop GHC.Types.Double instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Vector.Vector a) instance (Data.Vector.Unboxed.Base.Unbox a, Numeric.Backprop.Class.Backprop a) => Numeric.Backprop.Class.Backprop (Data.Vector.Unboxed.Base.Vector a) instance (Foreign.Storable.Storable a, Numeric.Backprop.Class.Backprop a) => Numeric.Backprop.Class.Backprop (Data.Vector.Storable.Vector a) instance (Data.Primitive.Types.Prim a, Numeric.Backprop.Class.Backprop a) => Numeric.Backprop.Class.Backprop (Data.Vector.Primitive.Vector a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop [a] instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (GHC.Base.NonEmpty a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Sequence.Internal.Seq a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (GHC.Maybe.Maybe a) instance Numeric.Backprop.Class.Backprop () instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b) => Numeric.Backprop.Class.Backprop (a, b) instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b, Numeric.Backprop.Class.Backprop c) => Numeric.Backprop.Class.Backprop (a, b, c) instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b, Numeric.Backprop.Class.Backprop c, Numeric.Backprop.Class.Backprop d) => Numeric.Backprop.Class.Backprop (a, b, c, d) instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b, Numeric.Backprop.Class.Backprop c, Numeric.Backprop.Class.Backprop d, Numeric.Backprop.Class.Backprop e) => Numeric.Backprop.Class.Backprop (a, b, c, d, e) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Functor.Identity.Identity a) instance Numeric.Backprop.Class.Backprop (Data.Proxy.Proxy a) instance Numeric.Backprop.Class.Backprop w => Numeric.Backprop.Class.Backprop (Data.Functor.Const.Const w a) instance Numeric.Backprop.Class.Backprop Data.Void.Void instance (Numeric.Backprop.Class.Backprop a, GHC.Classes.Ord k) => Numeric.Backprop.Class.Backprop (Data.Map.Internal.Map k a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.IntMap.Internal.IntMap a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (GHC.Generics.K1 i a p) instance Numeric.Backprop.Class.Backprop (f p) => Numeric.Backprop.Class.Backprop (GHC.Generics.M1 i c f p) instance 
(Numeric.Backprop.Class.Backprop (f p), Numeric.Backprop.Class.Backprop (g p)) => Numeric.Backprop.Class.Backprop ((GHC.Generics.:*:) f g p) instance Numeric.Backprop.Class.Backprop (GHC.Generics.V1 p) instance Numeric.Backprop.Class.Backprop (GHC.Generics.U1 p) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Semigroup.Internal.Sum a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Semigroup.Internal.Product a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Semigroup.Option a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Semigroup.First a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Semigroup.Last a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Monoid.First a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Monoid.Last a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Semigroup.Internal.Dual a) instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b) => Numeric.Backprop.Class.Backprop (Data.Semigroup.Arg a b) instance (Numeric.Backprop.Class.Backprop (f a), Numeric.Backprop.Class.Backprop (g a)) => Numeric.Backprop.Class.Backprop (Data.Functor.Product.Product f g a) instance Numeric.Backprop.Class.Backprop (f (g a)) => Numeric.Backprop.Class.Backprop (Data.Functor.Compose.Compose f g a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (r -> a) instance (Numeric.Backprop.Class.Backprop a, GHC.Base.Applicative m) => Numeric.Backprop.Class.Backprop (Control.Arrow.Kleisli m r a) instance (Numeric.Backprop.Class.GOne f, Numeric.Backprop.Class.GOne g) => Numeric.Backprop.Class.GOne (f GHC.Generics.:*: g) instance (Numeric.Backprop.Class.GOne f, Numeric.Backprop.Class.GOne g) => Numeric.Backprop.Class.GOne (f GHC.Generics.:+: g) instance Numeric.Backprop.Class.GOne GHC.Generics.V1 instance Numeric.Backprop.Class.GOne GHC.Generics.U1 instance Numeric.Backprop.Class.GOne f => Numeric.Backprop.Class.GOne (GHC.Generics.M1 i c f) instance Numeric.Backprop.Class.GOne f => Numeric.Backprop.Class.GOne (f GHC.Generics.:.: g) instance (Numeric.Backprop.Class.GAdd f, Numeric.Backprop.Class.GAdd g) => Numeric.Backprop.Class.GAdd (f GHC.Generics.:*: g) instance Numeric.Backprop.Class.GAdd GHC.Generics.V1 instance Numeric.Backprop.Class.GAdd GHC.Generics.U1 instance Numeric.Backprop.Class.GAdd f => Numeric.Backprop.Class.GAdd (GHC.Generics.M1 i c f) instance Numeric.Backprop.Class.GAdd f => Numeric.Backprop.Class.GAdd (f GHC.Generics.:.: g) instance (Numeric.Backprop.Class.GZero f, Numeric.Backprop.Class.GZero g) => Numeric.Backprop.Class.GZero (f GHC.Generics.:*: g) instance (Numeric.Backprop.Class.GZero f, Numeric.Backprop.Class.GZero g) => Numeric.Backprop.Class.GZero (f GHC.Generics.:+: g) instance Numeric.Backprop.Class.GZero GHC.Generics.V1 instance Numeric.Backprop.Class.GZero GHC.Generics.U1 instance Numeric.Backprop.Class.GZero f => Numeric.Backprop.Class.GZero (GHC.Generics.M1 i c f) instance Numeric.Backprop.Class.GZero f => Numeric.Backprop.Class.GZero (f GHC.Generics.:.: g) instance Control.DeepSeq.NFData (f a) => Control.DeepSeq.NFData (Numeric.Backprop.Class.ABP f a) instance (GHC.Base.Applicative f, GHC.Num.Num a) => GHC.Num.Num (Numeric.Backprop.Class.ABP f a) instance (GHC.Base.Applicative f, GHC.Real.Fractional a) => GHC.Real.Fractional 
(Numeric.Backprop.Class.ABP f a) instance (GHC.Base.Applicative f, GHC.Float.Floating a) => GHC.Float.Floating (Numeric.Backprop.Class.ABP f a) instance Control.DeepSeq.NFData (v a) => Control.DeepSeq.NFData (Numeric.Backprop.Class.NumVec v a) instance Control.DeepSeq.NFData a => Control.DeepSeq.NFData (Numeric.Backprop.Class.NumBP a) instance GHC.Base.Applicative Numeric.Backprop.Class.NumBP instance GHC.Base.Monad Numeric.Backprop.Class.NumBP -- | Provides the Op type and combinators, which represent -- differentiable functions/operations on values, and are used internally -- by the library to perform back-propagation. -- -- Users of the library can ignore this module for the most part. Library -- authors defining backpropagatable primitives for their functions are -- recommend to simply use op0, op1, op2, -- op3, which are re-exported in Numeric.Backprop. However, -- authors who want more options in defining their primtive functions -- might find some of these functions useful. -- -- Note that if your entire function is a single non-branching -- composition of functions, Op and its utility functions alone -- are sufficient to differentiate/backprop. However, this happens rarely -- in practice. -- -- To use these Ops with the backprop library, they can be made to -- work with BVars using liftOp, liftOp1, -- liftOp2, and liftOp3. -- -- If you are writing a library, see -- https://backprop.jle.im/06-equipping-your-library.html for a -- guide for equipping your library with backpropatable operations using -- Ops. -- -- See also this guide for writing Ops manually on your own -- numerical functions. module Numeric.Backprop.Op -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
--   Op '[Int, Bool] Double
--   
-- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Rec and 'Rec Identity'. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Rec Identity as -> (a, a -> Rec Identity as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Rec Identity as -> (a, a -> Rec Identity as) -- | A record is parameterized by a universe u, an interpretation -- f and a list of rows rs. The labels or indices of -- the record are given by inhabitants of the kind u; the type -- of values at any label r :: u is given by its interpretation -- f r :: *. data Rec (a :: u -> Type) (b :: [u]) :: forall u. () => u -> Type -> [u] -> Type [RNil] :: forall u (a :: u -> Type) (b :: [u]). () => Rec a ([] :: [u]) [:&] :: forall u (a :: u -> Type) (b :: [u]) (r :: u) (rs :: [u]). () => !a r -> !Rec a rs -> Rec a (r : rs) infixr 7 :& -- | Run the function that an Op encodes, to get the resulting -- output and also its gradient with respect to the inputs. -- --
--   >>> runOp (*.) (3 :& 5 :& RNil)
--   (15, 5 :& 3 :& RNil)
--   
runOp :: Num a => Op as a -> Rec Identity as -> (a, Rec Identity as) -- | Run the function that an Op encodes, to get the result. -- --
--   >>> evalOp (*.) (3 :& 5 :& RNil)
--   15
--   
evalOp :: Op as a -> Rec Identity as -> a -- | Run the function that an Op encodes, and get the gradient of -- the output with respect to the inputs. -- --
--   >>> gradOp (*.) (3 :& 5 :& RNil)
--   5 :& 3 :& RNil
--   -- the gradient of x*y is (y, x)
--   
-- --
--   gradOp o xs = gradOpWith o xs 1
--   
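-- Concretely, scaling the total derivative given to gradOpWith scales the
-- resulting gradient (a sketch, in the same format as the examples above):
--
--   >>> gradOpWith (*.) (3 :& 5 :& RNil) 2
--   10 :& 6 :& RNil
--   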
gradOp :: Num a => Op as a -> Rec Identity as -> Rec Identity as -- | Get the gradient function that an Op encodes, with a third -- argument expecting the total derivative of the result. -- -- See the module documentaiton for Numeric.Backprop.Op for more -- information. gradOpWith :: Op as a -> Rec Identity as -> a -> Rec Identity as -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
--   >>> runOp (op0 10) RNil
--   (10, RNil)
--   
-- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
--   >>> runOp (opConst 10) (1 :& 2 :& 3 :& RNil)
--   (10, 0 :& 0 :& 0 :& RNil)
--   
opConst :: forall as a. RPureConstrained Num as => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
--   idOp = opIso id id
--   
idOp :: Op '[a] a -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the derivative <math>, it would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that squares its input: -- --
--   square :: Num a => Op '[a] a
--   square = op1 $ \x -> (x*x, \d -> 2 * d * x)
--   
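-- Running it with the helpers above behaves as expected (a sketch, in the
-- same format as the earlier examples):
--
--   >>> runOp square (4 :& RNil)
--   (16, 8 :& RNil)
--   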
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
--   mul :: Num a => Op '[a, a] a
--   mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d))
--   
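-- Such an Op is typically exposed to users by lifting it to BVars, for
-- example with the Backprop-based liftOp2 from Numeric.Backprop (a sketch;
-- mulVar is a hypothetical name):
--
--   mulVar :: (Backprop a, Num a, Reifies s W)
--          => BVar s a -> BVar s a -> BVar s a
--   mulVar = liftOp2 mul
--   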
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
--   >>> gradOp' opCoerce (Identity 5) :: (Int, Identity Int)
--   (5, Identity 1)
--   
-- --
--   opCoerce = opIso coerce coerce
--   
opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
--   >>> gradOp' opTup (1 :& 2 :& 3 :& RNil)
--   (1 :& 2 :& 3 :& RNil, 1 :& 1 :& 1 :& RNil)
--   
opTup :: Op as (Rec Identity as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the two input values through an isomorphism. -- Useful for things like constructors. See opIso for caveats. opIso2 :: (a -> b -> c) -> (c -> (a, b)) -> Op '[a, b] c -- | An Op that runs the three input values through an isomorphism. -- Useful for things like constructors. See opIso for caveats. opIso3 :: (a -> b -> c -> d) -> (d -> (a, b, c)) -> Op '[a, b, c] d -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Rec Identity as -> b) -> (b -> Rec Identity as) -> Op as b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Rec Identity as -> b) -> Op as b -- | Compose Ops together, like sequence for functions, or -- liftAN. -- -- That is, given an Op as b1, an Op as -- b2, and an Op as b3, it can compose them with an -- Op '[b1,b2,b3] c to create an Op as c. composeOp :: forall as bs c. RPureConstrained Num as => Rec (Op as) bs -> Op bs c -> Op as c -- | Convenient wrapper over composeOp for the case where the second -- function only takes one input, so the two Ops can be directly -- piped together, like for .. composeOp1 :: RPureConstrained Num as => Op as b -> Op '[b] c -> Op as c -- | Convenient infix synonym for (flipped) composeOp1. Meant to be -- used just like .: -- --
--   f :: Op '[b]   c
--   g :: Op '[a,a] b
--   
--   f ~. g :: Op '[a, a] c
--   
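-- For instance, composing negateOp (defined below) after the
-- multiplication Op (*.) gives a negated product (a sketch; negMul is a
-- hypothetical name):
--
--   negMul :: Num a => Op '[a, a] a
--   negMul = negateOp ~. (*.)
--   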
(~.) :: RPureConstrained Num as => Op '[b] c -> Op as b -> Op as c infixr 9 ~. -- | Op for addition (+.) :: Num a => Op '[a, a] a -- | Op for subtraction (-.) :: Num a => Op '[a, a] a -- | Op for multiplication (*.) :: Num a => Op '[a, a] a -- | Op for negation negateOp :: Num a => Op '[a] a -- | Op for absolute value absOp :: Num a => Op '[a] a -- | Op for signum signumOp :: Num a => Op '[a] a -- | Op for division (/.) :: Fractional a => Op '[a, a] a -- | Op for multiplicative inverse recipOp :: Fractional a => Op '[a] a -- | Op for exp expOp :: Floating a => Op '[a] a -- | Op for the natural logarithm logOp :: Floating a => Op '[a] a -- | Op for square root sqrtOp :: Floating a => Op '[a] a -- | Op for exponentiation (**.) :: Floating a => Op '[a, a] a -- | Op for logBase logBaseOp :: Floating a => Op '[a, a] a -- | Op for sine sinOp :: Floating a => Op '[a] a -- | Op for cosine cosOp :: Floating a => Op '[a] a -- | Op for tangent tanOp :: Floating a => Op '[a] a -- | Op for arcsine asinOp :: Floating a => Op '[a] a -- | Op for arccosine acosOp :: Floating a => Op '[a] a -- | Op for arctangent atanOp :: Floating a => Op '[a] a -- | Op for hyperbolic sine sinhOp :: Floating a => Op '[a] a -- | Op for hyperbolic cosine coshOp :: Floating a => Op '[a] a -- | Op for hyperbolic tangent tanhOp :: Floating a => Op '[a] a -- | Op for hyperbolic arcsine asinhOp :: Floating a => Op '[a] a -- | Op for hyperbolic arccosine acoshOp :: Floating a => Op '[a] a -- | Op for hyperbolic arctangent atanhOp :: Floating a => Op '[a] a instance (Data.Vinyl.Core.RPureConstrained GHC.Num.Num as, GHC.Num.Num a) => GHC.Num.Num (Numeric.Backprop.Op.Op as a) instance (Data.Vinyl.Core.RPureConstrained GHC.Num.Num as, GHC.Real.Fractional a) => GHC.Real.Fractional (Numeric.Backprop.Op.Op as a) instance (Data.Vinyl.Core.RPureConstrained GHC.Num.Num as, GHC.Float.Floating a) => GHC.Float.Floating (Numeric.Backprop.Op.Op as a) -- | Provides "explicit" versions of all of the functions in -- Numeric.Backprop. Instead of relying on a Backprop -- instance, allows you to manually provide zero, add, and -- one on a per-value basis. -- -- It is recommended you use Numeric.Backprop or -- Numeric.Backprop.Num instead, unless your type has no -- Num instance, or you else you want to avoid defining orphan -- Backprop instances for external types. Can also be useful if -- mixing and matching styles. -- -- See Numeric.Backprop for fuller documentation on using these -- functions. -- -- WARNING: API of this module can be considered only "semi-stable"; -- while the API of Numeric.Backprop and -- Numeric.Backprop.Num are kept consistent, some argument order -- changes might happen in this module to reflect changes in underlying -- implementation. module Numeric.Backprop.Explicit -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar): -- --
--   (^.)  ::        a -> Lens' a b ->        b
--   (^^.) :: BVar s a -> Lens' a b -> BVar s b
--   
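-- For example, given a hypothetical record type Model with a Backprop
-- instance and a lens bias for one of its Double fields, that field can
-- be read out of a BVar directly (a sketch):
--
--   getBias :: Reifies s W => BVar s Model -> BVar s Double
--   getBias v = v ^^. bias
--   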
-- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- If you have control over your data type definitions, you can also use -- splitBV and joinBV to manipulate data types by easily -- extracting fields out of a BVar of data types and creating -- BVars of data types out of BVars of their fields. See -- Numeric.Backprop#hkd for a tutorial on this use pattern. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. See -- https://backprop.jle.im/08-equipping-your-library.html for a -- detailed guide. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | Class of values that can be backpropagated in general. -- -- For instances of Num, these methods can be given by -- zeroNum, addNum, and oneNum. There are also -- generic options given in Numeric.Backprop.Class for functors, -- IsList instances, and Generic instances. -- --
--   instance Backprop Double where
--       zero = zeroNum
--       add = addNum
--       one = oneNum
--   
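-- The functions in this module take these operations as explicit
-- arguments rather than through the class; for a Num type they can be
-- supplied directly with zfNum and ofNum (a sketch using the explicit
-- backprop defined below):
--
--   >>> backprop zfNum ofNum (\x -> x*x) (3 :: Double)
--   (9.0,6.0)
--   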
-- -- If you leave the body of an instance declaration blank, GHC Generics -- will be used to derive instances if the type has a single constructor -- and each field is an instance of Backprop. -- -- To ensure that backpropagation works in a sound way, should obey the -- laws: -- -- -- -- -- -- Also implies preservation of information, making zipWith -- (+) an illegal implementation for lists and vectors. -- -- This is only expected to be true up to potential "extra zeroes" in -- x and y in the result. -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- Note that not all values in the backpropagation process needs all of -- these methods: Only the "final result" needs one, for example. -- These are all grouped under one typeclass for convenience in defining -- instances, and also to talk about sensible laws. For fine-grained -- control, use the "explicit" versions of library functions (for -- example, in Numeric.Backprop.Explicit) instead of -- Backprop based ones. -- -- This typeclass replaces the reliance on Num of the previous API -- (v0.1). Num is strictly more powerful than Backprop, and -- is a stronger constraint on types than is necessary for proper -- backpropagating. In particular, fromInteger is a problem for -- many types, preventing useful backpropagation for lists, -- variable-length vectors (like Data.Vector) and variable-size -- matrices from linear algebra libraries like hmatrix and -- accelerate. class Backprop a -- | "Zero out" all components of a value. For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: Backprop a => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addIsList for a definition for instances of -- IsList. If left blank, will automatically be genericAdd, -- a pre-built definition for instances of Generic with one -- constructor whose fields are all themselves instances of -- Backprop. add :: Backprop a => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- As the library uses it, the most important law is: -- -- -- -- That is, one x is the gradient of the identity -- function with respect to its input. -- -- Ideally should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: Backprop a => a -> a -- | "Zero out" all components of a value. 
For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: (Backprop a, Generic a, GZero (Rep a)) => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addIsList for a definition for instances of -- IsList. If left blank, will automatically be genericAdd, -- a pre-built definition for instances of Generic with one -- constructor whose fields are all themselves instances of -- Backprop. add :: (Backprop a, Generic a, GAdd (Rep a)) => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- As the library uses it, the most important law is: -- -- -- -- That is, one x is the gradient of the identity -- function with respect to its input. -- -- Ideally should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: (Backprop a, Generic a, GOne (Rep a)) => a -> a -- | A newtype wrapper over an f a for Applicative -- f that gives a free Backprop instance (as well as -- Num etc. instances). -- -- Useful for performing backpropagation over functions that require some -- monadic context (like IO) to perform. newtype ABP f a ABP :: f a -> ABP f a [runABP] :: ABP f a -> f a -- | A newtype wrapper over an instance of Num that gives a free -- Backprop instance. -- -- Useful for things like DerivingVia, or for avoiding orphan -- instances. newtype NumBP a NumBP :: a -> NumBP a [runNumBP] :: NumBP a -> a -- | "Zero out" all components of a value. For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: Applying the function twice is the same as -- applying it just once. -- -- Each type should ideally only have one ZeroFunc. This coherence -- constraint is given by the typeclass Backprop. newtype ZeroFunc a ZF :: (a -> a) -> ZeroFunc a [runZF] :: ZeroFunc a -> a -> a -- | If a type has a Num instance, this is the canonical -- ZeroFunc. zfNum :: Num a => ZeroFunc a -- | ZeroFuncs for every item in a type level list based on their -- Num instances zfNums :: RPureConstrained Num as => Rec ZeroFunc as -- | The canonical ZeroFunc for instances of Backprop. zeroFunc :: Backprop a => ZeroFunc a -- | Generate an ZeroFunc for every type in a type-level list, if -- every type has an instance of Backprop. 
zeroFuncs :: RPureConstrained Backprop as => Rec ZeroFunc as -- | zeroFunc for instances of Functor zfFunctor :: (Backprop a, Functor f) => ZeroFunc (f a) -- | Add together two values of a type. To combine contributions of -- gradients, so should ideally be information-preserving. -- -- See laws for Backprop for the laws this should be expected to -- preserve. Namely, it should be commutative and associative, with an -- identity for a valid ZeroFunc. -- -- Each type should ideally only have one AddFunc. This coherence -- constraint is given by the typeclass Backprop. newtype AddFunc a AF :: (a -> a -> a) -> AddFunc a [runAF] :: AddFunc a -> a -> a -> a -- | If a type has a Num instance, this is the canonical -- AddFunc. afNum :: Num a => AddFunc a -- | ZeroFuncs for every item in a type level list based on their -- Num instances afNums :: RPureConstrained Num as => Rec AddFunc as -- | The canonical AddFunc for instances of Backprop. addFunc :: Backprop a => AddFunc a -- | Generate an AddFunc for every type in a type-level list, if -- every type has an instance of Backprop. addFuncs :: RPureConstrained Backprop as => Rec AddFunc as -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- Should be idempotent: Applying the function twice is the same as -- applying it just once. -- -- Each type should ideally only have one OneFunc. This coherence -- constraint is given by the typeclass Backprop. newtype OneFunc a OF :: (a -> a) -> OneFunc a [runOF] :: OneFunc a -> a -> a -- | If a type has a Num instance, this is the canonical -- OneFunc. ofNum :: Num a => OneFunc a -- | ZeroFuncs for every item in a type level list based on their -- Num instances ofNums :: RPureConstrained Num as => Rec OneFunc as -- | The canonical OneFunc for instances of Backprop. oneFunc :: Backprop a => OneFunc a -- | Generate an OneFunc for every type in a type-level list, if -- every type has an instance of Backprop. oneFuncs :: RPureConstrained Backprop as => Rec OneFunc as -- | OneFunc for instances of Functor ofFunctor :: (Backprop a, Functor f) => OneFunc (f a) -- | backprop, but with explicit zero and one. backprop :: ZeroFunc a -> OneFunc b -> (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, a) -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents. -- -- Benchmarks show that this should have virtually no overhead over -- directly writing a a -> b. BVar is, in this -- situation, a zero-cost abstraction, performance-wise. -- -- See documentation of backprop for more information. evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> b -- | gradBP, but with explicit zero and one. gradBP :: ZeroFunc a -> OneFunc b -> (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a -- | backpropWith, but with explicit zero. -- -- Note that argument order changed in v0.2.4. backpropWith :: ZeroFunc a -> (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, b -> a) -- | evalBP but with no arguments. Useful when everything is just -- given through constVar. evalBP0 :: (forall s. Reifies s W => BVar s a) -> a -- | backprop2, but with explicit zero and one. backprop2 :: ZeroFunc a -> ZeroFunc b -> OneFunc c -> (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (a, b)) -- | evalBP for a two-argument function. See backprop2 for -- notes. evalBP2 :: (forall s. 
Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> c -- | gradBP2 with explicit zero and one. gradBP2 :: ZeroFunc a -> ZeroFunc b -> OneFunc c -> (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b) -- | backpropWith2, but with explicit zero. -- -- Note that argument order changed in v0.2.4. backpropWith2 :: ZeroFunc a -> ZeroFunc b -> (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, c -> (a, b)) -- | backpropN, but with explicit zero and one. backpropN :: forall as b. () => Rec ZeroFunc as -> OneFunc b -> (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> (b, Rec Identity as) -- | evalBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. evalBPN :: forall as b. () => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> b -- | gradBP, Nbut with explicit zero and one. gradBPN :: Rec ZeroFunc as -> OneFunc b -> (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> Rec Identity as -- | backpropWithN, but with explicit zero and one. -- -- Note that argument order changed in v0.2.4. backpropWithN :: forall as b. () => Rec ZeroFunc as -> (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> (b, b -> Rec Identity as) -- | Build a record whose elements are derived solely from a constraint -- satisfied by each. class RPureConstrained (c :: u -> Constraint) (ts :: [u]) -- | Lift a value into a BVar representing a constant value. -- -- This value will not be considered an input, and its gradients will not -- be backpropagated. constVar :: a -> BVar s a -- | Shorter alias for constVar, inspired by the ad library. auto :: a -> BVar s a -- | Coerce a BVar contents. Useful for things like newtype -- wrappers. coerceVar :: Coercible a b => BVar s a -> BVar s b -- | viewVar, but with explicit add and zero. viewVar :: forall a b s. Reifies s W => AddFunc a -> ZeroFunc b -> Lens' b a -> BVar s b -> BVar s a -- | setVar, but with explicit add and zero. setVar :: forall a b s. Reifies s W => AddFunc a -> AddFunc b -> ZeroFunc a -> Lens' b a -> BVar s a -> BVar s b -> BVar s b -- | overVar with explicit add and zero. overVar :: Reifies s W => AddFunc a -> AddFunc b -> ZeroFunc a -> ZeroFunc b -> Lens' b a -> (BVar s a -> BVar s a) -> BVar s b -> BVar s b -- | sequenceVar, but with explicit add and zero. sequenceVar :: forall t a s. (Reifies s W, Traversable t) => AddFunc a -> ZeroFunc a -> BVar s (t a) -> t (BVar s a) -- | collectVar, but with explicit add and zero. collectVar :: forall t a s. (Reifies s W, Foldable t, Functor t) => AddFunc a -> ZeroFunc a -> t (BVar s a) -> BVar s (t a) -- | previewVar, but with explicit add and zero. previewVar :: forall b a s. Reifies s W => AddFunc a -> ZeroFunc b -> Traversal' b a -> BVar s b -> Maybe (BVar s a) -- | toListOfVar, but with explicit add and zero. toListOfVar :: forall b a s. Reifies s W => AddFunc a -> ZeroFunc b -> Traversal' b a -> BVar s b -> [BVar s a] -- | isoVar with explicit add and zero. isoVar :: Reifies s W => AddFunc a -> (a -> b) -> (b -> a) -> BVar s a -> BVar s b -- | isoVar2 with explicit add and zero. isoVar2 :: Reifies s W => AddFunc a -> AddFunc b -> (a -> b -> c) -> (c -> (a, b)) -> BVar s a -> BVar s b -> BVar s c -- | isoVar3 with explicit add and zero. 
isoVar3 :: Reifies s W => AddFunc a -> AddFunc b -> AddFunc c -> (a -> b -> c -> d) -> (d -> (a, b, c)) -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | isoVarN with explicit add and zero. isoVarN :: Reifies s W => Rec AddFunc as -> (Rec Identity as -> b) -> (b -> Rec Identity as) -> Rec (BVar s) as -> BVar s b -- | liftOp, but with explicit add and zero. liftOp :: forall as b s. Reifies s W => Rec AddFunc as -> Op as b -> Rec (BVar s) as -> BVar s b -- | liftOp1, but with explicit add and zero. liftOp1 :: forall a b s. Reifies s W => AddFunc a -> Op '[a] b -> BVar s a -> BVar s b -- | liftOp2, but with explicit add and zero. liftOp2 :: forall a b c s. Reifies s W => AddFunc a -> AddFunc b -> Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c -- | liftOp3, but with explicit add and zero. liftOp3 :: forall a b c d s. Reifies s W => AddFunc a -> AddFunc b -> AddFunc c -> Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | splitBV with explicit add and zero. splitBV :: forall z f s as. (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Reifies s W) => AddFunc (Rep (z f) ()) -> Rec AddFunc as -> ZeroFunc (z f) -> Rec ZeroFunc as -> BVar s (z f) -> z (BVar s) -- | joinBV with explicit add and zero. joinBV :: forall z f s as. (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Reifies s W) => AddFunc (z f) -> Rec AddFunc as -> ZeroFunc (Rep (z f) ()) -> Rec ZeroFunc as -> z (BVar s) -> BVar s (z f) -- | Helper class for generically "splitting" and "joining" BVars -- into constructors. See splitBV and joinBV. -- -- See Numeric.Backprop#hkd for a tutorial on how to use this. -- -- Instances should be available for types made with one constructor -- whose fields are all instances of Backprop, with a -- Generic instance. class BVGroup s as i o | o -> i, i -> as -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
--   Op '[Int, Bool] Double
--   
-- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Rec and 'Rec Identity'. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Rec Identity as -> (a, a -> Rec Identity as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Rec Identity as -> (a, a -> Rec Identity as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
--   >>> runOp (op0 10) RNil
--   (10, RNil)
--   
-- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
--   >>> runOp (opConst 10) (1 :& 2 :& 3 :& RNil)
--   (10, 0 :& 0 :& 0 :& RNil)
--   
opConst :: forall as a. RPureConstrained Num as => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
--   idOp = opIso id id
--   
idOp :: Op '[a] a -- | bpOp with explicit zero. bpOp :: Rec ZeroFunc as -> (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Op as b -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the derivative <math>, it would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that squares its input: -- --
--   square :: Num a => Op '[a] a
--   square = op1 $ \x -> (x*x, \d -> 2 * d * x)
--   
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
--   mul :: Num a => Op '[a, a] a
--   mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d))
--   
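-- -- For instance (a hypothetical session, assuming the mul definition above): -- --
--   >>> gradOp' mul (3 :& 4 :& RNil)
--   (12, 4 :& 3 :& RNil)
--   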
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
--   >>> gradOp' opCoerce (5 :& RNil) :: (Int, Rec Identity '[Int])
--   (5, 1 :& RNil)
--   
-- --
--   opCoerce = opIso coerce coerce
--   
opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
--   >>> gradOp' opTup (1 :& 2 :& 3 :& RNil)
--   (1 :& 2 :& 3 :& RNil, 1 :& 1 :& 1 :& RNil)
--   
opTup :: Op as (Rec Identity as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Rec Identity as -> b) -> (b -> Rec Identity as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Rec Identity as -> b) -> Op as b class Reifies (s :: k) a | s -> a instance Numeric.Backprop.Explicit.BVGroup s '[] (GHC.Generics.K1 i a) (GHC.Generics.K1 i (Numeric.Backprop.Internal.BVar s a)) instance Numeric.Backprop.Explicit.BVGroup s as i o => Numeric.Backprop.Explicit.BVGroup s as (GHC.Generics.M1 p c i) (GHC.Generics.M1 p c o) instance Numeric.Backprop.Explicit.BVGroup s '[] GHC.Generics.V1 GHC.Generics.V1 instance Numeric.Backprop.Explicit.BVGroup s '[] GHC.Generics.U1 GHC.Generics.U1 instance (Data.Reflection.Reifies s Numeric.Backprop.Internal.W, Numeric.Backprop.Explicit.BVGroup s as i1 o1, Numeric.Backprop.Explicit.BVGroup s bs i2 o2, cs Data.Type.Equality.~ (as Data.Vinyl.TypeLevel.++ bs), Data.Vinyl.Core.RecApplicative as) => Numeric.Backprop.Explicit.BVGroup s (i1 () : i2 () : cs) (i1 GHC.Generics.:*: i2) (o1 GHC.Generics.:*: o2) instance (Data.Reflection.Reifies s Numeric.Backprop.Internal.W, Numeric.Backprop.Explicit.BVGroup s as i1 o1, Numeric.Backprop.Explicit.BVGroup s bs i2 o2, cs Data.Type.Equality.~ (as Data.Vinyl.TypeLevel.++ bs), Data.Vinyl.Core.RecApplicative as) => Numeric.Backprop.Explicit.BVGroup s (i1 () : i2 () : cs) (i1 GHC.Generics.:+: i2) (o1 GHC.Generics.:+: o2) -- | Provides the exact same API as Numeric.Backprop, except -- requiring Num instances for all types involved instead of -- Backprop instances. 
-- -- This was the original API of the library (for version 0.1). -- -- Num is strictly more powerful than Backprop, and is a -- stronger constraint on types than is necessary for proper -- backpropagating. In particular, fromInteger is a problem for -- many types, preventing useful backpropagation for lists, -- variable-length vectors (like Data.Vector) and variable-size -- matrices from linear algebra libraries like hmatrix and -- accelerate. -- -- However, this module might be useful in situations where you are -- working with external types with Num instances, and you want to -- avoid writing orphan instances for external types. -- -- If you have external types that are not Num instances, consider -- instead Numeric.Backprop.External. -- -- If you need a Num instance for tuples, you can use the orphan -- instances in the -- <https://hackage.haskell.org/package/NumInstances -- NumInstances> package (in particular, -- Data.NumInstances.Tuple) if you are writing an application and -- do not have to worry about orphan instances. -- -- See Numeric.Backprop for fuller documentation on using these -- functions. module Numeric.Backprop.Num -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar): -- --
--   (^.)  ::        a -> Lens' a b ->        b
--   (^^.) :: BVar s a -> Lens' a b -> BVar s b
--   
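-- -- For example, with a type that already has a Num instance (a hypothetical session, assuming Data.Complex and the _realPart and _imagPart lenses from the lens package's Data.Complex.Lens): -- --
--   >>> gradBP (\z -> (z ^^. _realPart) * (z ^^. _imagPart)) (3 :+ 4)
--   4.0 :+ 3.0
--   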
-- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- If you have control over your data type definitions, you can also use -- splitBV and joinBV to manipulate data types by easily -- extracting fields out of a BVar of data types and creating -- BVars of data types out of BVars of their fields. See -- Numeric.Backprop#hkd for a tutorial on this use pattern. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. See -- https://backprop.jle.im/08-equipping-your-library.html for a -- detailed guide. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | backprop, but with Num constraints instead of -- Backprop constraints. -- -- See module documentation for Numeric.Backprop.Num for -- information on using this with tuples. backprop :: (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, a) -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents. -- -- Benchmarks show that this should have virtually no overhead over -- directly writing a a -> b. BVar is, in this -- situation, a zero-cost abstraction, performance-wise. -- -- See documentation of backprop for more information. evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> b -- | gradBP, but with Num constraints instead of -- Backprop constraints. gradBP :: (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a -- | backpropWith, but with Num constraints instead of -- Backprop constraints. -- -- See module documentation for Numeric.Backprop.Num for -- information on using this with tuples. -- -- Note that argument order changed in v0.2.4. backpropWith :: Num a => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, b -> a) -- | evalBP but with no arguments. Useful when everything is just -- given through constVar. evalBP0 :: (forall s. Reifies s W => BVar s a) -> a -- | backprop2, but with Num constraints instead of -- Backprop constraints. backprop2 :: (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (a, b)) -- | evalBP for a two-argument function. See backprop2 for -- notes. evalBP2 :: (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> c -- | gradBP2, but with Num constraints instead of -- Backprop constraints. gradBP2 :: (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b) -- | backpropWith2, but with Num constraints instead of -- Backprop constraints. 
-- -- Note that argument order changed in v0.2.4. backpropWith2 :: (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, c -> (a, b)) -- | backpropN, but with Num constraints instead of -- Backprop constraints. -- -- The RPureConstrained Num as in the constraint -- says that every value in the type-level list as must have a -- Num instance. This means you can use, say, '[Double, Float, -- Int], but not '[Double, Bool, String]. -- -- If you stick to concerete, monomorphic usage of this (with -- specific types, typed into source code, known at compile-time), then -- AllPureConstrained Num as should be fulfilled -- automatically. backpropN :: (RPureConstrained Num as, Num b) => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> (b, Rec Identity as) -- | evalBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. evalBPN :: forall as b. () => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> b -- | gradBPN, but with Num constraints instead of -- Backprop constraints. gradBPN :: (RPureConstrained Num as, Num b) => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> Rec Identity as -- | backpropWithN, but with Num constraints instead of -- Backprop constraints. -- -- See backpropN for information on the AllConstrained -- constraint. -- -- Note that argument order changed in v0.2.4. backpropWithN :: RPureConstrained Num as => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> (b, b -> Rec Identity as) -- | Lift a value into a BVar representing a constant value. -- -- This value will not be considered an input, and its gradients will not -- be backpropagated. constVar :: a -> BVar s a -- | Shorter alias for constVar, inspired by the ad library. auto :: a -> BVar s a -- | Coerce a BVar contents. Useful for things like newtype -- wrappers. coerceVar :: Coercible a b => BVar s a -> BVar s b -- | ^^., but with Num constraints instead of -- Backprop constraints. (^^.) :: forall b a s. (Num a, Num b, Reifies s W) => BVar s b -> Lens' b a -> BVar s a infixl 8 ^^. -- | .~~, but with Num constraints instead of -- Backprop constraints. (.~~) :: (Num a, Num b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b infixl 8 .~~ -- | %~~, but with Num constraints instead of -- Backprop constraints. (%~~) :: (Num a, Num b, Reifies s W) => Lens' b a -> (BVar s a -> BVar s a) -> BVar s b -> BVar s b infixr 4 %~~ -- | ^^?, but with Num constraints instead of -- Backprop constraints. -- -- Note that many automatically-generated prisms by the lens -- package use tuples, which cannot work this this by default (because -- tuples do not have a Num instance). -- -- If you are writing an application or don't have to worry about orphan -- instances, you can pull in the orphan instances from -- NumInstances. Alternatively, you can chain those prisms with -- conversions to the anonymous canonical strict tuple types in -- Numeric.Backprop.Tuple, which do have Num instances. -- --
--   myPrism                   :: Prism' c (a, b)
--   myPrism . iso tupT2 t2Tup :: Prism' c (T2 a b)
--   
(^^?) :: forall b a s. (Num b, Num a, Reifies s W) => BVar s b -> Traversal' b a -> Maybe (BVar s a) infixl 8 ^^? -- | ^^.., but with Num constraints instead of -- Backprop constraints. (^^..) :: forall b a s. (Num b, Num a, Reifies s W) => BVar s b -> Traversal' b a -> [BVar s a] -- | ^^?!, but with Num constraints instead of -- Backprop constraints. -- -- Like ^^?!, is *UNSAFE*. (^^?!) :: forall b a s. (Num b, Num a, Reifies s W) => BVar s b -> Traversal' b a -> BVar s a infixl 8 ^^?! -- | viewVar, but with Num constraints instead of -- Backprop constraints. viewVar :: forall b a s. (Num a, Num b, Reifies s W) => Lens' b a -> BVar s b -> BVar s a -- | setVar, but with Num constraints instead of -- Backprop constraints. setVar :: forall a b s. (Num a, Num b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b -- | overVar, but with Num constraints instead of -- Backprop constraints. overVar :: (Num a, Num b, Reifies s W) => Lens' b a -> (BVar s a -> BVar s a) -> BVar s b -> BVar s b -- | sequenceVar, but with Num constraints instead of -- Backprop constraints. -- -- Since v0.2.4, requires a Num constraint on t a. sequenceVar :: (Traversable t, Num a, Reifies s W) => BVar s (t a) -> t (BVar s a) -- | collectVar, but with Num constraints instead of -- Backprop constraints. -- -- Prior to v0.2.3, required a Num constraint on t a. collectVar :: (Foldable t, Functor t, Num a, Reifies s W) => t (BVar s a) -> BVar s (t a) -- | previewVar, but with Num constraints instead of -- Backprop constraints. -- -- See documentation for ^^? for more information and important -- notes. previewVar :: forall b a s. (Num b, Num a, Reifies s W) => Traversal' b a -> BVar s b -> Maybe (BVar s a) -- | toListOfVar, but with Num constraints instead of -- Backprop constraints. toListOfVar :: forall b a s. (Num b, Num a, Reifies s W) => Traversal' b a -> BVar s b -> [BVar s a] -- | isoVar, but with Num constraints instead of -- Backprop constraints. isoVar :: (Num a, Reifies s W) => (a -> b) -> (b -> a) -> BVar s a -> BVar s b -- | isoVar, but with Num constraints instead of -- Backprop constraints. isoVar2 :: (Num a, Num b, Reifies s W) => (a -> b -> c) -> (c -> (a, b)) -> BVar s a -> BVar s b -> BVar s c -- | isoVar3, but with Num constraints instead of -- Backprop constraints. isoVar3 :: (Num a, Num b, Num c, Reifies s W) => (a -> b -> c -> d) -> (d -> (a, b, c)) -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | isoVarN, but with Num constraints instead of -- Backprop constraints. isoVarN :: (RPureConstrained Num as, Reifies s W) => (Rec Identity as -> b) -> (b -> Rec Identity as) -> Rec (BVar s) as -> BVar s b -- | liftOp, but with Num constraints instead of -- Backprop constraints. liftOp :: (RPureConstrained Num as, Reifies s W) => Op as b -> Rec (BVar s) as -> BVar s b -- | liftOp1, but with Num constraints instead of -- Backprop constraints. liftOp1 :: (Num a, Reifies s W) => Op '[a] b -> BVar s a -> BVar s b -- | liftOp2, but with Num constraints instead of -- Backprop constraints. liftOp2 :: (Num a, Num b, Reifies s W) => Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c -- | liftOp3, but with Num constraints instead of -- Backprop constraints. liftOp3 :: (Num a, Num b, Num c, Reifies s W) => Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
--   Op '[Int, Bool] Double
--   
-- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Rec and 'Rec Identity'. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Rec Identity as -> (a, a -> Rec Identity as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Rec Identity as -> (a, a -> Rec Identity as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
--   >>> runOp (op0 10) RNil
--   (10, RNil)
--   
-- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
--   >>> gradOp' (opConst 10) (1 :& 2 :& 3 :& RNil)
--   (10, 0 :& 0 :& 0 :& RNil)
--   
opConst :: forall as a. RPureConstrained Num as => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
--   idOp = opIso id id
--   
idOp :: Op '[a] a -- | bpOp, but with Num constraints instead of -- Backprop constraints. bpOp :: RPureConstrained Num as => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Op as b -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the derivative <math>, it would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that squares its input: -- --
--   square :: Num a => Op '[a] a
--   square = op1 $ \x -> (x*x, \d -> 2 * d * x)
--   
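-- -- Lifted to BVars and differentiated (a hypothetical session, assuming the square definition above): -- --
--   >>> gradBP (liftOp1 square) (4 :: Double)
--   8.0
--   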
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
--   mul :: Num a => Op '[a, a] a
--   mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d))
--   
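-- -- Lifted to BVars with liftOp2 (a hypothetical session, assuming the mul definition above): -- --
--   >>> gradBP2 (liftOp2 mul) (3 :: Double) 4
--   (4.0,3.0)
--   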
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
--   >>> gradOp' opCoerce (5 :& RNil) :: (Int, Rec Identity '[Int])
--   (5, 1 :& RNil)
--   
-- --
--   opCoerce = opIso coerce coerce
--   
opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
--   >>> gradOp' opTup (1 :& 2 :& 3 :& RNil)
--   (1 :& 2 :& 3 :& RNil, 1 :& 1 :& 1 :& RNil)
--   
opTup :: Op as (Rec Identity as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Rec Identity as -> b) -> (b -> Rec Identity as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Rec Identity as -> b) -> Op as b class Reifies (s :: k) a | s -> a -- | Automatic differentation and backpropagation. -- -- Main idea: Write a function computing what you want, and the library -- automatically provies the gradient of that function as well, for usage -- with gradient descent and other training methods. -- -- See the homepage for an introduction and walkthrough. -- -- In more detail: instead of working directly with values to produce -- your result, you work with BVars containing those values. -- Working with these BVars is made smooth with the usage of -- lenses and other combinators, and libraries can offer operatons on -- BVars instead of those on normal types directly. -- -- Then, you can use: -- --
--   evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> b)
--   
-- -- to turn a BVar function into the function on actual values -- a -> b. This has virtually zero overhead over writing the -- actual function directly. -- -- Then, there's: -- --
--   gradBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> a)
--   
-- -- to automatically get the gradient, as well, for a given input. -- -- Refer to the homepage for more information and links to -- demonstrations and tutorials, or dive striaght in by reading the docs -- for BVar. -- -- If you are writing a library, see -- https://backprop.jle.im/08-equipping-your-library.html for a -- guide for equipping your library with backpropatable operations. -- -- In the original version 0.1, this module required Num instances -- for methods instead of Backprop instances. This interface is -- still available in Numeric.Backprop.Num, which has the same API -- as this module, except with Num constraints on all values -- instead of Backprop constraints. -- -- See Prelude.Backprop.Explicit for a version allowing you to -- provide zero, add, and one explicitly, which can -- be useful when attempting to avoid orphan instances or when mixing -- both Backprop and Num styles. module Numeric.Backprop -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar): -- --
--   (^.)  ::        a -> Lens' a b ->        b
--   (^^.) :: BVar s a -> Lens' a b -> BVar s b
--   
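-- -- For example (a hypothetical session, assuming _1 and _2 from the lens package, and using the tuple Backprop instance): -- --
--   >>> gradBP (\t -> (t ^^. _1)^2 + (t ^^. _2)^2) (3 :: Double, 4 :: Double)
--   (6.0,8.0)
--   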
-- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- If you have control over your data type definitions, you can also use -- splitBV and joinBV to manipulate data types by easily -- extracting fields out of a BVar of data types and creating -- BVars of data types out of BVars of their fields. See -- Numeric.Backprop#hkd for a tutorial on this use pattern. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. See -- https://backprop.jle.im/08-equipping-your-library.html for a -- detailed guide. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | Class of values that can be backpropagated in general. -- -- For instances of Num, these methods can be given by -- zeroNum, addNum, and oneNum. There are also -- generic options given in Numeric.Backprop.Class for functors, -- IsList instances, and Generic instances. -- --
--   instance Backprop Double where
--       zero = zeroNum
--       add = addNum
--       one = oneNum
--   
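-- -- As a sketch of the blank-instance route described just below (the Net type and its field names are hypothetical): -- --
--   {-# LANGUAGE DeriveGeneric #-}
--   import GHC.Generics (Generic)
--   import Numeric.Backprop (Backprop)
--   
--   data Net = Net { weights :: [Double], bias :: Double }
--     deriving Generic
--   
--   -- every field is a Backprop instance, so the methods can be left blank
--   instance Backprop Net
--   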
-- -- If you leave the body of an instance declaration blank, GHC Generics -- will be used to derive instances if the type has a single constructor -- and each field is an instance of Backprop. -- -- To ensure that backpropagation works in a sound way, should obey the -- laws: -- -- -- -- -- -- Also implies preservation of information, making zipWith -- (+) an illegal implementation for lists and vectors. -- -- This is only expected to be true up to potential "extra zeroes" in -- x and y in the result. -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- Note that not all values in the backpropagation process needs all of -- these methods: Only the "final result" needs one, for example. -- These are all grouped under one typeclass for convenience in defining -- instances, and also to talk about sensible laws. For fine-grained -- control, use the "explicit" versions of library functions (for -- example, in Numeric.Backprop.Explicit) instead of -- Backprop based ones. -- -- This typeclass replaces the reliance on Num of the previous API -- (v0.1). Num is strictly more powerful than Backprop, and -- is a stronger constraint on types than is necessary for proper -- backpropagating. In particular, fromInteger is a problem for -- many types, preventing useful backpropagation for lists, -- variable-length vectors (like Data.Vector) and variable-size -- matrices from linear algebra libraries like hmatrix and -- accelerate. class Backprop a -- | "Zero out" all components of a value. For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: Backprop a => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addIsList for a definition for instances of -- IsList. If left blank, will automatically be genericAdd, -- a pre-built definition for instances of Generic with one -- constructor whose fields are all themselves instances of -- Backprop. add :: Backprop a => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- As the library uses it, the most important law is: -- -- -- -- That is, one x is the gradient of the identity -- function with respect to its input. -- -- Ideally should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: Backprop a => a -> a -- | "Zero out" all components of a value. 
For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: (Backprop a, Generic a, GZero (Rep a)) => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addIsList for a definition for instances of -- IsList. If left blank, will automatically be genericAdd, -- a pre-built definition for instances of Generic with one -- constructor whose fields are all themselves instances of -- Backprop. add :: (Backprop a, Generic a, GAdd (Rep a)) => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- As the library uses it, the most important law is: -- -- -- -- That is, one x is the gradient of the identity -- function with respect to its input. -- -- Ideally should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: (Backprop a, Generic a, GOne (Rep a)) => a -> a -- | A newtype wrapper over an f a for Applicative -- f that gives a free Backprop instance (as well as -- Num etc. instances). -- -- Useful for performing backpropagation over functions that require some -- monadic context (like IO) to perform. newtype ABP f a ABP :: f a -> ABP f a [runABP] :: ABP f a -> f a -- | A newtype wrapper over an instance of Num that gives a free -- Backprop instance. -- -- Useful for things like DerivingVia, or for avoiding orphan -- instances. newtype NumBP a NumBP :: a -> NumBP a [runNumBP] :: NumBP a -> a -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents, also computing its -- gradient a as well. -- -- The Rank-N type forall s. Reifies s W => ... -- is used to ensure that BVars do not leak out of the context -- (similar to how it is used in Control.Monad.ST), and also as a -- reference to an ephemeral Wengert tape used to track the graph of -- references. backprop :: (Backprop a, Backprop b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, a) -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents. -- -- Benchmarks show that this should have virtually no overhead over -- directly writing a a -> b. BVar is, in this -- situation, a zero-cost abstraction, performance-wise. -- -- See documentation of backprop for more information. evalBP :: (forall s. 
Reifies s W => BVar s a -> BVar s b) -> a -> b -- | Take a function BVar s a -> BVar s b, -- interpreted as a function a -> b, and compute its gradient -- with respect to its input. -- -- The resulting a -> a tells how the input (and its -- components) affects the output. Positive numbers indicate that the -- result will vary in the same direction as any adjustment in the input. -- Negative numbers indicate that the result will vary in the opposite -- direction as any adjustment in the input. Larger numbers indicate a -- greater sensitivity of change, and small numbers indicate lower -- sensitivity. -- -- See documentation of backprop for more information. -- -- If you want to provide an explicit "final gradient" for the end, see -- backpropWith. gradBP :: (Backprop a, Backprop b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a -- | A version of backprop that allows you to specify the gradent of -- your "final result" in with respect to the output of your function. -- -- Typically, this is just the scalar 1, or a value of components that -- are all 1. -- -- Instead of taking the b gradient, the you may provide a b -- -> b, which backpropWith calls with the result of your -- function as the argument. This allows you to return something with the -- correct "shape", if not a scalar. -- -- backprop is essentially backpropWith with -- const 1 for scalars and Num instances. -- -- Note that argument order changed in v0.2.4 backpropWith :: Backprop a => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, b -> a) -- | backprop for a two-argument function. -- -- Not strictly necessary, because you can always uncurry a function by -- passing in all of the argument inside a data type, or just use a -- tuple. However, this could potentially be more performant. -- -- For 3 and more arguments, consider using backpropN. backprop2 :: (Backprop a, Backprop b, Backprop c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (a, b)) -- | evalBP for a two-argument function. See backprop2 for -- notes. evalBP2 :: (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> c -- | gradBP for a two-argument function. See backprop2 for -- notes. gradBP2 :: (Backprop a, Backprop b, Backprop c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b) -- | backprop2, but allows you to provide the gradient of the "final -- result" with respect to the output of your function. See -- backpropWith for more details. -- -- Note that argument order changed in v0.2.4 backpropWith2 :: (Backprop a, Backprop b) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, c -> (a, b)) -- | backprop generalized to multiple inputs of different types. See -- the Numeric.Backprop.Op#prod for a mini-tutorial on -- heterogeneous lists. -- -- Not strictly necessary, because you can always uncurry a function by -- passing in all of the inputs in a data type containing all of the -- arguments or a giant tuple. However, this could potentially also be -- more performant. -- -- A Rec (BVar s) '[Double, Float, Double], for -- instance, is a tuple of BVar s Double, -- BVar s Float, and BVar s -- Double, and can be pattern matched on using :< -- (cons) and Ø (nil). -- -- The RPureConstrained Backprop as in the -- constraint says that every value in the type-level list as -- must have a Backprop instance. This means you can use, say, -- '[Double, Float, Int], but not '[Double, Bool, -- String]. 
-- -- If you stick to concerete, monomorphic usage of this (with -- specific types, typed into source code, known at compile-time), then -- RPureConstrained Backprop as should be -- fulfilled automatically. backpropN :: (RPureConstrained Backprop as, Backprop b) => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> (b, Rec Identity as) -- | evalBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. evalBPN :: forall as b. () => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> b -- | gradBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. gradBPN :: (RPureConstrained Backprop as, Backprop b) => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> Rec Identity as -- | backpropN, but allows you to provide the gradient of the "final -- result" with respect to the output of your function. See -- backpropWith for more details. -- -- Note that argument order changed in v0.2.4. backpropWithN :: RPureConstrained Backprop as => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Rec Identity as -> (b, b -> Rec Identity as) -- | evalBP but with no arguments. Useful when everything is just -- given through constVar. evalBP0 :: (forall s. Reifies s W => BVar s a) -> a -- | Lift a value into a BVar representing a constant value. -- -- This value will not be considered an input, and its gradients will not -- be backpropagated. constVar :: a -> BVar s a -- | Shorter alias for constVar, inspired by the ad library. auto :: a -> BVar s a -- | Coerce a BVar contents. Useful for things like newtype -- wrappers. coerceVar :: Coercible a b => BVar s a -> BVar s b -- | An infix version of viewVar, meant to evoke parallels to -- ^. from lens. -- -- With normal values, you can extract something from that value with a -- lens: -- --
--   x ^. myLens
--   
-- -- would extract a piece of x :: b, specified by myLens :: -- Lens' b a. The result has type a. -- --
--   xVar ^^. myLens
--   
-- -- would extract a piece out of xVar :: BVar s b (a -- BVar holding a b), specified by myLens :: Lens' b -- a. The result has type BVar s a (a BVar -- holding a a) -- -- This is the main way to pull out values from BVar of container -- types. -- -- If you have control of your data type definitions, consider using -- splitBV, which lets you break out BVars of values into -- BVars of their individual fields automatically without -- requiring lenses. -- -- NOTE: Usage of ^^. on many fields from the same item is -- usually the main source of overhead in backprop code, if you -- are looking to optimize your code. See -- <https://backprop.jle.im/07-performance.html this -- performance guide> for more information, and details on mitigating -- this overhead. -- -- WARNING: Do not use with any lenses that operate "numerically" -- on the contents (like multiplying). (^^.) :: forall b a s. (Backprop b, Backprop a, Reifies s W) => BVar s b -> Lens' b a -> BVar s a infixl 8 ^^. -- | An infix version of setVar, meant to evoke parallels to -- .~ from lens. -- -- With normal values, you can set something in a value with a lens: -- --
--   x & myLens .~ y
--   
-- -- would "set" a part of x :: b, specified by myLens :: -- Lens' a b, to a new value y :: a. -- --
--   xVar & myLens .~~ yVar
--   
-- -- would "set" a part of xVar :: BVar s b (a BVar -- holding a b), specified by myLens :: Lens' a -- b, to a new value given by yVar :: BVar s a. The -- result is a new (updated) value of type BVar s b. -- -- This is the main way to set values inside BVars of container -- types. -- -- Note that this does not incurr the performance overhead issues of -- viewVar and ^^., and is fairly cheap. (.~~) :: (Backprop a, Backprop b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b infixl 8 .~~ -- | An infix version of overVar, meant to evoke parallels to -- %~ from lens. -- -- With normal values, you can set modify in a value with a lens: -- --
--   x & myLens %~ negate
--   
-- -- would "modify" a part of x :: b, specified by myLens :: -- Lens' a b, using the function negate :: a -> -- a. -- --
--   xVar & myLens %~~ negate
--   
-- -- would "modify" a part of xVar :: BVar s b (a -- BVar holding a b), specified by myLens :: -- Lens' a b, using the function negate :: BVar s a -> -- BVar s . The result is a new (updated) value of type -- BVar s b. -- -- Is essentially a convenient wrapper over a viewVar followed by -- a setVar. (%~~) :: (Backprop a, Backprop b, Reifies s W) => Lens' b a -> (BVar s a -> BVar s a) -> BVar s b -> BVar s b infixr 4 %~~ -- | An infix version of previewVar, meant to evoke parallels to -- ^? from lens. -- -- With normal values, you can (potentially) extract something from that -- value with a lens: -- --
--   x ^? myPrism
--   
-- -- would (potentially) extract a piece of x :: b, specified by -- myPrism :: Traversal' b a. The result has type -- Maybe a. -- --
--   xVar ^^? myPrism
--   
-- -- would (potentially) extract a piece out of xVar :: BVar s -- b (a BVar holding a b), specified by myPrism -- :: Prism' b a. The result has type Maybe (BVar -- s a) (Maybe a BVar holding a a). -- -- This is intended to be used with Prism's (which hits at most -- one target), but will actually work with any Traversal'. -- If the traversal hits more than one target, the first one found will -- be extracted. -- -- This can be used to "pattern match" on BVars, by using prisms -- on constructors. -- -- NOTE: Has the same potential of performance overhead issues as -- ^^.; see documentation of ^^. for more details. (^^?) :: forall b a s. (Backprop b, Backprop a, Reifies s W) => BVar s b -> Traversal' b a -> Maybe (BVar s a) infixl 8 ^^? -- | An infix version of toListOfVar, meant to evoke parallels to -- ^.. from lens. -- -- With normal values, you can extract all targets of a Traversal -- from that value with a: -- --
--   x ^.. myTraversal
--   
-- -- would extract all targets inside of x :: b, specified by -- myTraversal :: Traversal' b a. The result has type -- [a]. -- --
--   xVar ^^.. myTraversal
--   
-- -- would extract all targets inside of xVar :: BVar s b -- (a BVar holding a b), specified by myTraversal :: -- Traversal' b a. The result has type [BVar s a] (A -- list of BVars holding as). -- -- NOTE: Has all of the performance overhead issues of -- sequenceVar; see documentation for sequenceVar for more -- information. (^^..) :: forall b a s. (Backprop b, Backprop a, Reifies s W) => BVar s b -> Traversal' b a -> [BVar s a] -- | An *UNSAFE* version of ^^? and previewVar assuming that -- the value is there. -- -- Is undefined if the Traversal hits no targets. -- -- Is essentially ^^? with fromJust, or ^^.. with -- head. (^^?!) :: forall b a s. (Backprop b, Backprop a, Reifies s W) => BVar s b -> Traversal' b a -> BVar s a infixl 8 ^^?! -- | Using a Lens', extract a value inside a BVar. -- Meant to evoke parallels to view from lens. -- -- See documentation for ^^. for more information, caveats, and -- warnings. viewVar :: forall b a s. (Backprop a, Backprop b, Reifies s W) => Lens' b a -> BVar s b -> BVar s a -- | Using a Lens', set a value inside a BVar. Meant -- to evoke parallels to "set" from lens. -- -- See documentation for .~~ for more information. setVar :: (Backprop a, Backprop b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b -- | Using a Lens', modify a value inzide a BVar. -- Meant to evoke parallels to "over" from lens. See documentation for -- %~~ for more information. overVar :: (Backprop a, Backprop b, Reifies s W) => Lens' b a -> (BVar s a -> BVar s a) -> BVar s b -> BVar s b -- | Extract all of the BVars out of a Traversable container -- of BVars. -- -- Note that this associates gradients in order of occurrence in the -- original data structure; the second item in the gradient is assumed to -- correspond with the second item in the input, etc.; this can cause -- unexpected behavior in Foldable instances that don't have a -- fixed number of items. -- -- NOTE: A potential source of performance overhead. If there are -- <math> total elements, and you use <math> of them, then -- there is an overhead cost on the order of <math>, with a -- constant factor dependent on the cost of add. Should be -- negligible for types with cheap add (like Double), but -- may be costly for things like large matrices. See -- <https://backprop.jle.im/07-performance.html the performance -- guide> for for details. sequenceVar :: (Traversable t, Backprop a, Reifies s W) => BVar s (t a) -> t (BVar s a) -- | Collect all of the BVars in a container into a BVar of -- that container's contents. -- -- Note that this associates gradients in order of occurrence in the -- original data structure; the second item in the total derivative and -- gradient is assumed to correspond with the second item in the input, -- etc.; this can cause unexpected behavior in Foldable instances -- that don't have a fixed number of items. -- -- Note that this does not suffer from the same performance -- overhead issues as sequenceVar. collectVar is -- <math>, with a very small constant factor that consistent for -- all types. This reveals a general property of reverse-mode automatic -- differentiation; "many to one" is cheap, but "one to many" is -- expensive. collectVar :: (Foldable t, Functor t, Backprop a, Reifies s W) => t (BVar s a) -> BVar s (t a) -- | Using a Traversal', extract a single value inside a -- BVar, if it exists. If more than one traversal target exists, -- returns te first. Meant to evoke parallels to preview from -- lens. 
Really only intended to be used wth Prism's, or -- up-to-one target traversals. -- -- See documentation for ^^? for more information, warnings, and -- caveats. previewVar :: forall b a s. (Backprop b, Backprop a, Reifies s W) => Traversal' b a -> BVar s b -> Maybe (BVar s a) -- | Using a Traversal', extract all targeted values inside a -- BVar. Meant to evoke parallels to toListOf from lens. -- -- See documentation for ^^.. for more information, warnings, and -- caveats. toListOfVar :: forall b a s. (Backprop b, Backprop a, Reifies s W) => Traversal' b a -> BVar s b -> [BVar s a] -- | Useful pattern for constructing and deconstructing BVars of -- two-tuples. pattern T2 :: (Backprop a, Backprop b, Reifies s W) => BVar s a -> BVar s b -> BVar s (a, b) -- | Useful pattern for constructing and deconstructing BVars -- three-tuples. pattern T3 :: (Backprop a, Backprop b, Backprop c, Reifies s W) => BVar s a -> BVar s b -> BVar s c -> BVar s (a, b, c) -- | Convert the value inside a BVar using a given isomorphism. -- Useful for things like constructors. -- -- If you have control of your data type definitions, consider using -- joinBV, which lets you use your data type constructors -- themselves to join together BVars as their fields. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. isoVar :: (Backprop a, Reifies s W) => (a -> b) -> (b -> a) -> BVar s a -> BVar s b -- | Convert the values inside two BVars using a given isomorphism. -- Useful for things like constructors. See isoVar for caveats. -- -- If you have control of your data type definitions, consider using -- joinBV, which lets you use your data type constructors -- themselves to join together BVars as their fields. isoVar2 :: (Backprop a, Backprop b, Reifies s W) => (a -> b -> c) -> (c -> (a, b)) -> BVar s a -> BVar s b -> BVar s c -- | Convert the values inside three BVars using a given -- isomorphism. Useful for things like constructors. See isoVar -- for caveats. isoVar3 :: (Backprop a, Backprop b, Backprop c, Reifies s W) => (a -> b -> c -> d) -> (d -> (a, b, c)) -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | Convert the values inside a tuple of BVars using a given -- isomorphism. Useful for things like constructors. See isoVar -- for caveats. -- -- If you have control of your data type definitions, consider using -- joinBV, which lets you use your data type constructors -- themselves to join together BVars as their fields. isoVarN :: (RPureConstrained Backprop as, Reifies s W) => (Rec Identity as -> b) -> (b -> Rec Identity as) -> Rec (BVar s) as -> BVar s b -- | Lift an Op with an arbitrary number of inputs to a function on -- the appropriate number of BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information, and -- Numeric.Backprop.Op#prod for a mini-tutorial on using -- Rec. liftOp :: (RPureConstrained Backprop as, Reifies s W) => Op as b -> Rec (BVar s) as -> BVar s b -- | Lift an Op with a single input to be a function on a single -- BVar. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. 
liftOp1 :: (Backprop a, Reifies s W) => Op '[a] b -> BVar s a -> BVar s b -- | Lift an Op with two inputs to be a function on a two -- BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp2 :: (Backprop a, Backprop b, Reifies s W) => Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c -- | Lift an Op with three inputs to be a function on a three -- BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp3 :: (Backprop a, Backprop b, Backprop c, Reifies s W) => Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | Split out a BVar of "higher-kinded data type", a la -- http://reasonablypolymorphic.com/blog/higher-kinded-data/ -- -- Lets you take BVar of a value into a separate BVar of -- every field of that value. -- -- See Numeric.Backprop#hkd for a tutorial on usage. -- -- This will work with all data types made with a single constructor, -- whose fields are all instances of Backprop, where the type -- itself has an instance of Backprop. The type also must derive -- Generic. -- -- Note that access using splitBV and pattern matching is slightly -- slower than access using lenses (by about 10-20%). -- -- See also BV, pattern synonym version where the deconstructor is -- exactly a view into splitBV. -- -- NOTE: Like ^^. and viewVar, splitBV usage -- could potentially be the main source of performance overhead in your -- program. If your data type has <math> fields, and you use -- splitBV to later use <math> of those fields, there is an -- overhead cost on the order of <math>, with a constant factor -- dependent on the cost of add for your original data type. -- Should be negligible for types with cheap add (like -- Double), but may be costly for things like large matrices. See -- the performance guide for for details. -- -- However, there is some potential opportunities to re-write some core -- library functionality that would allow splitBV to avoid all of -- the significant performance overhead issues of ^^.. Contact me -- if you are interested in helping out! splitBV :: (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Backprop (z f), Backprop (Rep (z f) ()), RPureConstrained Backprop as, Reifies s W) => BVar s (z f) -> z (BVar s) -- | Assemble a BVar of "higher-kinded data type", a la -- http://reasonablypolymorphic.com/blog/higher-kinded-data/ -- -- It lets you take a BVar of every field of a value, and join -- them into a BVar of that value. -- -- See Numeric.Backprop#hkd for a tutorial on usage. -- -- This will work with all data types made with a single constructor, -- whose fields are all instances of Backprop, where the type -- itself has an instance of Backprop. -- -- See also BV, a pattern synonym version where the constructor is -- exactly joinBV. -- -- Note that joinBV does not suffer the major performance overhead -- issues of splitBV. This is a general property of reverse-mode -- automatic differentiation: "many to one" is cheap, but "one to many" -- is expensive. 
joinBV :: (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Backprop (z f), Backprop (Rep (z f) ()), RPureConstrained Backprop as, Reifies s W) => z (BVar s) -> BVar s (z f) -- | Pattern synonym wrapping manual usage of splitBV and -- joinBV. It is a pattern for a BVar s (z f) -- containing a z (BVar s) pattern BV :: (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Backprop (Rep (z f) ()), Backprop (z f), RPureConstrained Backprop as, RecApplicative as, Reifies s W) => z (BVar s) -> BVar s (z f) -- | Helper class for generically "splitting" and "joining" BVars -- into constructors. See splitBV and joinBV. -- -- See Numeric.Backprop#hkd for a tutorial on how to use this. -- -- Instances should be available for types made with one constructor -- whose fields are all instances of Backprop, with a -- Generic instance. class BVGroup s as i o | o -> i, i -> as -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
--   Op '[Int, Bool] Double
--   
-- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Rec and 'Rec Identity'. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Rec Identity as -> (a, a -> Rec Identity as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Rec Identity as -> (a, a -> Rec Identity as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
--   >>> runOp (op0 10) RNil
--   (10, RNil)
--   
-- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
--   >>> gradOp' (opConst 10) (1 :& 2 :& 3 :& RNil)
--   (10, 0 :& 0 :& 0 :& RNil)
--   
opConst :: forall as a. RPureConstrained Num as => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
--   idOp = opIso id id
--   
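-- 
-- As a quick informal check (in the same REPL style as the other
-- examples in this module, assuming a numeric input type such as
-- Double), the gradient of the identity function is just 1:
-- 
--   >>> gradOp' idOp (5 :& RNil)
--   (5, 1 :& RNil)
--   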
idOp :: Op '[a] a -- | Create an Op from a backpropagatable function. Can be useful -- for "storing" an otherwise Rank-N backpropagatable function in order -- to avoid impredicative types. But this is pretty uncommon, so this is -- mostly just used for low-level internal situations. -- --
--   liftOp . bpOp = id
--   bpOp . liftOp = id
--   
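-- 
-- A minimal sketch of the intended use (the name hypotOp is
-- hypothetical): write the function against BVars, then store it as
-- an Op. By the laws above, liftOp hypotOp recovers the original
-- BVar-based function.
-- 
--   hypotOp :: (Backprop a, Floating a) => Op '[a, a] a
--   hypotOp = bpOp $ \(x :& y :& RNil) -> sqrt (x * x + y * y)
--   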
bpOp :: RPureConstrained Backprop as => (forall s. Reifies s W => Rec (BVar s) as -> BVar s b) -> Op as b -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and returning the derivative of the input. -- -- If we have -- -- \[ y = f(x), \qquad z = g(y) \] -- -- Then the derivative \( \frac{dz}{dx} \) would be: -- -- \[ \frac{dz}{dx} = \frac{dz}{dy} \frac{dy}{dx} \] -- -- If our Op represents \(f\), then the second item in the -- resulting tuple should be a function that takes \(\frac{dz}{dy}\) and -- returns \(\frac{dz}{dx}\). -- -- As an example, here is an Op that squares its input: -- --
--   square :: Num a => Op '[a] a
--   square = op1 $ \x -> (x*x, \d -> 2 * d * x)
--   
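-- 
-- Running it at x = 3 with gradOp' (informal REPL style, matching the
-- other examples in this module) gives the result 9 and the gradient
-- 2 * 1 * 3 = 6:
-- 
--   >>> gradOp' square (3 :& RNil)
--   (9, 6 :& RNil)
--   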
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and returning the derivative of the inputs. -- -- If we have -- -- \[ z = f(x, y), \qquad k = g(z) \] -- -- Then the gradient \( \left< \frac{\partial k}{\partial x}, \frac{\partial k}{\partial y} \right> \) would be: -- -- \[ \left< \frac{\partial k}{\partial x}, \frac{\partial k}{\partial y} \right> = \left< \frac{\partial k}{\partial z} \frac{\partial z}{\partial x}, \frac{\partial k}{\partial z} \frac{\partial z}{\partial y} \right> \] -- -- If our Op represents \(f\), then the second item in the -- resulting tuple should be a function that takes \(\frac{\partial k}{\partial z}\) and -- returns \( \left< \frac{\partial k}{\partial x}, \frac{\partial k}{\partial y} \right> \). -- -- As an example, here is an Op that multiplies its inputs: -- --
--   mul :: Num a => Op '[a, a] a
--   mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d))
--   
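-- 
-- Running it with gradOp', and then lifting it to a function on
-- BVars with liftOp2, might look like this sketch (the name mulVar is
-- hypothetical, and gradBP2 is assumed to be the two-argument gradient
-- runner from Numeric.Backprop):
-- 
--   >>> gradOp' mul (3 :& 5 :& RNil)
--   (15, 5 :& 3 :& RNil)
--   
--   mulVar :: (Backprop a, Num a, Reifies s W) => BVar s a -> BVar s a -> BVar s a
--   mulVar = liftOp2 mul
--   
--   >>> gradBP2 mulVar 3 5
--   (5, 3)
--   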
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
--   >>> gradOp' opCoerce (Identity 5) :: (Int, Identity Int)
--   (5, Identity 1)
--   
-- --
--   opCoerce = opIso coerce coerce
--   
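-- 
-- For instance (the newtype Meters here is hypothetical), this gives a
-- differentiable conversion into a newtype wrapper:
-- 
--   newtype Meters = Meters Double
--   
--   toMeters :: Op '[Double] Meters
--   toMeters = opCoerce
--   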
opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
--   >>> gradOp' opTup (1 :& 2 :& 3 :& RNil)
--   (1 :& 2 :& 3 :& RNil, 1 :& 1 :& 1 :& RNil)
--   
opTup :: Op as (Rec Identity as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Rec Identity as -> b) -> (b -> Rec Identity as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for lenses that -- numerically manipulate the value before returning it. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Rec Identity as -> b) -> Op as b class Reifies (s :: k) a | s -> a -- | Provides "explicit" versions of all of the functions in -- Prelude.Backprop. Instead of relying on a Backprop -- instance, allows you to manually provide zero, add, and -- one on a per-value basis. -- -- WARNING: The API of this module can be considered only "semi-stable"; -- while the API of Prelude.Backprop and Prelude.Backprop.Num are -- kept consistent, some argument order changes might happen in this -- module to reflect changes in underlying implementation. module Prelude.Backprop.Explicit -- | sum, but taking explicit add and zero. sum :: (Foldable t, Functor t, Num a, Reifies s W) => AddFunc (t a) -> BVar s (t a) -> BVar s a -- | product, but taking explicit add and zero. product :: (Foldable t, Functor t, Fractional a, Reifies s W) => AddFunc (t a) -> BVar s (t a) -> BVar s a -- | length, but taking explicit add and zero. length :: (Foldable t, Num b, Reifies s W) => AddFunc (t a) -> ZeroFunc (t a) -> BVar s (t a) -> BVar s b -- | minimum, but taking explicit add and zero. minimum :: (Foldable t, Functor t, Ord a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | maximum, but taking explicit add and zero. 
maximum :: (Foldable t, Functor t, Ord a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | traverse, but taking explicit add and zero. traverse :: (Traversable t, Applicative f, Foldable f, Reifies s W) => AddFunc a -> AddFunc b -> AddFunc (t b) -> ZeroFunc a -> ZeroFunc b -> (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | toList, but taking explicit add and zero. toList :: (Traversable t, Reifies s W) => AddFunc a -> ZeroFunc a -> BVar s (t a) -> [BVar s a] -- | mapAccumL, but taking explicit add and zero. mapAccumL :: (Traversable t, Reifies s W) => AddFunc b -> AddFunc c -> ZeroFunc b -> ZeroFunc c -> (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | mapAccumR, but taking explicit add and zero. mapAccumR :: (Traversable t, Reifies s W) => AddFunc b -> AddFunc c -> ZeroFunc b -> ZeroFunc c -> (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | foldr, but taking explicit add and zero. foldr :: (Traversable t, Reifies s W) => AddFunc a -> ZeroFunc a -> (BVar s a -> BVar s b -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | foldl', but taking explicit add and zero. foldl' :: (Traversable t, Reifies s W) => AddFunc a -> ZeroFunc a -> (BVar s b -> BVar s a -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | fmap, but taking explicit add and zero. fmap :: (Traversable f, Reifies s W) => AddFunc a -> AddFunc b -> ZeroFunc a -> ZeroFunc b -> (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | fmapConst, but taking explicit add and zero. fmapConst :: (Functor f, Foldable f, Reifies s W) => AddFunc (f a) -> AddFunc b -> ZeroFunc (f a) -> ZeroFunc b -> BVar s b -> BVar s (f a) -> BVar s (f b) -- | pure, but taking explicit add and zero. pure :: (Foldable t, Applicative t, Reifies s W) => AddFunc a -> ZeroFunc a -> BVar s a -> BVar s (t a) -- | liftA2, but taking explicit add and zero. liftA2 :: (Traversable f, Applicative f, Reifies s W) => AddFunc a -> AddFunc b -> AddFunc c -> ZeroFunc a -> ZeroFunc b -> ZeroFunc c -> (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | liftA3, but taking explicit add and zero. liftA3 :: (Traversable f, Applicative f, Reifies s W) => AddFunc a -> AddFunc b -> AddFunc c -> AddFunc d -> ZeroFunc a -> ZeroFunc b -> ZeroFunc c -> ZeroFunc d -> (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | fromIntegral, but taking explicit add and zero. fromIntegral :: (Integral a, Integral b, Reifies s W) => AddFunc a -> BVar s a -> BVar s b -- | realToFrac, but taking explicit add and zero. realToFrac :: (Fractional a, Real a, Fractional b, Real b, Reifies s W) => AddFunc a -> BVar s a -> BVar s b -- | round, but taking explicit add and zero. round :: (RealFrac a, Integral b, Reifies s W) => AddFunc a -> BVar s a -> BVar s b -- | fromIntegral', but taking explicit add and zero. fromIntegral' :: (Integral a, RealFrac b, Reifies s W) => AddFunc a -> BVar s a -> BVar s b -- | Coerce items inside a BVar. coerce :: Coercible a b => BVar s a -> BVar s b -- | Some lifted versions of common functions found in Prelude (or -- base in general). -- -- This module is intended to be a catch-all one, so feel free to suggest -- other functions or submit a PR if you think one would make sense. 
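-- 
-- As a quick sketch of how these lifted functions are meant to be used
-- (the name sumSq is hypothetical, the module is assumed to be imported
-- qualified, and gradBP is the gradient runner from Numeric.Backprop):
-- a sum-of-squares over a list, written against BVars, can be
-- differentiated directly:
-- 
--   import qualified Prelude.Backprop as PB
--   
--   sumSq :: (Backprop a, Num a, Reifies s W) => BVar s [a] -> BVar s a
--   sumSq = PB.sum . PB.fmap (\x -> x * x)
--   
--   >>> gradBP sumSq [1, 2, 3]
--   [2, 4, 6]
--   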
-- -- See Prelude.Backprop.Num for a version with Num -- constraints instead of Backprop constraints, and -- Prelude.Backprop.Explicit for a version allowing you to provide -- zero, add, and one explicitly. module Prelude.Backprop -- | Lifted sum. More efficient than going through toList. sum :: (Foldable t, Functor t, Backprop (t a), Num a, Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted product. More efficient than going through -- toList. product :: (Foldable t, Functor t, Backprop (t a), Fractional a, Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted length. More efficient than going through toList. length :: (Foldable t, Backprop (t a), Num b, Reifies s W) => BVar s (t a) -> BVar s b -- | Lifted minimum. Undefined for situations where minimum -- would be undefined. More efficient than going through toList. minimum :: (Foldable t, Functor t, Backprop a, Ord a, Backprop (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted maximum. Undefined for situations where maximum -- would be undefined. More efficient than going through toList. maximum :: (Foldable t, Functor t, Backprop a, Ord a, Backprop (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted traverse. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Functors. traverse :: (Traversable t, Applicative f, Foldable f, Backprop a, Backprop b, Backprop (t b), Reifies s W) => (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | Lifted version of toList. Takes a BVar of a -- Traversable of items and returns a list of BVars for -- each item. -- -- You can use this to implement "lifted" versions of Foldable -- methods like foldr, foldl', etc.; however, sum, -- product, length, minimum, and maximum have -- more efficient implementations than simply minimum . -- toList. toList :: (Traversable t, Backprop a, Reifies s W) => BVar s (t a) -> [BVar s a] -- | Lifted version of mapAccumL. -- -- Prior to v0.2.3, required a Backprop constraint on t -- b. mapAccumL :: (Traversable t, Backprop b, Backprop c, Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | Lifted version of mapAccumR. -- -- Prior to v0.2.3, required a Backprop constraint on t -- b. mapAccumR :: (Traversable t, Backprop b, Backprop c, Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | Lifted foldr. Essentially just toList composed with a -- normal list foldr, and is only here for convenience. foldr :: (Traversable t, Backprop a, Reifies s W) => (BVar s a -> BVar s b -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | Lifted foldl'. Essentially just toList composed with a -- normal list foldl', and is only here for convenience. foldl' :: (Traversable t, Backprop a, Reifies s W) => (BVar s b -> BVar s a -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | Lifted fmap. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Functors. fmap :: (Traversable f, Backprop a, Backprop b, Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | Efficient version of fmap when used to "replace" all values in -- a Functor value. -- --
--   fmapConst x = fmap (const x)
--   
-- -- but much more efficient. fmapConst :: (Functor f, Foldable f, Backprop b, Backprop (f a), Reifies s W) => BVar s b -> BVar s (f a) -> BVar s (f b) -- | Alias for fmap. (<$>) :: (Traversable f, Backprop a, Backprop b, Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) infixl 4 <$> -- | Alias for fmapConst. (<$) :: (Traversable f, Backprop b, Backprop (f a), Reifies s W) => BVar s b -> BVar s (f a) -> BVar s (f b) infixl 4 <$ -- | Alias for flip fmapConst. ($>) :: (Traversable f, Backprop b, Backprop (f a), Reifies s W) => BVar s (f a) -> BVar s b -> BVar s (f b) infixl 4 $> -- | Lifted pure. pure :: (Foldable t, Applicative t, Backprop a, Reifies s W) => BVar s a -> BVar s (t a) -- | Lifted liftA2. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Applicatives. liftA2 :: (Traversable f, Applicative f, Backprop a, Backprop b, Backprop c, Reifies s W) => (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | Lifted liftA3. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Applicatives. liftA3 :: (Traversable f, Applicative f, Backprop a, Backprop b, Backprop c, Backprop d, Reifies s W) => (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | Lifted conversion between two Integral instances. fromIntegral :: (Backprop a, Integral a, Integral b, Reifies s W) => BVar s a -> BVar s b -- | Lifted conversion between two Fractional and Real -- instances. realToFrac :: (Backprop a, Fractional a, Real a, Fractional b, Real b, Reifies s W) => BVar s a -> BVar s b -- | Lifted version of round. -- -- Gradient should technically diverge whenever the fractional part is -- 0.5, but does not do this for convenience reasons. round :: (RealFrac a, Integral b, Reifies s W) => BVar s a -> BVar s b -- | Lifted version of fromIntegral, defined to let you return -- RealFrac instances as targets, instead of only other -- Integrals. Essentially the opposite of round. -- -- The gradient should technically diverge whenever the fractional part -- of the downstream gradient is 0.5, but does not do this for -- convenience reasons. fromIntegral' :: (Integral a, RealFrac b, Reifies s W) => BVar s a -> BVar s b -- | Coerce items inside a BVar. coerce :: Coercible a b => BVar s a -> BVar s b -- | Provides the exact same API as Prelude.Backprop, except -- requiring Num instances for all types involved instead of -- Backprop instances. module Prelude.Backprop.Num -- | sum, but with Num constraints instead of -- Backprop constraints. sum :: (Foldable t, Functor t, Num (t a), Num a, Reifies s W) => BVar s (t a) -> BVar s a -- | product, but with Num constraints instead of -- Backprop constraints. product :: (Foldable t, Functor t, Num (t a), Fractional a, Reifies s W) => BVar s (t a) -> BVar s a -- | length, but with Num constraints instead of -- Backprop constraints. length :: (Foldable t, Num (t a), Num b, Reifies s W) => BVar s (t a) -> BVar s b -- | minimum, but with Num constraints instead of -- Backprop constraints. minimum :: (Foldable t, Functor t, Num a, Ord a, Num (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | maximum, but with Num constraints instead of -- Backprop constraints. maximum :: (Foldable t, Functor t, Num a, Ord a, Num (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | traverse, but with Num constraints instead of -- Backprop constraints. 
-- -- See vector-sized for a fixed-length vector type with a very -- appropriate Num instance! traverse :: (Traversable t, Applicative f, Foldable f, Num a, Num b, Num (t b), Reifies s W) => (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | toList, but with Num constraints instead of -- Backprop constraints. toList :: (Traversable t, Num a, Reifies s W) => BVar s (t a) -> [BVar s a] -- | mapAccumL, but with Num constraints instead of -- Backprop constraints. -- -- Prior to v0.2.3, required a Num constraint on t b. mapAccumL :: (Traversable t, Num b, Num c, Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | mapAccumR, but with Num constraints instead of -- Backprop constraints. -- -- Prior to v0.2.3, required a Num constraint on t b. mapAccumR :: (Traversable t, Num b, Num c, Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | foldr, but with Num constraints instead of -- Backprop constraints. foldr :: (Traversable t, Num a, Reifies s W) => (BVar s a -> BVar s b -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | foldl', but with Num constraints instead of -- Backprop constraints. foldl' :: (Traversable t, Num a, Reifies s W) => (BVar s b -> BVar s a -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | fmap, but with Num constraints instead of -- Backprop constraints. fmap :: (Traversable f, Num a, Num b, Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | fmapConst, but with Num constraints instead of -- Backprop constraints. fmapConst :: (Functor f, Foldable f, Num b, Num (f a), Reifies s W) => BVar s b -> BVar s (f a) -> BVar s (f b) -- | Alias for fmap. (<$>) :: (Traversable f, Num a, Num b, Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) infixl 4 <$> -- | Alias for fmapConst. (<$) :: (Functor f, Foldable f, Num b, Num (f a), Reifies s W) => BVar s b -> BVar s (f a) -> BVar s (f b) infixl 4 <$ -- | Alias for flip fmapConst. ($>) :: (Functor f, Foldable f, Num b, Num (f a), Reifies s W) => BVar s (f a) -> BVar s b -> BVar s (f b) infixl 4 $> -- | pure, but with Num constraints instead of -- Backprop constraints. pure :: (Foldable t, Applicative t, Num a, Reifies s W) => BVar s a -> BVar s (t a) -- | liftA2, but with Num constraints instead of -- Backprop constraints. liftA2 :: (Traversable f, Applicative f, Num a, Num b, Num c, Reifies s W) => (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | liftA3, but with Num constraints instead of -- Backprop constraints. liftA3 :: (Traversable f, Applicative f, Num a, Num b, Num c, Num d, Reifies s W) => (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | fromIntegral, but with Num constraints instead of -- Backprop constraints. fromIntegral :: (Integral a, Integral b, Reifies s W) => BVar s a -> BVar s b -- | realToFrac, but with Num constraints instead of -- Backprop constraints. realToFrac :: (Fractional a, Real a, Fractional b, Real b, Reifies s W) => BVar s a -> BVar s b -- | round, but with Num constraints instead of -- Backprop constraints. round :: (RealFrac a, Integral b, Reifies s W) => BVar s a -> BVar s b -- | fromIntegral', but with Num constraints instead of -- Backprop constraints. fromIntegral' :: (Integral a, RealFrac b, Reifies s W) => BVar s a -> BVar s b -- | Coerce items inside a BVar. 
coerce :: Coercible a b => BVar s a -> BVar s b