-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/

-- | Heterogeneous automatic differentiation
--
-- Write your functions to compute your result, and the library will
-- automatically generate functions to compute your gradient.
--
-- Implements heterogeneous reverse-mode automatic differentiation,
-- commonly known as "backpropagation".
--
-- See https://backprop.jle.im for official introduction and
-- documentation.
@package backprop
@version 0.2.3.0

-- | Provides the Backprop typeclass, a class for values that can be
-- used for backpropagation.
--
-- This class replaces the old (version 0.1) API relying on Num.
module Numeric.Backprop.Class

-- | Class of values that can be backpropagated in general.
--
-- For instances of Num, these methods can be given by
-- zeroNum, addNum, and oneNum. There are also
-- generic options given in Numeric.Backprop.Class for functors,
-- IsList instances, and Generic instances.
--
--   instance Backprop Double where
--       zero = zeroNum
--       add = addNum
--       one = oneNum
--   
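--
-- Alternatively, as described in the next paragraph, the instance body can
-- simply be left blank. A sketch of what that looks like (MyType is a
-- hypothetical example type, not part of this package):
--
--   -- hypothetical example type; requires DeriveGeneric
--   data MyType = MyType Double [Double]
--     deriving Generic
--   
--   instance Backprop MyType
--   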
--
-- If you leave the body of an instance declaration blank, GHC Generics
-- will be used to derive instances if the type has a single constructor
-- and each field is an instance of Backprop.
--
-- To ensure that backpropagation works in a sound way, instances should
-- obey the following laws:
--
-- Identity: add x (zero y) == x, and add (zero x) y == y.
--
-- This also implies preservation of information, making zipWith
-- (+) an illegal implementation for lists and vectors.
--
-- This is only expected to be true up to potential "extra zeroes" in
-- x and y in the result.
--
-- Commutativity: add x y == add y x.
--
-- Associativity: add x (add y z) == add (add x y) z.
--
-- Idempotence: zero . zero == zero, and one . one == one.
--
-- Note that not all values in the backpropagation process need all of
-- these methods: Only the "final result" needs one, for example.
-- These are all grouped under one typeclass for convenience in defining
-- instances, and also to talk about sensible laws. For fine-grained
-- control, use the "explicit" versions of library functions (for
-- example, in Numeric.Backprop.Explicit) instead of
-- Backprop based ones.
--
-- This typeclass replaces the reliance on Num of the previous API
-- (v0.1). Num is strictly more powerful than Backprop, and
-- is a stronger constraint on types than is necessary for proper
-- backpropagating. In particular, fromInteger is a problem for
-- many types, preventing useful backpropagation for lists,
-- variable-length vectors (like Data.Vector) and variable-size
-- matrices from linear algebra libraries like hmatrix and
-- accelerate.
class Backprop a

-- | "Zero out" all components of a value. For scalar values, this should
-- just be const 0. For vectors and matrices, this should
-- set all components to zero, the additive identity.
--
-- Should be idempotent: applying the function twice is the same as
-- applying it just once.
--
-- Should be as lazy as possible. This behavior is observed for
-- all instances provided by this library.
--
-- See zeroNum for a pre-built definition for instances of
-- Num and zeroFunctor for a definition for instances of
-- Functor. If left blank, will automatically be
-- genericZero, a pre-built definition for instances of
-- Generic whose fields are all themselves instances of
-- Backprop.
zero :: Backprop a => a -> a

-- | Add together two values of a type. Used to combine contributions of
-- gradients, so it should be information-preserving: adding a zeroed-out
-- value should leave the other value unchanged (see the identity law
-- above).
--
-- Should be as strict as possible. This behavior is observed for
-- all instances provided by this library.
--
-- See addNum for a pre-built definition for instances of
-- Num and addFunctor for a definition for instances of
-- Functor. If left blank, will automatically be
-- genericAdd, a pre-built definition for instances of
-- Generic with one constructor whose fields are all themselves
-- instances of Backprop.
add :: Backprop a => a -> a -> a

-- | One all components of a value. For scalar values, this should
-- just be const 1. For vectors and matrices, this should
-- set all components to one, the multiplicative identity.
--
-- Should be idempotent: applying the function twice is the same as
-- applying it just once.
--
-- Should be as lazy as possible. This behavior is observed for
-- all instances provided by this library.
--
-- See oneNum for a pre-built definition for instances of
-- Num and oneFunctor for a definition for instances of
-- Functor. If left blank, will automatically be
-- genericOne, a pre-built definition for instances of
-- Generic whose fields are all themselves instances of
-- Backprop.
one :: Backprop a => a -> a

-- | "Zero out" all components of a value. For scalar values, this should
-- just be const 0. For vectors and matrices, this should
-- set all components to zero, the additive identity.
-- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: (Backprop a, Generic a, GZero (Rep a)) => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericAdd, a pre-built definition for instances of -- Generic with one constructor whose fields are all themselves -- instances of Backprop. add :: (Backprop a, Generic a, GAdd (Rep a)) => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: (Backprop a, Generic a, GOne (Rep a)) => a -> a -- | zero for instances of Num. -- -- Is lazy in its argument. zeroNum :: Num a => a -> a -- | add for instances of Num. addNum :: Num a => a -> a -> a -- | one for instances of Num. -- -- Is lazy in its argument. oneNum :: Num a => a -> a -- | zero for instances of Vector. zeroVec :: (Vector v a, Backprop a) => v a -> v a -- | add for instances of Vector. Automatically pads the end -- of the shorter vector with zeroes. addVec :: (Vector v a, Backprop a) => v a -> v a -> v a -- | one for instances of Vector. oneVec :: (Vector v a, Backprop a) => v a -> v a -- | zero for Functor instances. zeroFunctor :: (Functor f, Backprop a) => f a -> f a -- | add for instances of IsList. Automatically pads the end -- of the "shorter" value with zeroes. addIsList :: (IsList a, Backprop (Item a)) => a -> a -> a -- | add for types that are isomorphic to a list. Automatically pads -- the end of the "shorter" value with zeroes. addAsList :: Backprop b => (a -> [b]) -> ([b] -> a) -> a -> a -> a -- | one for instances of Functor. oneFunctor :: (Functor f, Backprop a) => f a -> f a -- | zero using GHC Generics; works if all fields are instances of -- Backprop. genericZero :: (Generic a, GZero (Rep a)) => a -> a -- | add using GHC Generics; works if all fields are instances of -- Backprop, but only for values with single constructors. genericAdd :: (Generic a, GAdd (Rep a)) => a -> a -> a -- | one using GHC Generics; works if all fields are instaces of -- Backprop. genericOne :: (Generic a, GOne (Rep a)) => a -> a -- | A newtype wrapper over an f a for Applicative -- f that gives a free Backprop instance (as well as -- Num etc. instances). -- -- Useful for performing backpropagation over functions that require some -- monadic context (like IO) to perform. 
newtype ABP f a ABP :: f a -> ABP f a [runABP] :: ABP f a -> f a -- | A newtype wrapper over an instance of Num that gives a free -- Backprop instance. -- -- Useful for things like DerivingVia, or for avoiding orphan -- instances. newtype NumBP a NumBP :: a -> NumBP a [runNumBP] :: NumBP a -> a -- | Helper class for automatically deriving zero using GHC -- Generics. class GZero f -- | Helper class for automatically deriving add using GHC Generics. class GAdd f -- | Helper class for automatically deriving one using GHC Generics. class GOne f instance Data.Traversable.Traversable f => Data.Traversable.Traversable (Numeric.Backprop.Class.ABP f) instance Data.Foldable.Foldable f => Data.Foldable.Foldable (Numeric.Backprop.Class.ABP f) instance GHC.Base.Functor f => GHC.Base.Functor (Numeric.Backprop.Class.ABP f) instance GHC.Generics.Generic (Numeric.Backprop.Class.ABP f a) instance (Data.Data.Data (f a), Data.Typeable.Internal.Typeable a, Data.Typeable.Internal.Typeable f) => Data.Data.Data (Numeric.Backprop.Class.ABP f a) instance GHC.Classes.Ord (f a) => GHC.Classes.Ord (Numeric.Backprop.Class.ABP f a) instance GHC.Classes.Eq (f a) => GHC.Classes.Eq (Numeric.Backprop.Class.ABP f a) instance GHC.Read.Read (f a) => GHC.Read.Read (Numeric.Backprop.Class.ABP f a) instance GHC.Show.Show (f a) => GHC.Show.Show (Numeric.Backprop.Class.ABP f a) instance GHC.Float.Floating a => GHC.Float.Floating (Numeric.Backprop.Class.NumBP a) instance GHC.Real.Fractional a => GHC.Real.Fractional (Numeric.Backprop.Class.NumBP a) instance GHC.Num.Num a => GHC.Num.Num (Numeric.Backprop.Class.NumBP a) instance Data.Traversable.Traversable Numeric.Backprop.Class.NumBP instance Data.Foldable.Foldable Numeric.Backprop.Class.NumBP instance GHC.Base.Functor Numeric.Backprop.Class.NumBP instance GHC.Generics.Generic (Numeric.Backprop.Class.NumBP a) instance Data.Data.Data a => Data.Data.Data (Numeric.Backprop.Class.NumBP a) instance GHC.Classes.Ord a => GHC.Classes.Ord (Numeric.Backprop.Class.NumBP a) instance GHC.Classes.Eq a => GHC.Classes.Eq (Numeric.Backprop.Class.NumBP a) instance GHC.Read.Read a => GHC.Read.Read (Numeric.Backprop.Class.NumBP a) instance GHC.Show.Show a => GHC.Show.Show (Numeric.Backprop.Class.NumBP a) instance GHC.Num.Num a => Numeric.Backprop.Class.Backprop (Numeric.Backprop.Class.NumBP a) instance (GHC.Base.Applicative f, Numeric.Backprop.Class.Backprop a) => Numeric.Backprop.Class.Backprop (Numeric.Backprop.Class.ABP f a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.GZero (GHC.Generics.K1 i a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.GAdd (GHC.Generics.K1 i a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.GOne (GHC.Generics.K1 i a) instance Numeric.Backprop.Class.Backprop GHC.Types.Int instance Numeric.Backprop.Class.Backprop GHC.Integer.Type.Integer instance Numeric.Backprop.Class.Backprop GHC.Natural.Natural instance Numeric.Backprop.Class.Backprop GHC.Word.Word8 instance Numeric.Backprop.Class.Backprop GHC.Types.Word instance Numeric.Backprop.Class.Backprop GHC.Word.Word16 instance Numeric.Backprop.Class.Backprop GHC.Word.Word32 instance Numeric.Backprop.Class.Backprop GHC.Word.Word64 instance GHC.Real.Integral a => Numeric.Backprop.Class.Backprop (GHC.Real.Ratio a) instance GHC.Float.RealFloat a => Numeric.Backprop.Class.Backprop (Data.Complex.Complex a) instance Numeric.Backprop.Class.Backprop GHC.Types.Float instance Numeric.Backprop.Class.Backprop GHC.Types.Double instance Numeric.Backprop.Class.Backprop a => 
Numeric.Backprop.Class.Backprop (Data.Vector.Vector a) instance (Data.Vector.Unboxed.Base.Unbox a, Numeric.Backprop.Class.Backprop a) => Numeric.Backprop.Class.Backprop (Data.Vector.Unboxed.Base.Vector a) instance (Foreign.Storable.Storable a, Numeric.Backprop.Class.Backprop a) => Numeric.Backprop.Class.Backprop (Data.Vector.Storable.Vector a) instance (Data.Primitive.Types.Prim a, Numeric.Backprop.Class.Backprop a) => Numeric.Backprop.Class.Backprop (Data.Vector.Primitive.Vector a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop [a] instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.List.NonEmpty.NonEmpty a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Sequence.Internal.Seq a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (GHC.Base.Maybe a) instance Numeric.Backprop.Class.Backprop () instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b) => Numeric.Backprop.Class.Backprop (a, b) instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b, Numeric.Backprop.Class.Backprop c) => Numeric.Backprop.Class.Backprop (a, b, c) instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b, Numeric.Backprop.Class.Backprop c, Numeric.Backprop.Class.Backprop d) => Numeric.Backprop.Class.Backprop (a, b, c, d) instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b, Numeric.Backprop.Class.Backprop c, Numeric.Backprop.Class.Backprop d, Numeric.Backprop.Class.Backprop e) => Numeric.Backprop.Class.Backprop (a, b, c, d, e) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Functor.Identity.Identity a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.I a) instance Numeric.Backprop.Class.Backprop (Data.Proxy.Proxy a) instance Numeric.Backprop.Class.Backprop w => Numeric.Backprop.Class.Backprop (Data.Functor.Const.Const w a) instance Numeric.Backprop.Class.Backprop Data.Void.Void instance (Numeric.Backprop.Class.Backprop a, GHC.Classes.Ord k) => Numeric.Backprop.Class.Backprop (Data.Map.Internal.Map k a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.IntMap.Internal.IntMap a) instance Type.Family.List.ListC (Numeric.Backprop.Class.Backprop Type.Family.List.<$> (f Type.Family.List.<$> as)) => Numeric.Backprop.Class.Backprop (Data.Type.Product.Prod f as) instance Type.Family.Maybe.MaybeC (Numeric.Backprop.Class.Backprop Type.Family.Maybe.<$> (f Type.Family.Maybe.<$> a)) => Numeric.Backprop.Class.Backprop (Data.Type.Option.Option f a) instance (Numeric.Backprop.Class.Backprop (f a), Numeric.Backprop.Class.Backprop (g a)) => Numeric.Backprop.Class.Backprop ((Data.Type.Conjunction.:&:) f g a) instance (Numeric.Backprop.Class.Backprop (f a), Numeric.Backprop.Class.Backprop (g b)) => Numeric.Backprop.Class.Backprop ((Data.Type.Conjunction.:*:) f g '(a, b)) instance Numeric.Backprop.Class.Backprop (f (g h) a) => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.Comp1 f g h a) instance Numeric.Backprop.Class.Backprop (f (g a)) => Numeric.Backprop.Class.Backprop ((Data.Type.Combinator.:.:) f g a) instance Numeric.Backprop.Class.Backprop w => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.C w a) instance Numeric.Backprop.Class.Backprop (p a b) => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.Flip p b a) instance Numeric.Backprop.Class.Backprop (p '(a, b)) => 
Numeric.Backprop.Class.Backprop (Data.Type.Combinator.Cur p a b) instance Numeric.Backprop.Class.Backprop (p a b) => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.Uncur p '(a, b)) instance Numeric.Backprop.Class.Backprop (p '(a, b, c)) => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.Cur3 p a b c) instance Numeric.Backprop.Class.Backprop (p a b c) => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.Uncur3 p '(a, b, c)) instance Numeric.Backprop.Class.Backprop (f a a) => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.Join f a) instance Numeric.Backprop.Class.Backprop (t (Data.Type.Combinator.Flip f b) a) => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.Conj t f a b) instance Numeric.Backprop.Class.Backprop (c (f a)) => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.LL c a f) instance Numeric.Backprop.Class.Backprop (c (f a)) => Numeric.Backprop.Class.Backprop (Data.Type.Combinator.RR c f a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (GHC.Generics.K1 i a p) instance Numeric.Backprop.Class.Backprop (f p) => Numeric.Backprop.Class.Backprop (GHC.Generics.M1 i c f p) instance (Numeric.Backprop.Class.Backprop (f p), Numeric.Backprop.Class.Backprop (g p)) => Numeric.Backprop.Class.Backprop ((GHC.Generics.:*:) f g p) instance Numeric.Backprop.Class.Backprop (GHC.Generics.V1 p) instance Numeric.Backprop.Class.Backprop (GHC.Generics.U1 p) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Monoid.Sum a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Monoid.Product a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Semigroup.Option a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Semigroup.First a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Semigroup.Last a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Monoid.First a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Monoid.Last a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (Data.Monoid.Dual a) instance (Numeric.Backprop.Class.Backprop a, Numeric.Backprop.Class.Backprop b) => Numeric.Backprop.Class.Backprop (Data.Semigroup.Arg a b) instance (Numeric.Backprop.Class.Backprop (f a), Numeric.Backprop.Class.Backprop (g a)) => Numeric.Backprop.Class.Backprop (Data.Functor.Product.Product f g a) instance Numeric.Backprop.Class.Backprop (f (g a)) => Numeric.Backprop.Class.Backprop (Data.Functor.Compose.Compose f g a) instance Numeric.Backprop.Class.Backprop a => Numeric.Backprop.Class.Backprop (r -> a) instance (Numeric.Backprop.Class.Backprop a, GHC.Base.Applicative m) => Numeric.Backprop.Class.Backprop (Control.Arrow.Kleisli m r a) instance (Numeric.Backprop.Class.GOne f, Numeric.Backprop.Class.GOne g) => Numeric.Backprop.Class.GOne (f GHC.Generics.:*: g) instance (Numeric.Backprop.Class.GOne f, Numeric.Backprop.Class.GOne g) => Numeric.Backprop.Class.GOne (f GHC.Generics.:+: g) instance Numeric.Backprop.Class.GOne GHC.Generics.V1 instance Numeric.Backprop.Class.GOne GHC.Generics.U1 instance Numeric.Backprop.Class.GOne f => Numeric.Backprop.Class.GOne (GHC.Generics.M1 i c f) instance Numeric.Backprop.Class.GOne f => Numeric.Backprop.Class.GOne (f GHC.Generics.:.: g) instance (Numeric.Backprop.Class.GAdd f, Numeric.Backprop.Class.GAdd g) => Numeric.Backprop.Class.GAdd (f GHC.Generics.:*: g) 
instance Numeric.Backprop.Class.GAdd GHC.Generics.V1 instance Numeric.Backprop.Class.GAdd GHC.Generics.U1 instance Numeric.Backprop.Class.GAdd f => Numeric.Backprop.Class.GAdd (GHC.Generics.M1 i c f) instance Numeric.Backprop.Class.GAdd f => Numeric.Backprop.Class.GAdd (f GHC.Generics.:.: g) instance (Numeric.Backprop.Class.GZero f, Numeric.Backprop.Class.GZero g) => Numeric.Backprop.Class.GZero (f GHC.Generics.:*: g) instance (Numeric.Backprop.Class.GZero f, Numeric.Backprop.Class.GZero g) => Numeric.Backprop.Class.GZero (f GHC.Generics.:+: g) instance Numeric.Backprop.Class.GZero GHC.Generics.V1 instance Numeric.Backprop.Class.GZero GHC.Generics.U1 instance Numeric.Backprop.Class.GZero f => Numeric.Backprop.Class.GZero (GHC.Generics.M1 i c f) instance Numeric.Backprop.Class.GZero f => Numeric.Backprop.Class.GZero (f GHC.Generics.:.: g) instance Control.DeepSeq.NFData (f a) => Control.DeepSeq.NFData (Numeric.Backprop.Class.ABP f a) instance GHC.Base.Applicative f => GHC.Base.Applicative (Numeric.Backprop.Class.ABP f) instance GHC.Base.Monad m => GHC.Base.Monad (Numeric.Backprop.Class.ABP m) instance (GHC.Base.Applicative f, GHC.Num.Num a) => GHC.Num.Num (Numeric.Backprop.Class.ABP f a) instance (GHC.Base.Applicative f, GHC.Real.Fractional a) => GHC.Real.Fractional (Numeric.Backprop.Class.ABP f a) instance (GHC.Base.Applicative f, GHC.Float.Floating a) => GHC.Float.Floating (Numeric.Backprop.Class.ABP f a) instance Control.DeepSeq.NFData a => Control.DeepSeq.NFData (Numeric.Backprop.Class.NumBP a) instance GHC.Base.Applicative Numeric.Backprop.Class.NumBP instance GHC.Base.Monad Numeric.Backprop.Class.NumBP -- | Provides the Op type and combinators, which represent -- differentiable functions/operations on values, and are used internally -- by the library to perform back-propagation. -- -- Users of the library can ignore this module for the most part. Library -- authors defining backpropagatable primitives for their functions are -- recommend to simply use op0, op1, op2, -- op3, which are re-exported in Numeric.Backprop. However, -- authors who want more options in defining their primtive functions -- might find some of these functions useful. -- -- Note that if your entire function is a single non-branching -- composition of functions, Op and its utility functions alone -- are sufficient to differentiate/backprop. However, this happens rarely -- in practice. -- -- To use these Ops with the backprop library, they can be made to -- work with BVars using liftOp, liftOp1, -- liftOp2, and liftOp3. -- -- If you are writing a library, see -- https://backprop.jle.im/06-equipping-your-library.html for a -- guide for equipping your library with backpropatable operations using -- Ops. module Numeric.Backprop.Op -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
--   Op '[Int, Bool] Double
--   
-- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | A Prod of simple Haskell types. type Tuple = Prod * I newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a -- | Run the function that an Op encodes, to get the resulting -- output and also its gradient with respect to the inputs. -- --
--   >>> runOp (op2 (*)) (3 ::< 5 ::< Ø)
--   (15, 5 ::< 3 ::< Ø)
--   
runOp :: Num a => Op as a -> Tuple as -> (a, Tuple as) -- | Run the function that an Op encodes, to get the result. -- --
--   >>> evalOp (op2 (*)) (3 ::< 5 ::< Ø)
--   15
--   
evalOp :: Op as a -> Tuple as -> a -- | Run the function that an Op encodes, and get the gradient of -- the output with respect to the inputs. -- --
--   >>> gradOp (op2 (*)) (3 ::< 5 ::< Ø)
--   5 ::< 3 ::< Ø
--   -- the gradient of x*y is (y, x)
--   
-- --
--   gradOp o xs = gradOpWith o xs 1
--   
gradOp :: Num a => Op as a -> Tuple as -> Tuple as
-- | Get the gradient function that an Op encodes, with a third
-- argument expecting the total derivative of the result.
--
-- See the module documentation for Numeric.Backprop.Op for more
-- information.
gradOpWith :: Op as a -> Tuple as -> a -> Tuple as
-- | Create an Op that takes no inputs and always returns the given
-- value.
--
-- There is no gradient, of course (using gradOp will give you an
-- empty tuple), because there is no input to have a gradient of.
--
--   >>> runOp (op0 10) Ø
--   (10, Ø)
--   
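--
-- Asking for just the gradient correspondingly gives the empty tuple (an
-- illustrative example):
--
--   >>> gradOp (op0 10) Ø
--   Ø
--   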
-- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
--   >>> runOp (opConst 10) (1 ::< 2 ::< 3 ::< Ø)
--   (10, 0 ::< 0 ::< 0 ::< Ø)
--   
opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
--   idOp = opIso id id
--   
idOp :: Op '[a] a
-- | A version of opConst taking explicit Length, indicating
-- the number of inputs and their types.
--
-- Requiring an explicit Length is mostly useful for rare
-- "extremely polymorphic" situations, where GHC can't infer the type and
-- length of the expected input tuple. If you ever actually
-- explicitly write down as as a list of types, you should be
-- able to just use opConst.
opConst' :: Every Num as => Length as -> a -> Op as a
-- | An Op that extracts a value from an input value using a
-- Lens'.
--
-- Warning: This is unsafe! It assumes that it extracts a specific value
-- unchanged, with derivative 1, so will break for things that
-- numerically manipulate things before returning them.
opLens :: Num a => Lens' a b -> Op '[a] b
-- | Create an Op of a function taking one input, by giving its
-- explicit derivative. The function should return a tuple containing the
-- result of the function, and also a function that takes the derivative of
-- the result and returns the derivative of the input.
--
-- If we have
--
-- <math>
--
-- Then the derivative <math> would be:
--
-- <math>
--
-- If our Op represents <math>, then the second item in the
-- resulting tuple should be a function that takes <math> and
-- returns <math>.
--
-- As an example, here is an Op that squares its input:
--
--   square :: Num a => Op '[a] a
--   square = op1 $ \x -> (x*x, \d -> 2 * d * x
--                        )
--   
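--
-- Running this Op directly, for instance, gives both the result and the
-- gradient at that input (an illustrative example using the square Op
-- defined above, which is not itself exported by this module):
--
--   >>> runOp square (5 ::< Ø)
--   (25, 10 ::< Ø)
--   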
--
-- Remember that, generally, end users shouldn't directly construct
-- Ops; they should be provided by libraries or generated
-- automatically.
op1 :: (a -> (b, b -> a)) -> Op '[a] b
-- | Create an Op of a function taking two inputs, by giving its
-- explicit gradient. The function should return a tuple containing the
-- result of the function, and also a function that takes the derivative of
-- the result and returns the derivatives of the inputs.
--
-- If we have
--
-- <math>
--
-- Then the gradient <math> would be:
--
-- <math>
--
-- If our Op represents <math>, then the second item in the
-- resulting tuple should be a function that takes <math> and
-- returns <math>.
--
-- As an example, here is an Op that multiplies its inputs:
--
--   mul :: Num a => Op '[a, a] a
--   mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d)
--                        )
--   
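--
-- A library would typically then expose this Op to its users as a
-- function on BVars via liftOp2 from Numeric.Backprop (a sketch; the
-- name mulVar is hypothetical and constraints are omitted here):
--
--   -- hypothetical wrapper; liftOp2 is re-exported from Numeric.Backprop
--   mulVar = liftOp2 mul
--   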
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
--   >>> gradOp' opCoerce (Identity 5) :: (Int, Identity Int)
--   (5, Identity 1)
--   
-- --
--   opCoerce = opIso coerce coerce
--   
opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
--   >>> gradOp' opTup (1 ::< 2 ::< 3 ::< Ø)
--   (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø)
--   
opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the two input values through an isomorphism. -- Useful for things like constructors. See opIso for caveats. opIso2 :: (a -> b -> c) -> (c -> (a, b)) -> Op '[a, b] c -- | An Op that runs the three input values through an isomorphism. -- Useful for things like constructors. See opIso for caveats. opIso3 :: (a -> b -> c -> d) -> (d -> (a, b, c)) -> Op '[a, b, c] d -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b -- | Compose Ops together, like sequence for functions, or -- liftAN. -- -- That is, given an Op as b1, an Op as -- b2, and an Op as b3, it can compose them with an -- Op '[b1,b2,b3] c to create an Op as c. composeOp :: (Every Num as, Known Length as) => Prod (Op as) bs -> Op bs c -> Op as c -- | Convenient wrapper over composeOp for the case where the second -- function only takes one input, so the two Ops can be directly -- piped together, like for .. composeOp1 :: (Every Num as, Known Length as) => Op as b -> Op '[b] c -> Op as c -- | Convenient infix synonym for (flipped) composeOp1. Meant to be -- used just like .: -- --
--   f :: Op '[b]   c
--   g :: Op '[a,a] b
--   
--   f ~. g :: Op '[a, a] c
--   
(~.) :: (Known Length as, Every Num as) => Op '[b] c -> Op as b -> Op as c
infixr 9 ~.
-- | A version of composeOp taking explicit Length,
-- indicating the number of inputs expected and their types.
--
-- Requiring an explicit Length is mostly useful for rare
-- "extremely polymorphic" situations, where GHC can't infer the type and
-- length of the expected input tuple. If you ever actually
-- explicitly write down as as a list of types, you should be
-- able to just use composeOp.
composeOp' :: Every Num as => Length as -> Prod (Op as) bs -> Op bs c -> Op as c
-- | A version of composeOp1 taking explicit Length,
-- indicating the number of inputs expected and their types.
--
-- Requiring an explicit Length is mostly useful for rare
-- "extremely polymorphic" situations, where GHC can't infer the type and
-- length of the expected input tuple. If you ever actually
-- explicitly write down as as a list of types, you should be
-- able to just use composeOp1.
composeOp1' :: Every Num as => Length as -> Op as b -> Op '[b] c -> Op as c
-- | Construct a two element Prod. Since the precedence of (:>) is
-- higher than (:<), we can conveniently write lists like:
--
--   >>> a :< b :> c
--   
-- -- Which is identical to: -- --
--   >>> a :< b :< c :< Ø
--   
infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * -- | Op for addition (+.) :: Num a => Op '[a, a] a -- | Op for subtraction (-.) :: Num a => Op '[a, a] a -- | Op for multiplication (*.) :: Num a => Op '[a, a] a -- | Op for negation negateOp :: Num a => Op '[a] a -- | Op for absolute value absOp :: Num a => Op '[a] a -- | Op for signum signumOp :: Num a => Op '[a] a -- | Op for division (/.) :: Fractional a => Op '[a, a] a -- | Op for multiplicative inverse recipOp :: Fractional a => Op '[a] a -- | Op for exp expOp :: Floating a => Op '[a] a -- | Op for the natural logarithm logOp :: Floating a => Op '[a] a -- | Op for square root sqrtOp :: Floating a => Op '[a] a -- | Op for exponentiation (**.) :: Floating a => Op '[a, a] a -- | Op for logBase logBaseOp :: Floating a => Op '[a, a] a -- | Op for sine sinOp :: Floating a => Op '[a] a -- | Op for cosine cosOp :: Floating a => Op '[a] a -- | Op for tangent tanOp :: Floating a => Op '[a] a -- | Op for arcsine asinOp :: Floating a => Op '[a] a -- | Op for arccosine acosOp :: Floating a => Op '[a] a -- | Op for arctangent atanOp :: Floating a => Op '[a] a -- | Op for hyperbolic sine sinhOp :: Floating a => Op '[a] a -- | Op for hyperbolic cosine coshOp :: Floating a => Op '[a] a -- | Op for hyperbolic tangent tanhOp :: Floating a => Op '[a] a -- | Op for hyperbolic arcsine asinhOp :: Floating a => Op '[a] a -- | Op for hyperbolic arccosine acoshOp :: Floating a => Op '[a] a -- | Op for hyperbolic arctangent atanhOp :: Floating a => Op '[a] a instance (Type.Class.Known.Known Data.Type.Length.Length as, Data.Type.Index.Every GHC.Num.Num as, GHC.Num.Num a) => GHC.Num.Num (Numeric.Backprop.Op.Op as a) instance (Type.Class.Known.Known Data.Type.Length.Length as, Data.Type.Index.Every GHC.Real.Fractional as, Data.Type.Index.Every GHC.Num.Num as, GHC.Real.Fractional a) => GHC.Real.Fractional (Numeric.Backprop.Op.Op as a) instance (Type.Class.Known.Known Data.Type.Length.Length as, Data.Type.Index.Every GHC.Float.Floating as, Data.Type.Index.Every GHC.Real.Fractional as, Data.Type.Index.Every GHC.Num.Num as, GHC.Float.Floating a) => GHC.Float.Floating (Numeric.Backprop.Op.Op as a) -- | Provides "explicit" versions of all of the functions in -- Numeric.Backprop. Instead of relying on a Backprop -- instance, allows you to manually provide zero, add, and -- one on a per-value basis. -- -- It is recommended you use Backprop or Num instead, -- unless your type has no Num instance, or you else you want to -- avoid defining orphan Backprop instances for external types. -- Can also be useful if mixing and matching styles. -- -- See Numeric.Backprop for fuller documentation on using these -- functions. module Numeric.Backprop.Explicit -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar): -- --
--   (^.)  ::        a -> Lens' a b ->        b
--   (^^.) :: BVar s a -> Lens' a b -> BVar s b
--   
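--
-- For example, a field of a record held in a BVar can be read through a
-- lens (a sketch; MyPoint and the lens pX are hypothetical, and the
-- constraints on ^^. are omitted here):
--
--   -- hypothetical record type with a lens pX :: Lens' MyPoint Double;
--   -- constraints on ^^. omitted
--   getX p = p ^^. pX
--   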
-- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- If you have control over your data type definitions, you can also use -- splitBV and joinBV to manipulate data types by easily -- extracting fields out of a BVar of data types and creating -- BVars of data types out of BVars of their fields. See -- Numeric.Backprop#hkd for a tutorial on this use pattern. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. See -- https://backprop.jle.im/06-equipping-your-library.html for a -- detailed guide. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | Class of values that can be backpropagated in general. -- -- For instances of Num, these methods can be given by -- zeroNum, addNum, and oneNum. There are also -- generic options given in Numeric.Backprop.Class for functors, -- IsList instances, and Generic instances. -- --
--   instance Backprop Double where
--       zero = zeroNum
--       add = addNum
--       one = oneNum
--   
-- -- If you leave the body of an instance declaration blank, GHC Generics -- will be used to derive instances if the type has a single constructor -- and each field is an instance of Backprop. -- -- To ensure that backpropagation works in a sound way, should obey the -- laws: -- -- -- -- -- -- Also implies preservation of information, making zipWith -- (+) an illegal implementation for lists and vectors. -- -- This is only expected to be true up to potential "extra zeroes" in -- x and y in the result. -- -- -- -- -- -- -- -- -- -- -- -- -- -- Note that not all values in the backpropagation process needs all of -- these methods: Only the "final result" needs one, for example. -- These are all grouped under one typeclass for convenience in defining -- instances, and also to talk about sensible laws. For fine-grained -- control, use the "explicit" versions of library functions (for -- example, in Numeric.Backprop.Explicit) instead of -- Backprop based ones. -- -- This typeclass replaces the reliance on Num of the previous API -- (v0.1). Num is strictly more powerful than Backprop, and -- is a stronger constraint on types than is necessary for proper -- backpropagating. In particular, fromInteger is a problem for -- many types, preventing useful backpropagation for lists, -- variable-length vectors (like Data.Vector) and variable-size -- matrices from linear algebra libraries like hmatrix and -- accelerate. class Backprop a -- | "Zero out" all components of a value. For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: Backprop a => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericAdd, a pre-built definition for instances of -- Generic with one constructor whose fields are all themselves -- instances of Backprop. add :: Backprop a => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: Backprop a => a -> a -- | "Zero out" all components of a value. For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. 
-- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: (Backprop a, Generic a, GZero (Rep a)) => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericAdd, a pre-built definition for instances of -- Generic with one constructor whose fields are all themselves -- instances of Backprop. add :: (Backprop a, Generic a, GAdd (Rep a)) => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: (Backprop a, Generic a, GOne (Rep a)) => a -> a -- | A newtype wrapper over an f a for Applicative -- f that gives a free Backprop instance (as well as -- Num etc. instances). -- -- Useful for performing backpropagation over functions that require some -- monadic context (like IO) to perform. newtype ABP f a ABP :: f a -> ABP f a [runABP] :: ABP f a -> f a -- | A newtype wrapper over an instance of Num that gives a free -- Backprop instance. -- -- Useful for things like DerivingVia, or for avoiding orphan -- instances. newtype NumBP a NumBP :: a -> NumBP a [runNumBP] :: NumBP a -> a -- | "Zero out" all components of a value. For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: Applying the function twice is the same as -- applying it just once. -- -- Each type should ideally only have one ZeroFunc. This coherence -- constraint is given by the typeclass Backprop. newtype ZeroFunc a ZF :: (a -> a) -> ZeroFunc a [runZF] :: ZeroFunc a -> a -> a -- | If a type has a Num instance, this is the canonical -- ZeroFunc. zfNum :: Num a => ZeroFunc a -- | ZeroFuncs for every item in a type level list based on their -- Num instances zfNums :: (Every Num as, Known Length as) => Prod ZeroFunc as -- | The canonical ZeroFunc for instances of Backprop. zeroFunc :: Backprop a => ZeroFunc a -- | Generate an ZeroFunc for every type in a type-level list, if -- every type has an instance of Backprop. zeroFuncs :: (Every Backprop as, Known Length as) => Prod ZeroFunc as -- | zeroFunc for instances of Functor zfFunctor :: (Backprop a, Functor f) => ZeroFunc (f a) -- | Add together two values of a type. To combine contributions of -- gradients, so should ideally be information-preserving. 
-- -- See laws for Backprop for the laws this should be expected to -- preserve. Namely, it should be commutative and associative, with an -- identity for a valid ZeroFunc. -- -- Each type should ideally only have one AddFunc. This coherence -- constraint is given by the typeclass Backprop. newtype AddFunc a AF :: (a -> a -> a) -> AddFunc a [runAF] :: AddFunc a -> a -> a -> a -- | If a type has a Num instance, this is the canonical -- AddFunc. afNum :: Num a => AddFunc a -- | ZeroFuncs for every item in a type level list based on their -- Num instances afNums :: (Every Num as, Known Length as) => Prod AddFunc as -- | The canonical AddFunc for instances of Backprop. addFunc :: Backprop a => AddFunc a -- | Generate an AddFunc for every type in a type-level list, if -- every type has an instance of Backprop. addFuncs :: (Every Backprop as, Known Length as) => Prod AddFunc as -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- Should be idempotent: Applying the function twice is the same as -- applying it just once. -- -- Each type should ideally only have one OneFunc. This coherence -- constraint is given by the typeclass Backprop. newtype OneFunc a OF :: (a -> a) -> OneFunc a [runOF] :: OneFunc a -> a -> a -- | If a type has a Num instance, this is the canonical -- OneFunc. ofNum :: Num a => OneFunc a -- | ZeroFuncs for every item in a type level list based on their -- Num instances ofNums :: (Every Num as, Known Length as) => Prod OneFunc as -- | The canonical OneFunc for instances of Backprop. oneFunc :: Backprop a => OneFunc a -- | Generate an OneFunc for every type in a type-level list, if -- every type has an instance of Backprop. oneFuncs :: (Every Backprop as, Known Length as) => Prod OneFunc as -- | OneFunc for instances of Functor ofFunctor :: (Backprop a, Functor f) => OneFunc (f a) -- | backprop, but with explicit zero and one. -- -- Note that argument order changed in v0.2.3. backprop :: ZeroFunc a -> (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, OneFunc b -> a) -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents. -- -- Benchmarks show that this should have virtually no overhead over -- directly writing a a -> b. BVar is, in this -- situation, a zero-cost abstraction, performance-wise. -- -- See documentation of backprop for more information. evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> b -- | gradBP, but with explicit zero and one. gradBP :: ZeroFunc a -> OneFunc b -> (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a -- | backpropWith, but with explicit zero. -- -- Note that argument order changed in v0.2.3. backpropWith :: ZeroFunc a -> (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, (b -> b) -> a) -- | evalBP but with no arguments. Useful when everything is just -- given through constVar. evalBP0 :: (forall s. Reifies s W => BVar s a) -> a -- | backprop2, but with explicit zero and one. -- -- Note that argument order changed in v0.2.3. backprop2 :: ZeroFunc a -> ZeroFunc b -> (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, OneFunc c -> (a, b)) -- | evalBP for a two-argument function. See backprop2 for -- notes. evalBP2 :: (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> c -- | gradBP for a two-argument function. See backprop2 for -- notes. 
gradBP2 :: ZeroFunc a -> ZeroFunc b -> OneFunc c -> (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b) -- | backpropWith2, but with explicit zero. -- -- Note that argument order changed in v0.2.3. backpropWith2 :: ZeroFunc a -> ZeroFunc b -> (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (c -> c) -> (a, b)) -- | backpropN, but with explicit zero and one. -- -- Note that argument order changed in v0.2.3. backpropN :: forall as b. () => Prod ZeroFunc as -> (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, OneFunc b -> Tuple as) -- | evalBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. evalBPN :: forall as b. () => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> b -- | gradBP, Nbut with explicit zero and one. gradBPN :: Prod ZeroFunc as -> OneFunc b -> (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> Tuple as -- | backpropWithN, but with explicit zero. -- -- Note that argument order changed in v0.2.3. backpropWithN :: Prod ZeroFunc as -> (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, (b -> b) -> Tuple as) class EveryC k c as => Every k (c :: k -> Constraint) (as :: [k]) -- | Lift a value into a BVar representing a constant value. -- -- This value will not be considered an input, and its gradients will not -- be backpropagated. constVar :: a -> BVar s a -- | Shorter alias for constVar, inspired by the ad library. auto :: a -> BVar s a -- | Coerce a BVar contents. Useful for things like newtype -- wrappers. coerceVar :: Coercible a b => BVar s a -> BVar s b -- | viewVar, but with explicit add and zero. viewVar :: forall a b s. Reifies s W => AddFunc a -> ZeroFunc a -> Lens' b a -> BVar s b -> BVar s a -- | setVar, but with explicit add and zero. setVar :: forall a b s. Reifies s W => AddFunc a -> AddFunc b -> ZeroFunc a -> ZeroFunc b -> Lens' b a -> BVar s a -> BVar s b -> BVar s b -- | sequenceVar, but with explicit add and zero. sequenceVar :: forall t a s. (Reifies s W, Traversable t) => AddFunc a -> ZeroFunc a -> BVar s (t a) -> t (BVar s a) -- | collectVar, but with explicit add and zero. -- -- NOTE: Prior to v0.2.3, this required an extra ZeroFunc (t -- a) input. However, after v0.2.3, the ZeroFunc is now -- derived from the Functor instance of t. This makes the -- API a little more convenient, and it enforces consistency with the -- ZeroFunc a, so people can't pass in nonsense -- combinations. -- -- Please submit an issue to the issue tracker if you find yourself in a -- situation where you need the flexibility to provide a separte -- ZeroFunc a and ZeroFunc (t a). collectVar :: forall t a s. (Reifies s W, Foldable t, Functor t) => AddFunc a -> ZeroFunc a -> t (BVar s a) -> BVar s (t a) -- | previewVar, but with explicit add and zero. previewVar :: forall b a s. Reifies s W => AddFunc a -> ZeroFunc a -> Traversal' b a -> BVar s b -> Maybe (BVar s a) -- | toListOfVar, but with explicit add and zero. toListOfVar :: forall b a s. Reifies s W => AddFunc a -> ZeroFunc a -> Traversal' b a -> BVar s b -> [BVar s a] -- | isoVar with explicit add and zero. isoVar :: Reifies s W => AddFunc a -> ZeroFunc b -> (a -> b) -> (b -> a) -> BVar s a -> BVar s b -- | isoVar2 with explicit add and zero. isoVar2 :: Reifies s W => AddFunc a -> AddFunc b -> ZeroFunc c -> (a -> b -> c) -> (c -> (a, b)) -> BVar s a -> BVar s b -> BVar s c -- | isoVar3 with explicit add and zero. 
isoVar3 :: Reifies s W => AddFunc a -> AddFunc b -> AddFunc c -> ZeroFunc d -> (a -> b -> c -> d) -> (d -> (a, b, c)) -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | isoVarN with explicit add and zero. isoVarN :: Reifies s W => Prod AddFunc as -> ZeroFunc b -> (Tuple as -> b) -> (b -> Tuple as) -> Prod (BVar s) as -> BVar s b -- | liftOp, but with explicit add and zero. liftOp :: forall as b s. Reifies s W => Prod AddFunc as -> ZeroFunc b -> Op as b -> Prod (BVar s) as -> BVar s b -- | liftOp1, but with explicit add and zero. liftOp1 :: forall a b s. Reifies s W => AddFunc a -> ZeroFunc b -> Op '[a] b -> BVar s a -> BVar s b -- | liftOp2, but with explicit add and zero. liftOp2 :: forall a b c s. Reifies s W => AddFunc a -> AddFunc b -> ZeroFunc c -> Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c -- | liftOp3, but with explicit add and zero. liftOp3 :: forall a b c d s. Reifies s W => AddFunc a -> AddFunc b -> AddFunc c -> ZeroFunc d -> Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | splitBV with explicit add and zero. splitBV :: forall z f s as. (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Reifies s W) => AddFunc (Rep (z f) ()) -> Prod AddFunc as -> ZeroFunc (Rep (z f) ()) -> Prod ZeroFunc as -> BVar s (z f) -> z (BVar s) -- | joinBV with explicit add and zero. joinBV :: forall z f s as. (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Reifies s W) => AddFunc (z f) -> Prod AddFunc as -> ZeroFunc (z f) -> Prod ZeroFunc as -> z (BVar s) -> BVar s (z f) -- | Helper class for generically "splitting" and "joining" BVars -- into constructors. See splitBV and joinBV. -- -- See Numeric.Backprop#hkd for a tutorial on how to use this. -- -- Instances should be available for types made with one constructor -- whose fields are all instances of Backprop, with a -- Generic instance. class BVGroup s as i o | o -> i, i -> as -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
--   Op '[Int, Bool] Double
--   
-- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
--   >>> runOp (op0 10) Ø
--   (10, Ø)
--   
-- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
--   >>> runOp (opConst 10) (1 ::< 2 ::< 3 ::< Ø)
--   (10, 0 ::< 0 ::< 0 ::< Ø)
--   
opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
--   idOp = opIso id id
--   
idOp :: Op '[a] a
-- | A version of opConst taking explicit Length, indicating
-- the number of inputs and their types.
--
-- Requiring an explicit Length is mostly useful for rare
-- "extremely polymorphic" situations, where GHC can't infer the type and
-- length of the expected input tuple. If you ever actually
-- explicitly write down as as a list of types, you should be
-- able to just use opConst.
opConst' :: Every Num as => Length as -> a -> Op as a
-- | Create an Op of a function taking one input, by giving its
-- explicit derivative. The function should return a tuple containing the
-- result of the function, and also a function that takes the derivative of
-- the result and returns the derivative of the input.
--
-- If we have
--
-- <math>
--
-- Then the derivative <math> would be:
--
-- <math>
--
-- If our Op represents <math>, then the second item in the
-- resulting tuple should be a function that takes <math> and
-- returns <math>.
--
-- As an example, here is an Op that squares its input:
--
--   square :: Num a => Op '[a] a
--   square = op1 $ \x -> (x*x, \d -> 2 * d * x)
--   
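-- As a worked check (illustrative): squaring 5 gives 25, and with a
-- downstream derivative of 1 the gradient is 2 * 1 * 5 = 10:
--
--   >>> gradOp' square (5 ::< Ø)
--   (25, 10 ::< Ø)
--   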
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
--   mul :: Num a => Op '[a, a] a
--   mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d))
--   
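-- As a worked check (illustrative): the result is 3 * 5 = 15, and the
-- gradient with respect to each input is the other input:
--
--   >>> gradOp' mul (3 ::< 5 ::< Ø)
--   (15, 5 ::< 3 ::< Ø)
--   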
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
--   >>> gradOp' opCoerce (Identity 5 ::< Ø) :: (Int, Tuple '[Identity Int])
--   (5, Identity 1 ::< Ø)
--   
-- --
--   opCoerce = opIso coerce coerce
--   
opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
--   >>> gradOp' opTup (1 ::< 2 ::< 3 ::< Ø)
--   (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø)
--   
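-- For a safe use of the opIso combinator documented below, a purely
-- structural isomorphism such as a newtype wrapper has "derivative 1" (a
-- minimal sketch; the Meters type is hypothetical):
--
--   newtype Meters = Meters Double
--   
--   toMeters :: Op '[Double] Meters
--   toMeters = opIso Meters (\(Meters m) -> m)
--   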
opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | Construct a two element Prod. Since the precedence of (:>) is -- higher than (:<), we can conveniently write lists like: -- --
--   >>> a :< b :> c
--   
-- -- Which is identical to: -- --
--   >>> a :< b :< c :< Ø
--   
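-- For Tuples (Prods of I), the ::< cons described below plays the same
-- role; for example (illustrative):
--
--   (1 ::< 2.5 ::< Ø) :: Tuple '[Int, Double]
--   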
infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | A Prod of simple Haskell types. type Tuple = Prod * I -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a class Reifies k (s :: k) a | s -> a instance Numeric.Backprop.Explicit.BVGroup s '[] (GHC.Generics.K1 i a) (GHC.Generics.K1 i (Numeric.Backprop.Internal.BVar s a)) instance Numeric.Backprop.Explicit.BVGroup s as i o => Numeric.Backprop.Explicit.BVGroup s as (GHC.Generics.M1 p c i) (GHC.Generics.M1 p c o) instance Numeric.Backprop.Explicit.BVGroup s '[] GHC.Generics.V1 GHC.Generics.V1 instance Numeric.Backprop.Explicit.BVGroup s '[] GHC.Generics.U1 GHC.Generics.U1 instance (Data.Reflection.Reifies s Numeric.Backprop.Internal.W, Numeric.Backprop.Explicit.BVGroup s as i1 o1, Numeric.Backprop.Explicit.BVGroup s bs i2 o2, cs ~ (as Type.Family.List.++ bs), Type.Class.Known.Known Data.Type.Length.Length as) => Numeric.Backprop.Explicit.BVGroup s (i1 () : i2 () : cs) (i1 GHC.Generics.:*: i2) (o1 GHC.Generics.:*: o2) instance (Data.Reflection.Reifies s Numeric.Backprop.Internal.W, Numeric.Backprop.Explicit.BVGroup s as i1 o1, Numeric.Backprop.Explicit.BVGroup s bs i2 o2, cs ~ (as Type.Family.List.++ bs), Type.Class.Known.Known Data.Type.Length.Length as) => Numeric.Backprop.Explicit.BVGroup s (i1 () : i2 () : cs) (i1 GHC.Generics.:+: i2) (o1 GHC.Generics.:+: o2) -- | Provides the exact same API as Numeric.Backprop, except -- requiring Num instances for all types involved instead of -- Backprop instances. -- -- This was the original API of the library (for version 0.1). -- -- Num is strictly more powerful than Backprop, and is a -- stronger constraint on types than is necessary for proper -- backpropagating. In particular, fromInteger is a problem for -- many types, preventing useful backpropagation for lists, -- variable-length vectors (like Data.Vector) and variable-size -- matrices from linear algebra libraries like hmatrix and -- accelerate. -- -- However, this module might be useful in situations where you are -- working with external types with Num instances, and you want to -- avoid writing orphan instances for external types. -- -- If you have external types that are not Num instances, consider -- instead Numeric.Backprop.External. -- -- If you need a Num instance for tuples, you can use the -- canonical 2- and 3-tuples for the library in -- Numeric.Backprop.Tuple. If you need one for larger tuples, -- consider making a custom product type instead (making Num instances -- with something like -- <https://hackage.haskell.org/package/one-liner-instances -- one-liner-instances>). You can also use the orphan instances in the -- NumInstances package (in particular, -- Data.NumInstances.Tuple) if you are writing an application and -- do not have to worry about orphan instances. -- -- See Numeric.Backprop for fuller documentation on using these -- functions. module Numeric.Backprop.Num -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. 
A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar): -- --
--   (^.)  ::        a -> Lens' a b ->        b
--   (^^.) :: BVar s a -> Lens' a b -> BVar s b
--   
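-- For instance, since BVars of numbers can be manipulated through their
-- Num instances, the Num-based backprop in this module can be applied
-- directly (an illustrative sketch):
--
--   >>> backprop (\x -> x*x + 3) (4 :: Double)
--   (19.0,8.0)
--   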
-- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- If you have control over your data type definitions, you can also use -- splitBV and joinBV to manipulate data types by easily -- extracting fields out of a BVar of data types and creating -- BVars of data types out of BVars of their fields. See -- Numeric.Backprop#hkd for a tutorial on this use pattern. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. See -- https://backprop.jle.im/06-equipping-your-library.html for a -- detailed guide. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | backprop, but with Num constraints instead of -- Backprop constraints. -- -- See module documentation for Numeric.Backprop.Num for -- information on using this with tuples. backprop :: (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, a) -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents. -- -- Benchmarks show that this should have virtually no overhead over -- directly writing a a -> b. BVar is, in this -- situation, a zero-cost abstraction, performance-wise. -- -- See documentation of backprop for more information. evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> b -- | gradBP, but with Num constraints instead of -- Backprop constraints. gradBP :: (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a -- | backpropWith, but with Num constraints instead of -- Backprop constraints. -- -- See module documentation for Numeric.Backprop.Num for -- information on using this with tuples. -- -- Note that argument order changed in v0.2.3. backpropWith :: Num a => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, (b -> b) -> a) -- | evalBP but with no arguments. Useful when everything is just -- given through constVar. evalBP0 :: (forall s. Reifies s W => BVar s a) -> a -- | backprop2, but with Num constraints instead of -- Backprop constraints. backprop2 :: (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (a, b)) -- | evalBP for a two-argument function. See backprop2 for -- notes. evalBP2 :: (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> c -- | gradBP2, but with Num constraints instead of -- Backprop constraints. gradBP2 :: (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b) -- | backpropWith2, but with Num constraints instead of -- Backprop constraints. 
-- -- Note that argument order changed in v0.2.3. backpropWith2 :: (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (c -> c) -> (a, b)) -- | backpropN, but with Num constraints instead of -- Backprop constraints. -- -- The Every Num as in the constraint says that -- every value in the type-level list as must have a Num -- instance. This means you can use, say, '[Double, Float, Int], -- but not '[Double, Bool, String]. -- -- If you stick to concerete, monomorphic usage of this (with -- specific types, typed into source code, known at compile-time), then -- Every Num as should be fulfilled automatically. backpropN :: (Every Num as, Known Length as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, Tuple as) -- | evalBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. evalBPN :: forall as b. () => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> b -- | gradBPN, but with Num constraints instead of -- Backprop constraints. gradBPN :: (Every Num as, Known Length as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> Tuple as -- | backpropWithN, but with Num constraints instead of -- Backprop constraints. -- -- See backpropN for information on the Every constraint. -- -- Note that argument order changed in v0.2.3. backpropWithN :: (Every Num as, Known Length as) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, (b -> b) -> Tuple as) class EveryC k c as => Every k (c :: k -> Constraint) (as :: [k]) -- | Lift a value into a BVar representing a constant value. -- -- This value will not be considered an input, and its gradients will not -- be backpropagated. constVar :: a -> BVar s a -- | Shorter alias for constVar, inspired by the ad library. auto :: a -> BVar s a -- | Coerce a BVar contents. Useful for things like newtype -- wrappers. coerceVar :: Coercible a b => BVar s a -> BVar s b -- | ^^., but with Num constraints instead of -- Backprop constraints. (^^.) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Lens' b a -> BVar s a infixl 8 ^^. -- | .~~, but with Num constraints instead of -- Backprop constraints. (.~~) :: (Num a, Num b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b infixl 8 .~~ -- | ^^?, but with Num constraints instead of -- Backprop constraints. -- -- Note that many automatically-generated prisms by the lens -- package use tuples, which cannot work this this by default (because -- tuples do not have a Num instance). -- -- If you are writing an application or don't have to worry about orphan -- instances, you can pull in the orphan instances from -- NumInstances. Alternatively, you can chain those prisms with -- conversions to the anonymous canonical strict tuple types in -- Numeric.Backprop.Tuple, which do have Num instances. -- --
--   myPrism                   :: Prism' c (a, b)
--   myPrism . iso tupT2 t2Tup :: Prism' c (T2 a b)
--   
(^^?) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> Maybe (BVar s a) -- | ^^.., but with Num constraints instead of -- Backprop constraints. (^^..) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> [BVar s a] -- | ^^?!, but with Num constraints instead of -- Backprop constraints. -- -- Like ^^?!, is *UNSAFE*. (^^?!) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> BVar s a -- | viewVar, but with Num constraints instead of -- Backprop constraints. viewVar :: forall b a s. (Num a, Reifies s W) => Lens' b a -> BVar s b -> BVar s a -- | setVar, but with Num constraints instead of -- Backprop constraints. setVar :: forall a b s. (Num a, Num b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b -- | sequenceVar, but with Num constraints instead of -- Backprop constraints. sequenceVar :: (Traversable t, Num a, Reifies s W) => BVar s (t a) -> t (BVar s a) -- | collectVar, but with Num constraints instead of -- Backprop constraints. -- -- Prior to v0.2.3, required a Num constraint on t a. collectVar :: (Foldable t, Functor t, Num a, Reifies s W) => t (BVar s a) -> BVar s (t a) -- | previewVar, but with Num constraints instead of -- Backprop constraints. -- -- See documentation for ^^? for more information and important -- notes. previewVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> Maybe (BVar s a) -- | toListOfVar, but with Num constraints instead of -- Backprop constraints. toListOfVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> [BVar s a] -- | isoVar, but with Num constraints instead of -- Backprop constraints. isoVar :: (Num a, Num b, Reifies s W) => (a -> b) -> (b -> a) -> BVar s a -> BVar s b -- | isoVar, but with Num constraints instead of -- Backprop constraints. isoVar2 :: (Num a, Num b, Num c, Reifies s W) => (a -> b -> c) -> (c -> (a, b)) -> BVar s a -> BVar s b -> BVar s c -- | isoVar3, but with Num constraints instead of -- Backprop constraints. isoVar3 :: (Num a, Num b, Num c, Num d, Reifies s W) => (a -> b -> c -> d) -> (d -> (a, b, c)) -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | isoVarN, but with Num constraints instead of -- Backprop constraints. isoVarN :: (Every Num as, Known Length as, Num b, Reifies s W) => (Tuple as -> b) -> (b -> Tuple as) -> Prod (BVar s) as -> BVar s b -- | liftOp, but with Num constraints instead of -- Backprop constraints. liftOp :: (Every Num as, Known Length as, Num b, Reifies s W) => Op as b -> Prod (BVar s) as -> BVar s b -- | liftOp1, but with Num constraints instead of -- Backprop constraints. liftOp1 :: (Num a, Num b, Reifies s W) => Op '[a] b -> BVar s a -> BVar s b -- | liftOp2, but with Num constraints instead of -- Backprop constraints. liftOp2 :: (Num a, Num b, Num c, Reifies s W) => Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c -- | liftOp3, but with Num constraints instead of -- Backprop constraints. liftOp3 :: (Num a, Num b, Num c, Num d, Reifies s W) => Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
--   Op '[Int, Bool] Double
--   
-- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
--   >>> runOp (op0 10) Ø
--   (10, Ø)
--   
-- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
--   >>> gradOp' (opConst 10) (1 ::< 2 ::< 3 ::< Ø)
--   (10, 0 ::< 0 ::< 0 ::< Ø)
--   
opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
--   idOp = opIso id id
--   
idOp :: Op '[a] a -- | A version of opConst taking explicit Length, indicating -- the number of inputs and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use opConst. opConst' :: Every Num as => Length as -> a -> Op as a -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the derivative <math>, it would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that squares its input: -- --
--   square :: Num a => Op '[a] a
--   square = op1 $ \x -> (x*x, \d -> 2 * d * x)
--   
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
--   mul :: Num a => Op '[a, a] a
--   mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d))
--   
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
--   >>> gradOp' opCoerce (Identity 5 ::< Ø) :: (Int, Tuple '[Identity Int])
--   (5, Identity 1 ::< Ø)
--   
-- --
--   opCoerce = opIso coerce coerce
--   
opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
--   >>> gradOp' opTup (1 ::< 2 ::< 3 ::< Ø)
--   (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø)
--   
opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | Construct a two element Prod. Since the precedence of (:>) is -- higher than (:<), we can conveniently write lists like: -- --
--   >>> a :< b :> c
--   
-- -- Which is identical to: -- --
--   >>> a :< b :< c :< Ø
--   
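-- Prods and Tuples built this way are what the N-ary functions in this
-- module consume; a minimal sketch (the function dot2 is hypothetical):
--
--   dot2 :: Reifies s W => Prod (BVar s) '[Double, Double] -> BVar s Double
--   dot2 (x :< y :< Ø) = x * y
--   
--   gradBPN dot2 (3 ::< 5 ::< Ø)    -- 5.0 ::< 3.0 ::< Ø
--   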
infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | A Prod of simple Haskell types. type Tuple = Prod * I -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a class Reifies k (s :: k) a | s -> a -- | Automatic differentation and backpropagation. -- -- Main idea: Write a function computing what you want, and the library -- automatically provies the gradient of that function as well, for usage -- with gradient descent and other training methods. -- -- In more detail: instead of working directly with values to produce -- your result, you work with BVars containing those values. -- Working with these BVars is made smooth with the usage of -- lenses and other combinators, and libraries can offer operatons on -- BVars instead of those on normal types directly. -- -- Then, you can use: -- --
--   evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> b)
--   
-- -- to turn a BVar function into the function on actual values -- a -> b. This has virtually zero overhead over writing the -- actual function directly. -- -- Then, there's: -- --
--   gradBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> a)
--   
-- -- to automatically get the gradient, as well, for a given input. -- -- See the README for more information and links to demonstrations -- and tutorials, or dive striaght in by reading the docs for -- BVar. -- -- If you are writing a library, see -- https://backprop.jle.im/06-equipping-your-library.html for a -- guide for equipping your library with backpropatable operations. -- -- In the original version 0.1, this module required Num instances -- for methods instead of Backprop instances. This interface is -- still available in Numeric.Backprop.Num, which has the same API -- as this module, except with Num constraints on all values -- instead of Backprop constraints. -- -- See Prelude.Backprop.Explicit for a version allowing you to -- provide zero, add, and one explicitly, which can -- be useful when attempting to avoid orphan instances or when mixing -- both Backprop and Num styles. module Numeric.Backprop -- | A BVar s a is a value of type a that can be -- "backpropagated". -- -- Functions referring to BVars are tracked by the library and can -- be automatically differentiated to get their gradients and results. -- -- For simple numeric values, you can use its Num, -- Fractional, and Floating instances to manipulate them as -- if they were the numbers they represent. -- -- If a contains items, the items can be accessed and extracted -- using lenses. A Lens' b a can be used to access an -- a inside a b, using ^^. (viewVar): -- --
--   (^.)  ::        a -> Lens' a b ->        b
--   (^^.) :: BVar s a -> Lens' a b -> BVar s b
--   
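-- For example, a minimal sketch using ^^.. with the standard traverse
-- traversal (this assumes the Backprop instance for lists):
--
--   >>> gradBP (\xs -> sum (xs ^^.. traverse)) [1,2,3 :: Double]
--   [1.0,1.0,1.0]
--   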
-- -- There is also ^^? (previewVar), to use a -- Prism' or Traversal' to extract a target that may or -- may not be present (which can implement pattern matching), -- ^^.. (toListOfVar) to use a Traversal' to -- extract all targets inside a BVar, and .~~ -- (setVar) to set and update values inside a BVar. -- -- If you have control over your data type definitions, you can also use -- splitBV and joinBV to manipulate data types by easily -- extracting fields out of a BVar of data types and creating -- BVars of data types out of BVars of their fields. See -- Numeric.Backprop#hkd for a tutorial on this use pattern. -- -- For more complex operations, libraries can provide functions on -- BVars using liftOp and related functions. This is how -- you can create primitive functions that users can use to manipulate -- your library's values. See -- https://backprop.jle.im/06-equipping-your-library.html for a -- detailed guide. -- -- For example, the hmatrix library has a matrix-vector -- multiplication function, #> :: L m n -> R n -> L m. -- -- A library could instead provide a function #> :: BVar (L -- m n) -> BVar (R n) -> BVar (R m), which the user can then -- use to manipulate their BVars of L m ns and R -- ns, etc. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. data BVar s a -- | An ephemeral Wengert Tape in the environment. Used internally to track -- of the computational graph of variables. -- -- For the end user, one can just imagine Reifies s -- W as a required constraint on s that allows -- backpropagation to work. data W -- | Class of values that can be backpropagated in general. -- -- For instances of Num, these methods can be given by -- zeroNum, addNum, and oneNum. There are also -- generic options given in Numeric.Backprop.Class for functors, -- IsList instances, and Generic instances. -- --
--   instance Backprop Double where
--       zero = zeroNum
--       add = addNum
--       one = oneNum
--   
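-- Likewise, a single-constructor user type can get an instance
-- generically by leaving the body blank, as described just below (a
-- minimal sketch; the Net type and its fields are hypothetical):
--
--   data Net = Net { weights :: [Double], bias :: Double }
--     deriving Generic
--   
--   instance Backprop Net
--   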
-- -- If you leave the body of an instance declaration blank, GHC Generics -- will be used to derive instances if the type has a single constructor -- and each field is an instance of Backprop. -- -- To ensure that backpropagation works in a sound way, should obey the -- laws: -- -- -- -- -- -- Also implies preservation of information, making zipWith -- (+) an illegal implementation for lists and vectors. -- -- This is only expected to be true up to potential "extra zeroes" in -- x and y in the result. -- -- -- -- -- -- -- -- -- -- -- -- -- -- Note that not all values in the backpropagation process needs all of -- these methods: Only the "final result" needs one, for example. -- These are all grouped under one typeclass for convenience in defining -- instances, and also to talk about sensible laws. For fine-grained -- control, use the "explicit" versions of library functions (for -- example, in Numeric.Backprop.Explicit) instead of -- Backprop based ones. -- -- This typeclass replaces the reliance on Num of the previous API -- (v0.1). Num is strictly more powerful than Backprop, and -- is a stronger constraint on types than is necessary for proper -- backpropagating. In particular, fromInteger is a problem for -- many types, preventing useful backpropagation for lists, -- variable-length vectors (like Data.Vector) and variable-size -- matrices from linear algebra libraries like hmatrix and -- accelerate. class Backprop a -- | "Zero out" all components of a value. For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: Backprop a => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericAdd, a pre-built definition for instances of -- Generic with one constructor whose fields are all themselves -- instances of Backprop. add :: Backprop a => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: Backprop a => a -> a -- | "Zero out" all components of a value. For scalar values, this should -- just be const 0. For vectors and matrices, this should -- set all components to zero, the additive identity. 
-- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See zeroNum for a pre-built definition for instances of -- Num and zeroFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericZero, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. zero :: (Backprop a, Generic a, GZero (Rep a)) => a -> a -- | Add together two values of a type. To combine contributions of -- gradients, so should be information-preserving: -- -- -- -- Should be as strict as possible. This behavior is observed for -- all instances provided by this library. -- -- See addNum for a pre-built definition for instances of -- Num and addFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericAdd, a pre-built definition for instances of -- Generic with one constructor whose fields are all themselves -- instances of Backprop. add :: (Backprop a, Generic a, GAdd (Rep a)) => a -> a -> a -- | One all components of a value. For scalar values, this should -- just be const 1. For vectors and matrices, this should -- set all components to one, the multiplicative identity. -- -- Should be idempotent: -- -- -- -- Should be as lazy as possible. This behavior is observed for -- all instances provided by this library. -- -- See oneNum for a pre-built definition for instances of -- Num and oneFunctor for a definition for instances of -- Functor. If left blank, will automatically be -- genericOne, a pre-built definition for instances of -- Generic whose fields are all themselves instances of -- Backprop. one :: (Backprop a, Generic a, GOne (Rep a)) => a -> a -- | A newtype wrapper over an f a for Applicative -- f that gives a free Backprop instance (as well as -- Num etc. instances). -- -- Useful for performing backpropagation over functions that require some -- monadic context (like IO) to perform. newtype ABP f a ABP :: f a -> ABP f a [runABP] :: ABP f a -> f a -- | A newtype wrapper over an instance of Num that gives a free -- Backprop instance. -- -- Useful for things like DerivingVia, or for avoiding orphan -- instances. newtype NumBP a NumBP :: a -> NumBP a [runNumBP] :: NumBP a -> a -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents, also computing its -- gradient a as well. -- -- The Rank-N type forall s. Reifies s W => ... -- is used to ensure that BVars do not leak out of the context -- (similar to how it is used in Control.Monad.ST), and also as a -- reference to an ephemeral Wengert tape used to track the graph of -- references. backprop :: (Backprop a, Backprop b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, a) -- | Turn a function BVar s a -> BVar s b into -- the function a -> b that it represents. -- -- Benchmarks show that this should have virtually no overhead over -- directly writing a a -> b. BVar is, in this -- situation, a zero-cost abstraction, performance-wise. -- -- See documentation of backprop for more information. evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> b -- | Take a function BVar s a -> BVar s b, -- interpreted as a function a -> b, and compute its gradient -- with respect to its input. -- -- The resulting a -> a tells how the input (and its -- components) affects the output. Positive numbers indicate that the -- result will vary in the same direction as any adjustment in the input. 
-- Negative numbers indicate that the result will vary in the opposite -- direction as any adjustment in the input. Larger numbers indicate a -- greater sensitivity of change, and small numbers indicate lower -- sensitivity. -- -- See documentation of backprop for more information. -- -- If you want to provide an explicit "final gradient" for the end, see -- backpropWith. gradBP :: (Backprop a, Backprop b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a -- | A version of backprop that allows you to specify the gradent of -- your "final result" in with respect to the output of your function. -- -- Typically, this is just the scalar 1, or a value of components that -- are all 1. -- -- Instead of taking the b gradient, the you may provide a b -- -> b, which backpropWith calls with the result of your -- function as the argument. This allows you to return something with the -- correct "shape", if not a scalar. -- -- backprop is essentially backpropWith with -- const 1 for scalars and Num instances. -- -- Note that argument order changed in v0.2.3 backpropWith :: Backprop a => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, (b -> b) -> a) -- | backprop for a two-argument function. -- -- Not strictly necessary, because you can always uncurry a function by -- passing in all of the argument inside a data type, or just use a -- tuple. However, this could potentially be more performant. -- -- For 3 and more arguments, consider using backpropN. backprop2 :: (Backprop a, Backprop b, Backprop c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (a, b)) -- | evalBP for a two-argument function. See backprop2 for -- notes. evalBP2 :: (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> c -- | gradBP for a two-argument function. See backprop2 for -- notes. gradBP2 :: (Backprop a, Backprop b, Backprop c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b) -- | backprop2, but allows you to provide the gradient of the "final -- result" with respect to the output of your function. See -- backpropWith for more details. -- -- Note that argument order changed in v0.2.3 backpropWith2 :: (Backprop a, Backprop b) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (c -> c) -> (a, b)) -- | backprop generalized to multiple inputs of different types. See -- the Numeric.Backprop.Op#prod for a mini-tutorial on -- heterogeneous lists. -- -- Not strictly necessary, because you can always uncurry a function by -- passing in all of the inputs in a data type containing all of the -- arguments or a giant tuple. However, this could potentially also be -- more performant. -- -- A Prod (BVar s) '[Double, Float, Double], for -- instance, is a tuple of BVar s Double, -- BVar s Float, and BVar s -- Double, and can be pattern matched on using :< -- (cons) and 'Ø' (nil). -- -- Tuples can be built and pattern matched on using ::< (cons) -- and 'Ø' (nil), as well. -- -- The Every Backprop as in the constraint says -- that every value in the type-level list as must have a -- Backprop instance. This means you can use, say, '[Double, -- Float, Int], but not '[Double, Bool, String]. -- -- If you stick to concerete, monomorphic usage of this (with -- specific types, typed into source code, known at compile-time), then -- Every Backprop as should be fulfilled -- automatically. backpropN :: (Every Backprop as, Known Length as, Backprop b) => (forall s. 
Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, Tuple as) -- | evalBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. evalBPN :: forall as b. () => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> b -- | gradBP generalized to multiple inputs of different types. See -- documentation for backpropN for more details. gradBPN :: (Every Backprop as, Known Length as, Backprop b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> Tuple as -- | backpropN, but allows you to provide the gradient of the "final -- result" with respect to the output of your function. See -- backpropWith for more details. -- -- Note that argument order changed in v0.2.3. backpropWithN :: (Every Backprop as, Known Length as) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, (b -> b) -> Tuple as) class EveryC k c as => Every k (c :: k -> Constraint) (as :: [k]) -- | evalBP but with no arguments. Useful when everything is just -- given through constVar. evalBP0 :: (forall s. Reifies s W => BVar s a) -> a -- | Lift a value into a BVar representing a constant value. -- -- This value will not be considered an input, and its gradients will not -- be backpropagated. constVar :: a -> BVar s a -- | Shorter alias for constVar, inspired by the ad library. auto :: a -> BVar s a -- | Coerce a BVar contents. Useful for things like newtype -- wrappers. coerceVar :: Coercible a b => BVar s a -> BVar s b -- | An infix version of viewVar, meant to evoke parallels to -- ^. from lens. -- -- With normal values, you can extract something from that value with a -- lens: -- --
--   x ^. myLens
--   
-- -- would extract a piece of x :: b, specified by myLens :: -- Lens' b a. The result has type a. -- --
--   xVar ^^. myLens
--   
-- -- would extract a piece out of xVar :: BVar s b (a -- BVar holding a b), specified by myLens :: Lens' b -- a. The result has type BVar s a (a BVar -- holding a a) -- -- This is the main way to pull out values from BVar of container -- types. -- -- If you have control of your data type definitions, consider using -- splitBV, which lets you break out BVars of values into -- BVars of their individual fields automatically without -- requiring lenses. -- -- WARNING: Do not use with any lenses that operate "numerically" -- on the contents (like multiplying). (^^.) :: forall b a s. (Backprop a, Reifies s W) => BVar s b -> Lens' b a -> BVar s a infixl 8 ^^. -- | An infix version of setVar, meant to evoke parallels to -- .~ from lens. -- -- With normal values, you can set something in a value with a lens: a -- lens: -- --
--   x & myLens .~ y
--   
-- -- would "set" a part of x :: b, specified by myLens :: -- Lens' a b, to a new value y :: a. -- --
--   xVar & myLens .~~ yVar
--   
-- -- would "set" a part of xVar :: BVar s b (a BVar -- holding a b), specified by myLens :: Lens' a -- b, to a new value given by yVar :: BVar s a. The -- result is a new (updated) value of type BVar s b. -- -- This is the main way to set values inside BVars of container -- types. (.~~) :: (Backprop a, Backprop b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b infixl 8 .~~ -- | An infix version of previewVar, meant to evoke parallels to -- ^? from lens. -- -- With normal values, you can (potentially) extract something from that -- value with a lens: -- --
--   x ^? myPrism
--   
-- -- would (potentially) extract a piece of x :: b, specified by -- myPrism :: Traversal' b a. The result has type -- Maybe a. -- --
--   xVar ^^? myPrism
--   
-- -- would (potentially) extract a piece out of xVar :: BVar s -- b (a BVar holding a b), specified by myPrism -- :: Prism' b a. The result has type Maybe (BVar -- s a) (Maybe a BVar holding a a). -- -- This is intended to be used with Prism's (which hits at most -- one target), but will actually work with any Traversal'. -- If the traversal hits more than one target, the first one found will -- be extracted. -- -- This can be used to "pattern match" on BVars, by using prisms -- on constructors. (^^?) :: forall b a s. (Backprop a, Reifies s W) => BVar s b -> Traversal' b a -> Maybe (BVar s a) -- | An infix version of toListOfVar, meant to evoke parallels to -- ^.. from lens. -- -- With normal values, you can extract all targets of a Traversal -- from that value with a: -- --
--   x ^.. myTraversal
--   
-- -- would extract all targets inside of x :: b, specified by -- myTraversal :: Traversal' b a. The result has type -- [a]. -- --
--   xVar ^^.. myTraversal
--   
-- -- would extract all targets inside of xVar :: BVar s b -- (a BVar holding a b), specified by myTraversal :: -- Traversal' b a. The result has type [BVar s a] (A -- list of BVars holding as). (^^..) :: forall b a s. (Backprop a, Reifies s W) => BVar s b -> Traversal' b a -> [BVar s a] -- | An *UNSAFE* version of previewVar assuming that it is there. -- -- Is undefined if the Traversal hits no targets. -- -- Is essentially ^^? with fromJust, or ^^.. with -- head. (^^?!) :: forall b a s. (Backprop a, Reifies s W) => BVar s b -> Traversal' b a -> BVar s a -- | Using a Lens', extract a value inside a BVar. -- Meant to evoke parallels to view from lens. -- -- If you have control of your data type definitions, consider using -- splitBV, which lets you break out BVars of values into -- BVars of their individual fields automatically without -- requiring lenses. -- -- See documentation for ^^. for more information. viewVar :: forall a b s. (Backprop a, Reifies s W) => Lens' b a -> BVar s b -> BVar s a -- | Using a Lens', set a value inside a BVar. Meant -- to evoke parallels to "set" from lens. -- -- See documentation for .~~ for more information. setVar :: (Backprop a, Backprop b, Reifies s W) => Lens' b a -> BVar s a -> BVar s b -> BVar s b -- | Extract all of the BVars out of a Traversable container -- of BVars. -- -- Note that this associates gradients in order of occurrence in the -- original data structure; the second item in the gradient is assumed to -- correspond with the second item in the input, etc.; this can cause -- unexpected behavior in Foldable instances that don't have a -- fixed number of items. sequenceVar :: (Traversable t, Backprop a, Reifies s W) => BVar s (t a) -> t (BVar s a) -- | Collect all of the BVars in a container into a BVar of -- that container's contents. -- -- Note that this associates gradients in order of occurrence in the -- original data structure; the second item in the total derivative and -- gradient is assumed to correspond with the second item in the input, -- etc.; this can cause unexpected behavior in Foldable instances -- that don't have a fixed number of items. -- -- Prior to v0.2.3, required a Backprop constraint on t -- a. collectVar :: (Foldable t, Functor t, Backprop a, Reifies s W) => t (BVar s a) -> BVar s (t a) -- | Using a Traversal', extract a single value inside a -- BVar, if it exists. If more than one traversal target exists, -- returns te first. Meant to evoke parallels to preview from -- lens. Really only intended to be used wth Prism's, or -- up-to-one target traversals. -- -- See documentation for ^^? for more information. previewVar :: forall b a s. (Backprop a, Reifies s W) => Traversal' b a -> BVar s b -> Maybe (BVar s a) -- | Using a Traversal', extract all targeted values inside a -- BVar. Meant to evoke parallels to toListOf from lens. -- -- See documentation for ^^.. for more information. toListOfVar :: forall b a s. (Backprop a, Reifies s W) => Traversal' b a -> BVar s b -> [BVar s a] -- | Useful pattern for constructing and deconstructing BVars of -- two-tuples. -- | Useful pattern for constructing and deconstructing BVars -- three-tuples. -- | Convert the value inside a BVar using a given isomorphism. -- Useful for things like constructors. -- -- If you have control of your data type definitions, consider using -- joinBV, which lets you use your data type constructors -- themselves to join together BVars as their fields. -- -- Warning: This is unsafe! 
It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. isoVar :: (Backprop a, Backprop b, Reifies s W) => (a -> b) -> (b -> a) -> BVar s a -> BVar s b -- | Convert the values inside two BVars using a given isomorphism. -- Useful for things like constructors. See isoVar for caveats. -- -- If you have control of your data type definitions, consider using -- joinBV, which lets you use your data type constructors -- themselves to join together BVars as their fields. isoVar2 :: (Backprop a, Backprop b, Backprop c, Reifies s W) => (a -> b -> c) -> (c -> (a, b)) -> BVar s a -> BVar s b -> BVar s c -- | Convert the values inside three BVars using a given -- isomorphism. Useful for things like constructors. See isoVar -- for caveats. isoVar3 :: (Backprop a, Backprop b, Backprop c, Backprop d, Reifies s W) => (a -> b -> c -> d) -> (d -> (a, b, c)) -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | Convert the values inside a tuple of BVars using a given -- isomorphism. Useful for things like constructors. See isoVar -- for caveats. -- -- If you have control of your data type definitions, consider using -- joinBV, which lets you use your data type constructors -- themselves to join together BVars as their fields. isoVarN :: (Every Backprop as, Known Length as, Backprop b, Reifies s W) => (Tuple as -> b) -> (b -> Tuple as) -> Prod (BVar s) as -> BVar s b -- | Lift an Op with an arbitrary number of inputs to a function on -- the appropriate number of BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information, and -- Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. liftOp :: (Every Backprop as, Known Length as, Backprop b, Reifies s W) => Op as b -> Prod (BVar s) as -> BVar s b -- | Lift an Op with a single input to be a function on a single -- BVar. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp1 :: (Backprop a, Backprop b, Reifies s W) => Op '[a] b -> BVar s a -> BVar s b -- | Lift an Op with two inputs to be a function on a two -- BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp2 :: (Backprop a, Backprop b, Backprop c, Reifies s W) => Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c -- | Lift an Op with three inputs to be a function on a three -- BVars. -- -- Should preferably be used only by libraries to provide primitive -- BVar functions for their types for users. -- -- See Numeric.Backprop#liftops and documentation for -- liftOp for more information. liftOp3 :: (Backprop a, Backprop b, Backprop c, Backprop d, Reifies s W) => Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d -- | Split out a BVar of "higher-kinded data type", a la -- http://reasonablypolymorphic.com/blog/higher-kinded-data/ -- -- Lets you take BVar of a value into a separate BVar of -- every field of that value. -- -- See Numeric.Backprop#hkd for a tutorial on usage. 
-- -- This will work with all data types made with a single constructor, -- whose fields are all instances of Backprop, where the type -- itself has an instance of Backprop. The type also must derive -- Generic. -- -- Note that BV is a pattern synonym version where the -- deconstructor is exactly a view into splitBV. splitBV :: (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Backprop (Rep (z f) ()), Every Backprop as, Known Length as, Reifies s W) => BVar s (z f) -> z (BVar s) -- | Assemble a BVar of "higher-kinded data type", a la -- http://reasonablypolymorphic.com/blog/higher-kinded-data/ -- -- It lets you take a BVar of every field of a value, and join -- them into a BVar of that value. -- -- See Numeric.Backprop#hkd for a tutorial on usage. -- -- This will work with all data types made with a single constructor, -- whose fields are all instances of Backprop, where the type -- itself has an instance of Backprop. -- -- Note that BV is a pattern synonym version where the constructor -- is exactly joinBV. joinBV :: (Generic (z f), Generic (z (BVar s)), BVGroup s as (Rep (z f)) (Rep (z (BVar s))), Backprop (z f), Every Backprop as, Known Length as, Reifies s W) => z (BVar s) -> BVar s (z f) -- | Pattern synonym wrapping manual usage of splitBV and -- joinBV. It is a pattern for a BVar s (z f) -- containing a z (BVar s) -- | Helper class for generically "splitting" and "joining" BVars -- into constructors. See splitBV and joinBV. -- -- See Numeric.Backprop#hkd for a tutorial on how to use this. -- -- Instances should be available for types made with one constructor -- whose fields are all instances of Backprop, with a -- Generic instance. class BVGroup s as i o | o -> i, i -> as -- | An Op as a describes a differentiable function from -- as to a. -- -- For example, a value of type -- --
--   Op '[Int, Bool] Double
--   
-- -- is a function from an Int and a Bool, returning a -- Double. It can be differentiated to give a gradient of -- an Int and a Bool if given a total derivative for the -- Double. If we call Bool <math>, then, -- mathematically, it is akin to a: -- -- <math> -- -- See runOp, gradOp, and gradOpWith for examples on -- how to run it, and Op for instructions on creating it. -- -- It is simpler to not use this type constructor directly, and instead -- use the op2, op1, op2, and op3 helper -- smart constructors. -- -- See Numeric.Backprop.Op#prod for a mini-tutorial on using -- Prod and Tuple. -- -- To use an Op with the backprop library, see -- liftOp, liftOp1, liftOp2, and -- liftOp3. newtype Op as a -- | Construct an Op by giving a function creating the result, and -- also a continuation on how to create the gradient, given the total -- derivative of a. -- -- See the module documentation for Numeric.Backprop.Op for more -- details on the function that this constructor and Op expect. Op :: (Tuple as -> (a, a -> Tuple as)) -> Op as a -- | Run the function that the Op encodes, returning a continuation -- to compute the gradient, given the total derivative of a. See -- documentation for Numeric.Backprop.Op for more information. [runOpWith] :: Op as a -> Tuple as -> (a, a -> Tuple as) -- | Create an Op that takes no inputs and always returns the given -- value. -- -- There is no gradient, of course (using gradOp will give you an -- empty tuple), because there is no input to have a gradient of. -- --
--   >>> runOp (op0 10) Ø
--   (10, Ø)
--   
-- -- For a constant Op that takes input and ignores it, see -- opConst and opConst'. op0 :: a -> Op '[] a -- | An Op that ignores all of its inputs and returns a given -- constant value. -- --
--   >>> gradOp' (opConst 10) (1 ::< 2 ::< 3 ::< Ø)
--   (10, 0 ::< 0 ::< 0 ::< Ø)
--   
opConst :: (Every Num as, Known Length as) => a -> Op as a -- | An Op that just returns whatever it receives. The identity -- function. -- --
--   idOp = opIso id id
--   
idOp :: Op '[a] a -- | A version of opConst taking explicit Length, indicating -- the number of inputs and their types. -- -- Requiring an explicit Length is mostly useful for rare -- "extremely polymorphic" situations, where GHC can't infer the type and -- length of the the expected input tuple. If you ever actually -- explicitly write down as as a list of types, you should be -- able to just use opConst. opConst' :: Every Num as => Length as -> a -> Op as a -- | Create an Op of a function taking one input, by giving its -- explicit derivative. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and return the derivative of the input. -- -- If we have -- -- <math> -- -- Then the derivative <math>, it would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that squares its input: -- --
--   square :: Num a => Op '[a] a
--   square = op1 $ \x -> (x*x, \d -> 2 * d * x)
--   
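-- As a hedged, worked illustration (not a doctest shipped with the library): for an input of 3, the result is 9 and the gradient is 2 * 1 * 3 = 6, since gradOp' seeds the total derivative d with 1:
--
--   >>> gradOp' square (3 ::< Ø)
--   (9, 6 ::< Ø)
--   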
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op1 :: (a -> (b, b -> a)) -> Op '[a] b -- | Create an Op of a function taking two inputs, by giving its -- explicit gradient. The function should return a tuple containing the -- result of the function, and also a function taking the derivative of -- the result and returning the derivatives of the inputs. -- -- If we have -- -- <math> -- -- Then the gradient <math> would be: -- -- <math> -- -- If our Op represents <math>, then the second item in the -- resulting tuple should be a function that takes <math> and -- returns <math>. -- -- As an example, here is an Op that multiplies its inputs: -- --
--   mul :: Num a => Op '[a, a] a
--   mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d))
--   
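-- As a hedged, worked illustration (not a doctest shipped with the library): for inputs 2 and 3, the result is 6 and the gradients are d*y = 3 and x*d = 2, since gradOp' seeds the total derivative d with 1:
--
--   >>> gradOp' mul (2 ::< 3 ::< Ø)
--   (6, 3 ::< 2 ::< Ø)
--   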
-- -- Remember that, generally, end users shouldn't directly construct -- Ops; they should be provided by libraries or generated -- automatically. op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c -- | Create an Op of a function taking three inputs, by giving its -- explicit gradient. See documentation for op2 for more details. op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d -- | An Op that coerces an item into another item whose type has the -- same runtime representation. -- --
--   >>> gradOp' opCoerce (Identity 5) :: (Int, Identity Int)
--   (5, Identity 1)
--   
-- --
--   opCoerce = opIso coerce coerce
--   
opCoerce :: Coercible a b => Op '[a] b -- | An Op that takes as and returns exactly the input -- tuple. -- --
--   >>> gradOp' opTup (1 ::< 2 ::< 3 ::< Ø)
--   (1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø)
--   
opTup :: Op as (Tuple as) -- | An Op that runs the input value through an isomorphism. -- -- Warning: This is unsafe! It assumes that the isomorphisms themselves -- have derivative 1, so will break for things like exp & -- log. Basically, don't use this for any "numeric" isomorphisms. opIso :: (a -> b) -> (b -> a) -> Op '[a] b -- | An Op that runs the input value through an isomorphism between -- a tuple of values and a value. See opIso for caveats. -- -- In Numeric.Backprop.Op since version 0.1.2.0, but only exported -- from Numeric.Backprop since version 0.1.3.0. opIsoN :: (Tuple as -> b) -> (b -> Tuple as) -> Op as b -- | An Op that extracts a value from an input value using a -- Lens'. -- -- Warning: This is unsafe! It assumes that it extracts a specific value -- unchanged, with derivative 1, so will break for things that -- numerically manipulate things before returning them. opLens :: Num a => Lens' a b -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp1, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad1 :: (a -> b) -> Op '[a] b -- | Create an Op with no gradient. Can be evaluated with -- evalOp, but will throw a runtime exception when asked for the -- gradient. -- -- Can be used with BVar with liftOp, and -- evalBP will work fine. gradBP and backprop -- will also work fine if the result is never used in the final answer, -- but will throw a runtime exception if the final answer depends on the -- result of this operation. -- -- Useful if your only API is exposed through backprop. Just be -- sure to tell your users that this will explode when finding the -- gradient if the result is used in the final result. noGrad :: (Tuple as -> b) -> Op as b data Prod k (f :: k -> *) (a :: [k]) :: forall k. () => (k -> *) -> [k] -> * [Ø] :: Prod k f [] k [:<] :: Prod k f (:) k a1 as -- | Construct a two element Prod. Since the precedence of (:>) is -- higher than (:<), we can conveniently write lists like: -- --
--   >>> a :< b :> c
--   
-- -- Which is identical to: -- --
--   >>> a :< b :< c :< Ø
--   
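-- As a hedged sketch (not from the library's own docs), these same constructors build the Tuples that Ops consume; the ::< pattern documented below wraps each value in I:
--
--   inputs :: Tuple '[Int, Bool]    -- hypothetical example value
--   inputs = 1 ::< True ::< Ø
--   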
infix 6 :> -- | Build a singleton Prod. only :: () => f a -> Prod k f (:) k a [] k head' :: () => Prod k f (:<) k a as -> f a -- | A Prod of simple Haskell types. type Tuple = Prod * I -- | Cons onto a Tuple. infixr 5 ::< -- | Singleton Tuple. only_ :: () => a -> Tuple (:) * a [] * newtype I a :: * -> * I :: a -> I a [getI] :: I a -> a class Reifies k (s :: k) a | s -> a -- | Provides "explicit" versions of all of the functions in -- Prelude.Backprop. Instead of relying on a Backprop -- instance, allows you to manually provide zero, add, and -- one on a per-value basis. module Prelude.Backprop.Explicit -- | sum, but taking explicit add and zero. sum :: (Foldable t, Functor t, Num a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | product, but taking explicit add and zero. product :: (Foldable t, Functor t, Fractional a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | length, but taking explicit add and zero. length :: (Foldable t, Num b, Reifies s W) => AddFunc (t a) -> ZeroFunc (t a) -> ZeroFunc b -> BVar s (t a) -> BVar s b -- | minimum, but taking explicit add and zero. minimum :: (Foldable t, Functor t, Ord a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | maximum, but taking explicit add and zero. maximum :: (Foldable t, Functor t, Ord a, Reifies s W) => AddFunc (t a) -> ZeroFunc a -> BVar s (t a) -> BVar s a -- | traverse, but taking explicit add and zero. -- -- See documentation for collectVar for information on the API change -- in v0.2.3 that removed the ZeroFunc (t b) and -- ZeroFunc (f (t b)) parameters. traverse :: (Traversable t, Applicative f, Foldable f, Reifies s W) => AddFunc a -> AddFunc b -> AddFunc (t b) -> ZeroFunc a -> ZeroFunc b -> (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | toList, but taking explicit add and zero. toList :: (Traversable t, Reifies s W) => AddFunc a -> ZeroFunc a -> BVar s (t a) -> [BVar s a] -- | mapAccumL, but taking explicit add and zero. -- -- See documentation for collectVar for information on the API change -- in v0.2.3 that removed the ZeroFunc (t c) parameter. mapAccumL :: (Traversable t, Reifies s W) => AddFunc b -> AddFunc c -> ZeroFunc b -> ZeroFunc c -> (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | mapAccumR, but taking explicit add and zero. -- -- See documentation for collectVar for information on the API change -- in v0.2.3 that removed the ZeroFunc (t c) parameter. mapAccumR :: (Traversable t, Reifies s W) => AddFunc b -> AddFunc c -> ZeroFunc b -> ZeroFunc c -> (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | foldr, but taking explicit add and zero. foldr :: (Traversable t, Reifies s W) => AddFunc a -> ZeroFunc a -> (BVar s a -> BVar s b -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | foldl', but taking explicit add and zero. foldl' :: (Traversable t, Reifies s W) => AddFunc a -> ZeroFunc a -> (BVar s b -> BVar s a -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | fmap, but taking explicit add and zero. -- -- See documentation for collectVar for information on the API change -- in v0.2.3 that removed the ZeroFunc (f b) parameter. fmap :: (Traversable f, Reifies s W) => AddFunc a -> AddFunc b -> ZeroFunc a -> ZeroFunc b -> (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | pure, but taking explicit add and zero. 
pure :: (Foldable t, Applicative t, Reifies s W) => AddFunc a -> ZeroFunc a -> ZeroFunc (t a) -> BVar s a -> BVar s (t a) -- | liftA2, but taking explicit add and zero. -- -- See documentation for collectVar for information on the API change -- in v0.2.3 that removed the ZeroFunc (f c) parameter. liftA2 :: (Traversable f, Applicative f, Reifies s W) => AddFunc a -> AddFunc b -> AddFunc c -> ZeroFunc a -> ZeroFunc b -> ZeroFunc c -> (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | liftA3, but taking explicit add and zero. -- -- See documentation for collectVar for information on the API change -- in v0.2.3 that removed the ZeroFunc (f d) parameter. liftA3 :: (Traversable f, Applicative f, Reifies s W) => AddFunc a -> AddFunc b -> AddFunc c -> AddFunc d -> ZeroFunc a -> ZeroFunc b -> ZeroFunc c -> ZeroFunc d -> (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | fromIntegral, but taking explicit add and zero. fromIntegral :: (Integral a, Integral b, Reifies s W) => AddFunc a -> ZeroFunc b -> BVar s a -> BVar s b -- | realToFrac, but taking explicit add and zero. realToFrac :: (Fractional a, Real a, Fractional b, Real b, Reifies s W) => AddFunc a -> ZeroFunc b -> BVar s a -> BVar s b -- | round, but taking explicit add and zero. round :: (RealFrac a, Integral b, Reifies s W) => AddFunc a -> ZeroFunc b -> BVar s a -> BVar s b -- | fromIntegral', but taking explicit add and zero. fromIntegral' :: (Integral a, RealFrac b, Reifies s W) => AddFunc a -> ZeroFunc b -> BVar s a -> BVar s b -- | Coerce items inside a BVar. coerce :: Coercible a b => BVar s a -> BVar s b -- | Some lifted versions of common functions found in Prelude (or -- base in general). -- -- This module is intended to be a catch-all one, so feel free to suggest -- other functions or submit a PR if you think one would make sense. -- -- See Prelude.Backprop.Num for a version with Num -- constraints instead of Backprop constraints, and -- Prelude.Backprop.Explicit for a version allowing you to provide -- zero, add, and one explicitly. module Prelude.Backprop -- | Lifted sum. More efficient than going through toList. sum :: (Foldable t, Functor t, Backprop (t a), Backprop a, Num a, Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted product. More efficient than going through -- toList. product :: (Foldable t, Functor t, Backprop (t a), Backprop a, Fractional a, Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted length. More efficient than going through toList. length :: (Foldable t, Backprop (t a), Backprop b, Num b, Reifies s W) => BVar s (t a) -> BVar s b -- | Lifted minimum. Undefined for situations where minimum -- would be undefined. More efficient than going through toList. minimum :: (Foldable t, Functor t, Backprop a, Ord a, Backprop (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted maximum. Undefined for situations where maximum -- would be undefined. More efficient than going through toList. maximum :: (Foldable t, Functor t, Backprop a, Ord a, Backprop (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | Lifted traverse. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Functors. -- -- Prior to v0.2.3, required a Backprop constraint on f (t -- b). traverse :: (Traversable t, Applicative f, Foldable f, Backprop a, Backprop b, Backprop (t b), Reifies s W) => (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | Lifted version of toList. 
Takes a BVar of a -- Traversable of items and returns a list of BVars for -- each item. -- -- You can use this to implement "lifted" versions of Foldable -- methods like foldr, foldl', etc.; however, sum, -- product, length, minimum, and maximum have -- more efficient implementations than simply minimum . -- toList. toList :: (Traversable t, Backprop a, Reifies s W) => BVar s (t a) -> [BVar s a] -- | Lifted version of mapAccumL. -- -- Prior to v0.2.3, required a Backprop constraint on t -- b. mapAccumL :: (Traversable t, Backprop b, Backprop c, Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | Lifted version of mapAccumR. -- -- Prior to v0.2.3, required a Backprop constraint on t -- b. mapAccumR :: (Traversable t, Backprop b, Backprop c, Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | Lifted foldr. Essentially just toList composed with a -- normal list foldr, and is only here for convenience. foldr :: (Traversable t, Backprop a, Reifies s W) => (BVar s a -> BVar s b -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | Lifted foldl'. Essentially just toList composed with a -- normal list foldl', and is only here for convenience. foldl' :: (Traversable t, Backprop a, Reifies s W) => (BVar s b -> BVar s a -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | Lifted fmap. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Functors. -- -- Prior to v0.2.3, required a Backprop constraint on f -- b. fmap :: (Traversable f, Backprop a, Backprop b, Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | Alias for fmap. (<$>) :: (Traversable f, Backprop a, Backprop b, Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | Lifted pure. pure :: (Foldable t, Applicative t, Backprop (t a), Backprop a, Reifies s W) => BVar s a -> BVar s (t a) -- | Lifted liftA2. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Applicatives. -- -- Prior to v0.2.3, required a Backprop constraint on f -- c. liftA2 :: (Traversable f, Applicative f, Backprop a, Backprop b, Backprop c, Reifies s W) => (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | Lifted liftA3. Lifts backpropagatable functions to be -- backpropagatable functions on Traversable Applicatives. -- -- Prior to v0.2.3, required a Backprop constraint on f -- d. liftA3 :: (Traversable f, Applicative f, Backprop a, Backprop b, Backprop c, Backprop d, Reifies s W) => (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | Lifted conversion between two Integral instances. fromIntegral :: (Backprop a, Integral a, Backprop b, Integral b, Reifies s W) => BVar s a -> BVar s b -- | Lifted conversion between two Fractional and Real -- instances. realToFrac :: (Backprop a, Fractional a, Real a, Backprop b, Fractional b, Real b, Reifies s W) => BVar s a -> BVar s b -- | Lifted version of round. -- -- Gradient should technically diverge whenever the fractional part is -- 0.5, but does not do this for convenience reasons. round :: (RealFrac a, Integral b, Reifies s W) => BVar s a -> BVar s b -- | Lifted version of fromIntegral, defined to let you return -- RealFrac instances as targets, instead of only other -- Integrals. Essentially the opposite of round. 
-- -- The gradient should technically diverge whenever the fractional part -- of the downstream gradient is 0.5, but does not do this for -- convenience reasons. fromIntegral' :: (Integral a, RealFrac b, Reifies s W) => BVar s a -> BVar s b -- | Coerce items inside a BVar. coerce :: Coercible a b => BVar s a -> BVar s b -- | Provides the exact same API as Prelude.Backprop, except -- requiring Num instances for all types involved instead of -- Backprop instances. module Prelude.Backprop.Num -- | sum, but with Num constraints instead of -- Backprop constraints. sum :: (Foldable t, Functor t, Num (t a), Num a, Reifies s W) => BVar s (t a) -> BVar s a -- | product, but with Num constraints instead of -- Backprop constraints. product :: (Foldable t, Functor t, Num (t a), Fractional a, Reifies s W) => BVar s (t a) -> BVar s a -- | length, but with Num constraints instead of -- Backprop constraints. length :: (Foldable t, Num (t a), Num b, Reifies s W) => BVar s (t a) -> BVar s b -- | minimum, but with Num constraints instead of -- Backprop constraints. minimum :: (Foldable t, Functor t, Num a, Ord a, Num (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | maximum, but with Num constraints instead of -- Backprop constraints. maximum :: (Foldable t, Functor t, Num a, Ord a, Num (t a), Reifies s W) => BVar s (t a) -> BVar s a -- | traverse, but with Num constraints instead of -- Backprop constraints. -- -- See vector-sized for a fixed-length vector type with a very -- appropriate Num instance! -- -- Prior to v0.2.3, required a Num constraint on f (t b). traverse :: (Traversable t, Applicative f, Foldable f, Num a, Num b, Num (t b), Reifies s W) => (BVar s a -> f (BVar s b)) -> BVar s (t a) -> BVar s (f (t b)) -- | toList, but with Num constraints instead of -- Backprop constraints. toList :: (Traversable t, Num a, Reifies s W) => BVar s (t a) -> [BVar s a] -- | mapAccumL, but with Num constraints instead of -- Backprop constraints. -- -- Prior to v0.2.3, required a Num constraint on t b. mapAccumL :: (Traversable t, Num b, Num c, Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | mapAccumR, but with Num constraints instead of -- Backprop constraints. -- -- Prior to v0.2.3, required a Num constraint on t b. mapAccumR :: (Traversable t, Num b, Num c, Reifies s W) => (BVar s a -> BVar s b -> (BVar s a, BVar s c)) -> BVar s a -> BVar s (t b) -> (BVar s a, BVar s (t c)) -- | foldr, but with Num constraints instead of -- Backprop constraints. foldr :: (Traversable t, Num a, Reifies s W) => (BVar s a -> BVar s b -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | foldl', but with Num constraints instead of -- Backprop constraints. foldl' :: (Traversable t, Num a, Reifies s W) => (BVar s b -> BVar s a -> BVar s b) -> BVar s b -> BVar s (t a) -> BVar s b -- | fmap, but with Num constraints instead of -- Backprop constraints. -- -- Prior to v0.2.3, required a Num constraint on f b. fmap :: (Traversable f, Num a, Num b, Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | Alias for fmap. (<$>) :: (Traversable f, Num a, Num b, Reifies s W) => (BVar s a -> BVar s b) -> BVar s (f a) -> BVar s (f b) -- | pure, but with Num constraints instead of -- Backprop constraints. pure :: (Foldable t, Applicative t, Num (t a), Num a, Reifies s W) => BVar s a -> BVar s (t a) -- | liftA2, but with Num constraints instead of -- Backprop constraints. -- -- Prior to v0.2.3, required a Num constraint on f c. 
liftA2 :: (Traversable f, Applicative f, Num a, Num b, Num c, Reifies s W) => (BVar s a -> BVar s b -> BVar s c) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -- | liftA3, but with Num constraints instead of -- Backprop constraints. -- -- Prior to v0.2.3, required a Num constraint on f d. liftA3 :: (Traversable f, Applicative f, Num a, Num b, Num c, Num d, Reifies s W) => (BVar s a -> BVar s b -> BVar s c -> BVar s d) -> BVar s (f a) -> BVar s (f b) -> BVar s (f c) -> BVar s (f d) -- | fromIntegral, but with Num constraints instead of -- Backprop constraints. fromIntegral :: (Integral a, Integral b, Reifies s W) => BVar s a -> BVar s b -- | realToFrac, but with Num constraints instead of -- Backprop constraints. realToFrac :: (Fractional a, Real a, Fractional b, Real b, Reifies s W) => BVar s a -> BVar s b -- | round, but with Num constraints instead of -- Backprop constraints. round :: (RealFrac a, Integral b, Reifies s W) => BVar s a -> BVar s b -- | fromIntegral', but with Num constraints instead of -- Backprop constraints. fromIntegral' :: (Integral a, RealFrac b, Reifies s W) => BVar s a -> BVar s b -- | Coerce items inside a BVar. coerce :: Coercible a b => BVar s a -> BVar s b
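-- As a hedged, end-to-end sketch (not from the library's own docs) showing how the lifted Prelude functions above compose with the main Numeric.Backprop interface; gradBP, BVar, W, Reifies, and the Backprop instance for lists are assumed to come from Numeric.Backprop:
--
--   import Numeric.Backprop                 -- assumed exports: BVar, W, Reifies, gradBP
--   import qualified Prelude.Backprop as PB
--   
--   -- sum of squares, written on BVars
--   sumSq :: Reifies s W => BVar s [Double] -> BVar s Double
--   sumSq = PB.sum . PB.fmap (^2)
--   
--   -- gradBP sumSq [1,2,3]  ==  [2.0,4.0,6.0]
--   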