Copyright | (c) Justin Le 2018 |
---|---|
License | BSD3 |
Maintainer | justin@jle.im |
Stability | experimental |
Portability | non-portable |
Safe Haskell | None |
Language | Haskell2010 |
Automatic differentiation and backpropagation.
Main idea: Write a function computing what you want, and the library automatically provides the gradient of that function as well, for usage with gradient descent and other training methods.
In more detail: instead of working directly with values to produce your result, you work with BVars containing those values. Working with these BVars is made smooth with the usage of lenses and other combinators, and libraries can offer operations on BVars instead of those on normal types directly.
Then, you can use:

evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> b)

to turn a BVar function into the function on actual values a -> b. This has virtually zero overhead over writing the actual function directly.

Then, there's:

gradBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> (a -> a)

to automatically get the gradient, as well, for a given input.
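For example, here is a minimal sketch (the function poly and its body are illustrative, not part of the library):

import Numeric.Backprop

poly :: Reifies s W => BVar s Double -> BVar s Double
poly x = x^2 + 3 * x

>>> evalBP poly 5
40.0
>>> gradBP poly 5
13.0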
See the README for more information and links to demonstrations and tutorials, or dive straight in by reading the docs for BVar.
- data BVar s a
- data W
- backprop :: forall a b. (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, a)
- evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> b
- gradBP :: forall a b. (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a
- backprop2 :: forall a b c. (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (a, b))
- evalBP2 :: (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> c
- gradBP2 :: (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b)
- backpropN :: forall as b. (Every Num as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, Tuple as)
- evalBPN :: forall as b. (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> b
- gradBPN :: forall as b. (Every Num as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> Tuple as
- class EveryC k c as => Every k (c :: k -> Constraint) (as :: [k])
- constVar :: a -> BVar s a
- (^^.) :: forall a b s. (Reifies s W, Num a) => BVar s b -> Lens' b a -> BVar s a
- (.~~) :: forall a b s. (Reifies s W, Num a, Num b) => Lens' b a -> BVar s a -> BVar s b -> BVar s b
- (^^?) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> Maybe (BVar s a)
- (^^..) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> [BVar s a]
- viewVar :: forall a b s. (Reifies s W, Num a) => Lens' b a -> BVar s b -> BVar s a
- setVar :: forall a b s. (Reifies s W, Num a, Num b) => Lens' b a -> BVar s a -> BVar s b -> BVar s b
- sequenceVar :: forall t a s. (Reifies s W, Traversable t, Num a) => BVar s (t a) -> t (BVar s a)
- collectVar :: forall a t s. (Reifies s W, Foldable t, Functor t, Num (t a), Num a) => t (BVar s a) -> BVar s (t a)
- previewVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> Maybe (BVar s a)
- toListOfVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> [BVar s a]
- liftOp :: forall s as b. (Reifies s W, Num b, Every Num as) => Op as b -> Prod (BVar s) as -> BVar s b
- liftOp1 :: forall s a b. (Reifies s W, Num a, Num b) => Op '[a] b -> BVar s a -> BVar s b
- liftOp2 :: forall s a b c. (Reifies s W, Num a, Num b, Num c) => Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c
- liftOp3 :: forall s a b c d. (Reifies s W, Num a, Num b, Num c, Num d) => Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d
- newtype Op as a = Op {}
- op0 :: a -> Op '[] a
- opConst :: (Every Num as, Known Length as) => a -> Op as a
- idOp :: Op '[a] a
- opConst' :: Every Num as => Length as -> a -> Op as a
- op1 :: (a -> (b, b -> a)) -> Op '[a] b
- op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c
- op3 :: (a -> b -> c -> (d, d -> (a, b, c))) -> Op '[a, b, c] d
- opCoerce :: Coercible a b => Op '[a] b
- opTup :: Op as (Tuple as)
- opIso :: (a -> b) -> (b -> a) -> Op '[a] b
- opLens :: Num a => Lens' a b -> Op '[a] b
- data Prod k (f :: k -> *) (a :: [k]) :: forall k. (k -> *) -> [k] -> * where
- pattern (:>) :: forall k (f :: k -> *) (a :: k) (b :: k). f a -> f b -> Prod k f ((:) k a ((:) k b ([] k)))
- only :: f a -> Prod k f ((:) k a ([] k))
- head' :: Prod k f ((:<) k a as) -> f a
- type Tuple = Prod * I
- pattern (::<) :: forall a (as :: [*]). a -> Tuple as -> Tuple ((:<) * a as)
- only_ :: a -> Tuple ((:) * a ([] *))
- newtype I a = I { getI :: a }
- class Reifies k (s :: k) a | s -> a
Types
A BVar s a is a value of type a that can be "backpropagated".

Functions referring to BVars are tracked by the library and can be automatically differentiated to get their gradients and results.

For simple numeric values, you can use its Num, Fractional, and Floating instances to manipulate them as if they were the numbers they represent.
If a contains items, the items can be accessed and extracted using lenses. A Lens' b a can be used to access an a inside a b, using ^^. (viewVar):

(^.)  ::        a -> Lens' a b ->        b
(^^.) :: BVar s a -> Lens' a b -> BVar s b
There is also ^^? (previewVar), to use a Prism' or Traversal' to extract a target that may or may not be present (which can implement pattern matching), ^^.. (toListOfVar) to use a Traversal' to extract all targets inside a BVar, and .~~ (setVar) to set and update values inside a BVar.
For more complex operations, libraries can provide functions on BVars using liftOp and related functions. This is how you can create primitive functions that users can use to manipulate your library's values.
For example, the hmatrix library has a matrix-vector multiplication function, #> :: L m n -> R n -> R m.

A library could instead provide a function #> :: BVar s (L m n) -> BVar s (R n) -> BVar s (R m), which the user can then use to manipulate their BVars of L m ns and R ns, etc.
See Numeric.Backprop and documentation for liftOp
for more
information.
Running
backprop :: forall a b. (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> (b, a) Source #
Turn a function BVar s a -> BVar s b into the function a -> b that it represents, also computing its gradient (an a) as well.
The Rank-N type forall s. Reifies s W => ... is used to ensure that BVars do not leak out of the context (similar to how it is used in Control.Monad.ST), and also as a reference to an ephemeral Wengert tape used to track the graph of references.
Note that every type involved has to be an instance of Num. This is because gradients all need to be "summable" (which is implemented using sum and +), and we also need to be able to generate gradients of 1 and 0. Really, only the + and fromInteger methods are used from the Num typeclass.
This might change in the future, to allow easier integration with tuples (which typically do not have a Num instance), and to potentially make types easier to use (by only requiring +, 0, and 1, and not the rest of the Num class).
See the README for a more detailed discussion on this issue.
If you need a Num
instance for tuples, you can use the canonical 2-
and 3-tuples for the library in Numeric.Backprop.Tuple. If you need
one for larger tuples, consider making a custom product type instead
(making Num instances with something like
one-liner-instances).
You can also use the orphan instances in the
NumInstances package
(in particular, Data.NumInstances.Tuple) if you are writing an
application and do not have to worry about orphan instances.
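As a brief illustration, a minimal sketch (the function hyp is hypothetical, chosen only for demonstration):

hyp :: Reifies s W => BVar s Double -> BVar s Double
hyp x = sqrt (x * x + 1)

>>> backprop hyp 3
(3.1622776601683795, 0.9486832980505138)

The first component is the result (\(\sqrt{10}\)), and the second is the gradient (\(3 / \sqrt{10}\)).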
evalBP :: (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> b Source #
Turn a function BVar s a -> BVar s b into the function a -> b that it represents.
Benchmarks show that this should have virtually no overhead over directly writing an a -> b. BVar is, in this situation, a zero-cost abstraction, performance-wise.
Has a nice advantage over using backprop
in that it doesn't require
Num
constraints on the input and output.
See documentation of backprop
for more information.
gradBP :: forall a b. (Num a, Num b) => (forall s. Reifies s W => BVar s a -> BVar s b) -> a -> a Source #
Take a function BVar s a -> BVar s b, interpreted as a function a -> b, and compute its gradient with respect to its input.
The resulting a -> a
tells how the input (and its components) affects
the output. Positive numbers indicate that the result will vary in the
same direction as any adjustment in the input. Negative numbers
indicate that the result will vary in the opposite direction as any
adjustment in the input. Larger numbers indicate a greater sensitivity
of change, and small numbers indicate lower sensitivity.
See documentation of backprop
for more information.
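Because gradBP produces a plain a -> a, it can be plugged directly into gradient descent. A minimal sketch (gdStep and its learning rate are illustrative, not part of the library; requires RankNTypes):

gdStep
    :: Double                                               -- learning rate
    -> (forall s. Reifies s W => BVar s Double -> BVar s Double)
    -> Double                                               -- current parameter
    -> Double                                               -- updated parameter
gdStep rate f x = x - rate * gradBP f x

Iterating gdStep moves x in the direction that decreases f.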
Multiple inputs
backprop2 :: forall a b c. (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (c, (a, b)) Source #
gradBP2 :: (Num a, Num b, Num c) => (forall s. Reifies s W => BVar s a -> BVar s b -> BVar s c) -> a -> b -> (a, b) Source #
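For instance, a minimal sketch with a hypothetical two-argument function:

f2 :: Reifies s W => BVar s Double -> BVar s Double -> BVar s Double
f2 x y = x * y + sin x

>>> gradBP2 f2 1 2
(2.5403023058681398, 1.0)

which is \( \left< y + \cos x, x \right> \) evaluated at \((1, 2)\).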
backpropN :: forall as b. (Every Num as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> (b, Tuple as) Source #
backprop generalized to multiple inputs of different types. See Numeric.Backprop.Op for a mini-tutorial on heterogeneous lists.
Not strictly necessary, because you can always uncurry a function by passing in all of the inputs in a data type containing all of the arguments or a tuple from Numeric.Backprop.Tuple. You could also pass in a giant tuple with NumInstances. However, this can be convenient if you don't want to make a custom larger tuple type or pull in orphan instances. This could potentially also be more performant.
A Prod (BVar s) '[Double, Float, Double], for instance, is a tuple of BVar s Double, BVar s Float, and BVar s Double, and can be pattern matched on using :< (cons) and 'Ø' (nil).

Tuples can be built and pattern matched on using ::< (cons) and 'Ø' (nil), as well.
The Every Num as in the constraint says that every value in the type-level list as must have a Num instance. This means you can use, say, '[Double, Float, Int], but not '[Double, Bool, String].

If you stick to concrete, monomorphic usage of this (with specific types, typed into source code, known at compile-time), then Every Num as should be fulfilled automatically.
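For instance, a minimal sketch (the function addMul is illustrative; requires DataKinds), using the Prod and Tuple constructors described above:

addMul :: Reifies s W => Prod (BVar s) '[Double, Double] -> BVar s Double
addMul (x :< y :< Ø) = x * y + x

>>> backpropN addMul (2 ::< 3 ::< Ø)
(8.0, 4.0 ::< 2.0 ::< Ø)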
evalBPN :: forall as b. (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> b Source #
evalBP
generalized to multiple inputs of different types. See
documentation for backpropN
for more details.
gradBPN :: forall as b. (Every Num as, Num b) => (forall s. Reifies s W => Prod (BVar s) as -> BVar s b) -> Tuple as -> Tuple as Source #
class EveryC k c as => Every k (c :: k -> Constraint) (as :: [k]) #
Manipulating BVar
constVar :: a -> BVar s a Source #
Lift a value into a BVar
representing a constant value.
This value will not be considered an input, and its gradients will not be backpropagated.
(^^.) :: forall a b s. (Reifies s W, Num a) => BVar s b -> Lens' b a -> BVar s a infixl 8 Source #
An infix version of viewVar, meant to evoke parallels to ^. from lens.

With normal values, you can extract something from that value with a lens:

x ^. myLens

would extract a piece of x :: b, specified by myLens :: Lens' b a. The result has type a.

xVar ^^. myLens

would extract a piece out of xVar :: BVar s b (a BVar holding a b), specified by myLens :: Lens' b a. The result has type BVar s a (a BVar holding an a).

This is the main way to pull out values from BVars of container types.
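For instance, a minimal sketch using the standard _1 lens from the lens package (note that to backpropagate through a plain tuple end-to-end you would also need a Num instance for the tuple, e.g. from NumInstances or by using the types in Numeric.Backprop.Tuple):

import Control.Lens (_1)

firstVar :: Reifies s W => BVar s (Double, Double) -> BVar s Double
firstVar v = v ^^. _1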
(.~~) :: forall a b s. (Reifies s W, Num a, Num b) => Lens' b a -> BVar s a -> BVar s b -> BVar s b infixl 8 Source #
An infix version of setVar, meant to evoke parallels to .~ from lens.

With normal values, you can set something in a value with a lens:

x & myLens .~ y

would "set" a part of x :: b, specified by myLens :: Lens' b a, to a new value y :: a.

xVar & myLens .~~ yVar

would "set" a part of xVar :: BVar s b (a BVar holding a b), specified by myLens :: Lens' b a, to a new value given by yVar :: BVar s a. The result is a new (updated) value of type BVar s b.

This is the main way to set values inside BVars of container types.
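A minimal sketch, with a hypothetical two-field record, a hand-written lens, and a naive element-wise Num instance so that the record itself can be backpropagated:

import Control.Lens (Lens')
import Data.Function ((&))

data Pt = Pt { _ptX :: Double, _ptY :: Double }

-- a van Laarhoven lens into the first field
ptX :: Lens' Pt Double
ptX f (Pt x y) = (\x' -> Pt x' y) <$> f x

instance Num Pt where
    Pt a b + Pt c d = Pt (a + c) (b + d)
    Pt a b * Pt c d = Pt (a * c) (b * d)
    Pt a b - Pt c d = Pt (a - c) (b - d)
    abs    (Pt a b) = Pt (abs a) (abs b)
    signum (Pt a b) = Pt (signum a) (signum b)
    fromInteger n   = Pt (fromInteger n) (fromInteger n)

-- overwrite the x field inside a BVar
setX :: Reifies s W => BVar s Double -> BVar s Pt -> BVar s Pt
setX newX p = p & ptX .~~ newX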
(^^?) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> Maybe (BVar s a) Source #
An infix version of previewVar, meant to evoke parallels to ^? from lens.

With normal values, you can (potentially) extract something from that value with a prism or traversal:

x ^? myPrism

would (potentially) extract a piece of x :: b, specified by myPrism :: Traversal' b a. The result has type Maybe a.

xVar ^^? myPrism

would (potentially) extract a piece out of xVar :: BVar s b (a BVar holding a b), specified by myPrism :: Prism' b a. The result has type Maybe (BVar s a) (a Maybe of a BVar holding an a).
This is intended to be used with Prism'
s (which hits at most one
target), but will actually work with any Traversal'
. If the
traversal hits more than one target, the first one found will be
extracted.
This can be used to "pattern match" on BVar
s, by using prisms on
constructors.
Note that many automatically-generated prisms by the lens package use
tuples, which cannot normally be backpropagated (because they do not
have a Num
instance).
If you are writing an application or don't have to worry about orphan
instances, you can pull in the orphan instances from
NumInstances.
Alternatively, you can chain those prisms with conversions to the
anonymous canonical strict tuple types in Numeric.Backprop.Tuple,
which do have Num
instances.
myPrism                   :: Prism' c (a, b)
myPrism . iso tupT2 t2Tup :: Prism' c (T2 a b)
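As a usage example, here is a minimal sketch of "pattern matching" on a Maybe inside a BVar, using the _Just prism from the lens package:

import Control.Lens (_Just)

-- fall back to a default BVar if the Maybe inside is Nothing
fromMaybeVar :: Reifies s W => BVar s Double -> BVar s (Maybe Double) -> BVar s Double
fromMaybeVar def v = case v ^^? _Just of
    Just x  -> x
    Nothing -> def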
(^^..) :: forall b a s. (Num a, Reifies s W) => BVar s b -> Traversal' b a -> [BVar s a] Source #
An infix version of toListOfVar, meant to evoke parallels to ^.. from lens.

With normal values, you can extract all targets of a Traversal from that value with ^..:

x ^.. myTraversal

would extract all targets inside of x :: b, specified by myTraversal :: Traversal' b a. The result has type [a].

xVar ^^.. myTraversal

would extract all targets inside of xVar :: BVar s b (a BVar holding a b), specified by myTraversal :: Traversal' b a. The result has type [BVar s a] (a list of BVars holding as).
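For instance, a minimal sketch summing every element of a list inside a BVar, using the standard traverse Traversal (backpropagating this end-to-end would also require a Num instance for [Double]):

sumElems :: Reifies s W => BVar s [Double] -> BVar s Double
sumElems v = sum (v ^^.. traverse)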
setVar :: forall a b s. (Reifies s W, Num a, Num b) => Lens' b a -> BVar s a -> BVar s b -> BVar s b Source #
sequenceVar :: forall t a s. (Reifies s W, Traversable t, Num a) => BVar s (t a) -> t (BVar s a) Source #
Extract all of the values inside a BVar of a Traversable container, as a container of BVars.
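For instance, a minimal sketch computing the mean of a list inside a BVar (again, fully backpropagating this requires a Num instance for the list type):

meanVar :: Reifies s W => BVar s [Double] -> BVar s Double
meanVar v = sum xs / fromIntegral (length xs)
  where
    xs = sequenceVar v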
collectVar :: forall a t s. (Reifies s W, Foldable t, Functor t, Num (t a), Num a) => t (BVar s a) -> BVar s (t a) Source #
previewVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> Maybe (BVar s a) Source #
Using a Traversal', extract a single value inside a BVar, if it exists. If more than one traversal target exists, returns the first.

Meant to evoke parallels to preview from lens. Really only intended to be used with Prism's, or up-to-one target traversals.
See documentation for ^^?
for more information.
toListOfVar :: forall b a s. (Num a, Reifies s W) => Traversal' b a -> BVar s b -> [BVar s a] Source #
Using a Traversal'
, extract all targeted values inside a BVar
.
Meant to evoke parallels to toListOf
from lens.
See documentation for ^^..
for more information.
With Ops
This library provides a few primitive actions for manipulating BVars and the values inside them, including its Num, Fractional, and Floating instances, and lens-based operations like ^^., .~~, ^^?, and ^^..
However, the power of this library comes from manipulating many different types from libraries, like matrices and vectors. Libraries can provide their own BVar s a -> BVar s b functions, alongside (or in lieu of) a -> b functions for their types.
The easiest way to create a BVar function is to use liftOp with an Op constructor. For example, imagine a vector library providing a dot product function. We can write this using liftOp2 and op2:

dot :: BVar s Vec -> BVar s Vec -> BVar s Double
dot = liftOp2 . op2 $ \xs ys ->
    ( sum (zipWith (*) xs ys)
    , \g -> (map (*g) ys, map (*g) xs)
    )
We provide a function that, given the two inputs, returns:
- The result of the function on those two inputs
- A function taking the "total derivative", and returning the gradient with respect to each of the inputs.
See documentation in Numeric.Backprop.Op for more information on the second part (the gradient).
Nice Ops are how backprop links together BVars and tracks them to determine their gradient. Ideally, users would never have to deal with these when backpropagating their own functions, and library authors providing their matrix and vector operations, etc. would provide BVar variants of their normal operations.

In fact, BVar operations could even be defined instead of normal operations, since it is easy to go from BVar s a -> BVar s b to a -> b, using evalBP, and this carries virtually zero overhead, so some libraries might even provide BVar versions by default.
liftOp :: forall s as b. (Reifies s W, Num b, Every Num as) => Op as b -> Prod (BVar s) as -> BVar s b Source #
Lift an Op
with an arbitrary number of inputs to a function on the
appropriate number of BVar
s.
Should preferably be used only by libraries to provide primitive BVar
functions for their types for users.
See Numeric.Backprop and documentation for liftOp
for more
information, and Numeric.Backprop.Op for a mini-tutorial on using
Prod
and Tuple
.
liftOp1 :: forall s a b. (Reifies s W, Num a, Num b) => Op '[a] b -> BVar s a -> BVar s b Source #
Lift an Op
with a single input to be a function on a single BVar
.
Should preferably be used only by libraries to provide primitive BVar
functions for their types for users.
See Numeric.Backprop and documentation for liftOp
for more
information.
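For instance, a minimal sketch of a library exposing a squaring function by lifting the same Op that op1's documentation constructs below:

squareVar :: (Reifies s W, Num a) => BVar s a -> BVar s a
squareVar = liftOp1 . op1 $ \x -> (x * x, \d -> 2 * d * x)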
liftOp2 :: forall s a b c. (Reifies s W, Num a, Num b, Num c) => Op '[a, b] c -> BVar s a -> BVar s b -> BVar s c Source #
Lift an Op with two inputs to be a function on two BVars.
Should preferably be used only by libraries to provide primitive BVar
functions for their types for users.
See Numeric.Backprop and documentation for liftOp
for more
information.
liftOp3 :: forall s a b c d. (Reifies s W, Num a, Num b, Num c, Num d) => Op '[a, b, c] d -> BVar s a -> BVar s b -> BVar s c -> BVar s d Source #
Lift an Op with three inputs to be a function on three BVars.
Should preferably be used only by libraries to provide primitive BVar
functions for their types for users.
See Numeric.Backprop and documentation for liftOp
for more
information.
Op
An Op as a describes a differentiable function from as to a.
For example, a value of type

Op '[Int, Bool] Double

is a function from an Int and a Bool, returning a Double. It can be differentiated to give a gradient of an Int and a Bool if given a total derivative for the Double. If we call Bool \(2\), then, mathematically, it is akin to a:

\[ f : \mathbb{Z} \times 2 \rightarrow \mathbb{R} \]
See runOp, gradOp, and gradOpWith for examples on how to run it, and Op for instructions on creating it.

It is simpler to not use this type constructor directly, and instead use the op0, op1, op2, and op3 helper smart constructors.
See Numeric.Backprop.Op for a mini-tutorial on using Prod
and
Tuple
.
Op | Construct an Op. See the module documentation for Numeric.Backprop.Op for more details on the function that this constructor expects. |
(Known [*] (Length *) as, Every * Floating as, Every * Fractional as, Every * Num as, Floating a) => Floating (Op as a) Source # | |
(Known [*] (Length *) as, Every * Fractional as, Every * Num as, Fractional a) => Fractional (Op as a) Source # | |
(Known [*] (Length *) as, Every * Num as, Num a) => Num (Op as a) Source # | |
Creation
opConst :: (Every Num as, Known Length as) => a -> Op as a Source #
An Op
that ignores all of its inputs and returns a given constant
value.
>>>
gradOp' (opConst 10) (1 ::< 2 ::< 3 ::< Ø)
(10, 0 ::< 0 ::< 0 ::< Ø)
opConst' :: Every Num as => Length as -> a -> Op as a Source #
A version of opConst taking an explicit Length, indicating the number of inputs and their types.

Requiring an explicit Length is mostly useful for rare "extremely polymorphic" situations, where GHC can't infer the type and length of the expected input tuple. If you ever actually explicitly write down as as a list of types, you should be able to just use opConst.
Giving gradients directly
op1 :: (a -> (b, b -> a)) -> Op '[a] b Source #
Create an Op of a function taking one input, by giving its explicit derivative. The function should return a tuple containing the result of the function, and also a function taking the derivative of the result and returning the derivative of the input.
If we have
\[ \eqalign{ f &: \mathbb{R} \rightarrow \mathbb{R}\cr y &= f(x)\cr z &= g(y) } \]
Then the derivative \( \frac{dz}{dx} \) would be:
\[ \frac{dz}{dx} = \frac{dz}{dy} \frac{dy}{dx} \]
If our Op
represents \(f\), then the second item in the resulting
tuple should be a function that takes \(\frac{dz}{dy}\) and returns
\(\frac{dz}{dx}\).
As an example, here is an Op
that squares its input:
square :: Num a => Op '[a] a
square = op1 $ \x -> (x*x, \d -> 2 * d * x)
Remember that, generally, end users shouldn't directly construct Op
s;
they should be provided by libraries or generated automatically.
op2 :: (a -> b -> (c, c -> (a, b))) -> Op '[a, b] c Source #
Create an Op of a function taking two inputs, by giving its explicit gradient. The function should return a tuple containing the result of the function, and also a function taking the derivative of the result and returning the derivative of the inputs.
If we have
\[ \eqalign{ f &: \mathbb{R}^2 \rightarrow \mathbb{R}\cr z &= f(x, y)\cr k &= g(z) } \]
Then the gradient \( \left< \frac{\partial k}{\partial x}, \frac{\partial k}{\partial y} \right> \) would be:
\[ \left< \frac{\partial k}{\partial x}, \frac{\partial k}{\partial y} \right> = \left< \frac{dk}{dz} \frac{\partial z}{\partial x}, \frac{dk}{dz} \frac{\partial z}{\partial y} \right> \]
If our Op
represents \(f\), then the second item in the resulting
tuple should be a function that takes \(\frac{dk}{dz}\) and returns
\( \left< \frac{\partial k}{\partial x}, \frac{\partial k}{\partial y} \right> \).
As an example, here is an Op
that multiplies its inputs:
mul :: Num a => Op '[a, a] a
mul = op2 $ \x y -> (x*y, \d -> (d*y, x*d))
Remember that, generally, end users shouldn't directly construct Op
s;
they should be provided by libraries or generated automatically.
From Isomorphisms
opTup :: Op as (Tuple as) Source #
An Op
that takes as
and returns exactly the input tuple.
>>>
gradOp' opTup (1 ::< 2 ::< 3 ::< Ø)
(1 ::< 2 ::< 3 ::< Ø, 1 ::< 1 ::< 1 ::< Ø)
opIso :: (a -> b) -> (b -> a) -> Op '[a] b Source #
An Op that runs the input value through an isomorphism.

Warning: This is unsafe! It assumes that the isomorphisms themselves have derivative 1, so will break for things like exponentiating. Basically, don't use this for any "numeric" isomorphisms.
Utility
Inductive tuples/heterogeneous lists
data Prod k (f :: k -> *) (a :: [k]) :: forall k. (k -> *) -> [k] -> * where #
Witness ØC ØC (Prod k f (Ø k)) | |
Functor1 k [k] (Prod k) | |
Foldable1 k [k] (Prod k) | |
Traversable1 k [k] (Prod k) | |
IxFunctor1 k [k] (Index k) (Prod k) | |
IxFoldable1 k [k] (Index k) (Prod k) | |
IxTraversable1 k [k] (Index k) (Prod k) | |
TestEquality k f => TestEquality [k] (Prod k f) | |
BoolEquality k f => BoolEquality [k] (Prod k f) | |
Eq1 k f => Eq1 [k] (Prod k f) | |
Ord1 k f => Ord1 [k] (Prod k f) | |
Show1 k f => Show1 [k] (Prod k f) | |
Read1 k f => Read1 [k] (Prod k f) | |
(Known [k] (Length k) as, Every k (Known k f) as) => Known [k] (Prod k f) as | |
(Witness p q (f a2), Witness s t (Prod a1 f as)) => Witness (p, s) (q, t) (Prod a1 f ((:<) a1 a2 as)) | |
ListC ((<$>) * Constraint Eq ((<$>) k * f as)) => Eq (Prod k f as) | |
(ListC ((<$>) * Constraint Eq ((<$>) k * f as)), ListC ((<$>) * Constraint Ord ((<$>) k * f as))) => Ord (Prod k f as) | |
ListC ((<$>) * Constraint Show ((<$>) k * f as)) => Show (Prod k f as) | |
type WitnessC ØC ØC (Prod k f (Ø k)) | |
type KnownC [k] (Prod k f) as | |
type WitnessC (p, s) (q, t) (Prod a1 f ((:<) a1 a2 as)) | |
pattern (:>) :: forall k (f :: k -> *) (a :: k) (b :: k). f a -> f b -> Prod k f ((:) k a ((:) k b ([] k))) infix 6 #
Construct a two element Prod. Since the precedence of (:>) is higher than (:<), we can conveniently write lists like:
>>>
a :< b :> c
Which is identical to:
>>>
a :< b :< c :< Ø
pattern (::<) :: forall a (as :: [*]). a -> Tuple as -> Tuple ((:<) * a as) infixr 5 #
Cons onto a Tuple.
Misc
class Reifies k (s :: k) a | s -> a #
KnownNat n => Reifies Nat n Integer | |
KnownSymbol n => Reifies Symbol n String | |
Reifies * Z Int | |
Reifies * n Int => Reifies * (D n) Int | |
Reifies * n Int => Reifies * (SD n) Int | |
Reifies * n Int => Reifies * (PD n) Int | |
(B * b0, B * b1, B * b2, B * b3, B * b4, B * b5, B * b6, B * b7, (~) * w0 (W b0 b1 b2 b3), (~) * w1 (W b4 b5 b6 b7)) => Reifies * (Stable w0 w1 a) a | |