| Copyright | (c) Sam Stites 2017 |
|---|---|
| License | BSD3 |
| Maintainer | sam@stites.io |
| Stability | experimental |
| Portability | non-portable |
| Safe Haskell | None |
| Language | Haskell2010 |
Torch.Indef.Static.NN.Math
Description
Synopsis
- abs_updateOutput :: Tensor d -> IO (Tensor d)
- abs_updateGradInput :: Product d ~ Product d' => Tensor d -> Tensor d' -> IO (Tensor d)
- _sqrt_updateOutput :: Tensor d -> Tensor d -> Double -> IO ()
- _sqrt_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> IO ()
- _square_updateOutput :: Tensor d -> Tensor d -> IO ()
- _square_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO ()
- _logSigmoid_updateOutput :: Tensor d -> Tensor d -> Tensor d -> IO ()
- _logSigmoid_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> IO ()
- _sigmoid_updateOutput :: Tensor d -> Tensor d -> IO ()
- _sigmoid_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO ()
- softmax :: KnownDim n => Reifies s W => BVar s (Tensor '[n]) -> BVar s (Tensor '[n])
- softmaxBatch :: KnownDim b => KnownDim n => Reifies s W => BVar s (Tensor '[b, n]) -> BVar s (Tensor '[b, n])
- softmaxN :: forall s i d. Reifies s W => (i < Length d) ~ True => Dimensions d => Dim i -> BVar s (Tensor d) -> BVar s (Tensor d)
- logSoftMax :: KnownDim n => Reifies s W => BVar s (Tensor '[n]) -> BVar s (Tensor '[n])
- logSoftMaxBatch :: KnownDim n => KnownDim b => Reifies s W => BVar s (Tensor '[b, n]) -> BVar s (Tensor '[b, n])
- logSoftMaxN :: forall s i d. Reifies s W => (i < Length d) ~ True => Dimensions d => Dim i -> BVar s (Tensor d) -> BVar s (Tensor d)
- _softPlus_updateOutput :: Tensor d -> Tensor d -> Double -> Double -> IO ()
- _softPlus_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> Double -> Double -> IO ()
- _softShrink_updateOutput :: Tensor d -> Tensor d -> Double -> IO ()
- _softShrink_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Double -> IO ()
- _tanh_updateOutput :: Tensor d -> Tensor d -> IO ()
- _tanh_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO ()
- _hardTanh_updateOutput :: Tensor d -> Tensor d -> Double -> Double -> Bool -> IO ()
- _hardTanh_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Double -> Double -> Bool -> IO ()
Documentation
abs backward-update (updates the layer and bias tensors)
_sqrt_updateOutput :: Tensor d -> Tensor d -> Double -> IO () Source #
sqrt forward pass (updates the output tensor)
_sqrt_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> IO () Source #
sqrt backward-update (updates the layer and bias tensors)
_square_updateOutput :: Tensor d -> Tensor d -> IO () Source #
square forward pass (updates the output tensor)
_square_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO () Source #
square backward-update (updates the layer and bias tensors)
_logSigmoid_updateOutput :: Tensor d -> Tensor d -> Tensor d -> IO () Source #
logSigmoid forward pass (updates the output tensor)
_logSigmoid_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> IO () Source #
logSigmoid backward-update (updates the layer and bias tensors)
_sigmoid_updateOutput :: Tensor d -> Tensor d -> IO () Source #
sigmoid forward pass (updates the output tensor)
_sigmoid_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO () Source #
sigmoid backward-update (updates the layer and bias tensors)
one dimensional version of softmaxN
Arguments
| :: KnownDim b | |
| => KnownDim n | |
| => Reifies s W | |
| => BVar s (Tensor '[b, n]) | input |
| -> BVar s (Tensor '[b, n]) | output |
softmaxN along the mini-batch dimension.
Arguments
| :: Reifies s W | |
| => (i < Length d) ~ True | |
| => Dimensions d | |
| => Dim i | dimension to softmax over |
| -> BVar s (Tensor d) | input |
| -> BVar s (Tensor d) | output |
softmax along the specified dimension of the input tensor.
one dimensional version of logSoftMaxN
Arguments
| :: KnownDim n | |
| => KnownDim b | |
| => Reifies s W | |
| => BVar s (Tensor '[b, n]) | input |
| -> BVar s (Tensor '[b, n]) | output |
logSoftMaxN along the mini-batch dimension.
Arguments
| :: Reifies s W | |
| => (i < Length d) ~ True | |
| => Dimensions d | |
| => Dim i | dimension to logSoftMax over |
| -> BVar s (Tensor d) | input |
| -> BVar s (Tensor d) | output |
logSoftMax along the specified dimension of the input tensor.
_softPlus_updateOutput :: Tensor d -> Tensor d -> Double -> Double -> IO () Source #
softPlus forward pass (updates the output tensor)
_softPlus_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> Double -> Double -> IO () Source #
softPlus backward-update (updates the layer and bias tensors)
_softShrink_updateOutput :: Tensor d -> Tensor d -> Double -> IO () Source #
softShrink forward pass (updates the output tensor)
_softShrink_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Double -> IO () Source #
softShrink backward-update (updates the layer and bias tensors)
_tanh_updateOutput :: Tensor d -> Tensor d -> IO () Source #
tanh forward pass (updates the output tensor)
_tanh_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO () Source #
tanh backward-update (updates the layer and bias tensors)