{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -fno-cse #-}
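-- | Static-tensor wrappers for THNN's pointwise math layers: abs, sqrt,
-- square, (log-)sigmoid, tanh, hardtanh, softplus, softshrink, and the
-- softmax family. The softmax functions are additionally lifted into
-- backprop's 'BVar' so they compose with the rest of the NN modules.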
module Torch.Indef.Static.NN.Math where
import Data.Singletons.Prelude.Ord (type (<))
import Data.Singletons.Prelude.List
import Numeric.Dimensions hiding (Length)
import Numeric.Backprop
import System.IO.Unsafe
import Torch.Indef.Static.Tensor.Math.Reduce (sumall, maxall)
import Torch.Indef.Static.Tensor.Math.Pointwise ((^*^), (^-^))
import Torch.Indef.Static.Tensor.Math.Pairwise ((^-), (^/))
import Torch.Indef.Types
import Torch.Indef.Static.Tensor
import Torch.Indef.Static.NN.Backprop ()
import qualified Torch.Indef.Dynamic.NN as Dynamic
import qualified Torch.Indef.Static.Tensor.Math.Pointwise.Floating as Torch

-- | Abs forward pass: allocates a fresh output tensor and fills it with the
-- elementwise absolute value of the input.
abs_updateOutput :: Tensor d -> IO (Tensor d)
abs_updateOutput i =
  let o = empty
  in Dynamic._abs_updateOutput (asDynamic i) (asDynamic o)
  >> pure o

-- | Abs backward pass: computes the gradient with respect to the input. Note
-- that the constraint only requires the input and gradOutput shapes to agree
-- in their total number of elements, not in their dimensions.
abs_updateGradInput
  :: (Product d ~ Product d')
  => Tensor d       -- ^ input
  -> Tensor d'      -- ^ gradOutput
  -> IO (Tensor d)  -- ^ gradInput
abs_updateGradInput i go =
  let gi = empty
  in Dynamic._abs_updateGradInput (asDynamic i) (asDynamic go) (asDynamic gi)
  >> pure gi
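
-- A minimal usage sketch of the abs forward/backward pair ('constant' is
-- assumed to come from the static tensor creation API; treat this as
-- illustrative rather than canonical):
--
-- @
-- absExample :: IO ()
-- absExample = do
--   let i = constant (-2) :: Tensor '[4]
--   o  <- abs_updateOutput i       -- elementwise |i|
--   gi <- abs_updateGradInput i o  -- gradient w.r.t. the input
--   print gi
-- @

-- The @_*_updateOutput@ and @_*_updateGradInput@ functions below are thin
-- wrappers over the dynamic THNN kernels: the caller allocates every tensor
-- (including the output or gradInput), and the kernel mutates it in place.
-- See the sketch at the end of this module for the calling pattern.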

-- | Raw forward pass for the pointwise sqrt layer (the trailing 'Double' is
-- passed straight through to the underlying THNN kernel).
_sqrt_updateOutput :: Tensor d -> Tensor d -> Double -> IO ()
_sqrt_updateOutput t0 t1 = Dynamic._sqrt_updateOutput (asDynamic t0) (asDynamic t1)

-- | Raw backward pass for the pointwise sqrt layer.
_sqrt_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> IO ()
_sqrt_updateGradInput t0 t1 t2 t3 = Dynamic._sqrt_updateGradInput (asDynamic t0) (asDynamic t1) (asDynamic t2) (asDynamic t3)

-- | Raw forward pass for the pointwise square layer.
_square_updateOutput :: Tensor d -> Tensor d -> IO ()
_square_updateOutput t0 t1 = Dynamic._square_updateOutput (asDynamic t0) (asDynamic t1)

-- | Raw backward pass for the pointwise square layer.
_square_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO ()
_square_updateGradInput t0 t1 t2 = Dynamic._square_updateGradInput (asDynamic t0) (asDynamic t1) (asDynamic t2)

-- | Raw forward pass for the log-sigmoid layer; the third tensor is THNN's
-- scratch buffer, which the backward pass reuses.
_logSigmoid_updateOutput :: Tensor d -> Tensor d -> Tensor d -> IO ()
_logSigmoid_updateOutput t0 t1 t2 = Dynamic._logSigmoid_updateOutput (asDynamic t0) (asDynamic t1) (asDynamic t2)

-- | Raw backward pass for the log-sigmoid layer.
_logSigmoid_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> IO ()
_logSigmoid_updateGradInput t0 t1 t2 t3 = Dynamic._logSigmoid_updateGradInput (asDynamic t0) (asDynamic t1) (asDynamic t2) (asDynamic t3)

-- | Raw forward pass for the sigmoid layer.
_sigmoid_updateOutput :: Tensor d -> Tensor d -> IO ()
_sigmoid_updateOutput t0 t1 = Dynamic._sigmoid_updateOutput (asDynamic t0) (asDynamic t1)

-- | Raw backward pass for the sigmoid layer. Sigmoid's gradient can be
-- expressed in terms of its output alone (@gout * out * (1 - out)@), which is
-- why the kernel works from the forward output rather than the input.
_sigmoid_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO ()
_sigmoid_updateGradInput t0 t1 t2 = Dynamic._sigmoid_updateGradInput (asDynamic t0) (asDynamic t1) (asDynamic t2)

-- | One-dimensional softmax: rescales a vector so its entries lie in (0, 1)
-- and sum to 1, lifted into backprop's 'BVar'.
softmax
  :: KnownDim n
  => Reifies s W
  => BVar s (Tensor '[n])  -- ^ input
  -> BVar s (Tensor '[n])  -- ^ output
softmax = softmaxN (dim :: Dim 0)

-- | Batched softmax: applies softmax along the feature dimension (dim 1) of
-- a @batch x features@ tensor.
softmaxBatch
  :: KnownDim b
  => KnownDim n
  => Reifies s W
  => BVar s (Tensor '[b, n])  -- ^ input
  -> BVar s (Tensor '[b, n])  -- ^ output
softmaxBatch = softmaxN (dim :: Dim 1)
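
-- A usage sketch with backprop's 'evalBP' and 'gradBP' (both from
-- Numeric.Backprop). 'constant' is assumed to come from the static tensor
-- creation API:
--
-- @
-- probs :: Tensor '[3]
-- probs = evalBP softmax (constant 1)  -- uniform input => uniform probabilities
--
-- grads :: Tensor '[3]
-- grads = gradBP softmax (constant 1)  -- gradient w.r.t. the input, seeded by backprop's 'one'
-- @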

-- | Softmax along dimension @i@ of an arbitrary tensor; the constraint
-- @(i < Length d) ~ True@ ensures at compile time that @i@ is a valid
-- dimension of the shape @d@.
softmaxN
  :: forall s i d
  .  Reifies s W
  => (i < Length d) ~ True
  => Dimensions d
  => Dim i              -- ^ dimension to softmax over
  -> BVar s (Tensor d)  -- ^ input
  -> BVar s (Tensor d)  -- ^ output
softmaxN d = liftOp1 . op1 $ \inp ->
  let idim = fromIntegral (dimVal d)
      out  = updateOutput inp idim
  -- share the forward result with the backward pass
  in (out, \gout -> updateGradInput inp gout out idim)
  where
    {-# NOINLINE updateOutput #-}
    updateOutput :: Dimensions d => Tensor d -> Integer -> Tensor d
    updateOutput inp i = unsafePerformIO $ let out = new in do
      Dynamic._softMax_updateOutput
        (asDynamic inp)
        (asDynamic out)
        i
      pure out

    {-# NOINLINE updateGradInput #-}
    updateGradInput
      :: Dimensions d
      => Tensor d  -- ^ input
      -> Tensor d  -- ^ gradOutput
      -> Tensor d  -- ^ output
      -> Integer   -- ^ dimension
      -> Tensor d  -- ^ gradInput
    updateGradInput inp gout out i = unsafePerformIO $ let gin = new in do
      Dynamic._softMax_updateGradInput
        (asDynamic inp)
        (asDynamic gout)
        (asDynamic gin)
        (asDynamic out)
        i
      pure gin

-- | One-dimensional log-softmax: numerically stabler than composing 'log'
-- with 'softmax', lifted into backprop's 'BVar'.
logSoftMax
  :: KnownDim n
  => Reifies s W
  => BVar s (Tensor '[n])  -- ^ input
  -> BVar s (Tensor '[n])  -- ^ output
logSoftMax = logSoftMaxN (dim :: Dim 0)

-- | Batched log-softmax: applies log-softmax along the feature dimension
-- (dim 1) of a @batch x features@ tensor.
logSoftMaxBatch
  :: KnownDim b
  => KnownDim n
  => Reifies s W
  => BVar s (Tensor '[b, n])  -- ^ input
  -> BVar s (Tensor '[b, n])  -- ^ output
logSoftMaxBatch = logSoftMaxN (dim :: Dim 1)
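
-- Mathematically @logSoftMax x@ agrees with @log (softmax x)@, but the fused
-- kernel avoids materializing the intermediate probabilities. A sketch
-- ('constant' assumed from the static tensor creation API):
--
-- @
-- lp :: Tensor '[5]
-- lp = evalBP logSoftMax (constant 0)  -- every entry is log (1/5)
-- @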

-- | Log-softmax along dimension @i@ of an arbitrary tensor; the constraint
-- @(i < Length d) ~ True@ ensures at compile time that @i@ is a valid
-- dimension of the shape @d@.
logSoftMaxN
  :: forall s i d
  .  Reifies s W
  => (i < Length d) ~ True
  => Dimensions d
  => Dim i              -- ^ dimension to log-softmax over
  -> BVar s (Tensor d)  -- ^ input
  -> BVar s (Tensor d)  -- ^ output
logSoftMaxN i = liftOp1 . op1 $ \inp ->
  let out = updateOutput inp i
  -- reuse the forward result in the backward pass instead of recomputing it
  in (out, \gout -> updateGradInput inp gout out i)
  where
    {-# NOINLINE updateOutput #-}
    updateOutput :: Tensor d -> Dim i -> Tensor d
    updateOutput inp i = unsafePerformIO $ let out = new in
      Dynamic._logSoftMax_updateOutput (asDynamic inp) (asDynamic out) (fromIntegral $ dimVal i)
        >> pure out

    {-# NOINLINE updateGradInput #-}
    updateGradInput
      :: Tensor d  -- ^ input
      -> Tensor d  -- ^ gradOutput
      -> Tensor d  -- ^ output
      -> Dim i     -- ^ dimension
      -> Tensor d  -- ^ gradInput
    updateGradInput inp gout out i = unsafePerformIO $ let gin = new in
      Dynamic._logSoftMax_updateGradInput
        (asDynamic inp)
        (asDynamic gout)
        (asDynamic gin)
        (asDynamic out)
        (fromIntegral $ dimVal i)
        >> pure gin

-- | Raw forward pass for the softplus layer; the two 'Double's are the beta
-- and threshold parameters passed through to the THNN kernel.
_softPlus_updateOutput :: Tensor d -> Tensor d -> Double -> Double -> IO ()
_softPlus_updateOutput t0 t1 = Dynamic._softPlus_updateOutput (asDynamic t0) (asDynamic t1)

-- | Raw backward pass for the softplus layer.
_softPlus_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Tensor d -> Double -> Double -> IO ()
_softPlus_updateGradInput t0 t1 t2 t3 = Dynamic._softPlus_updateGradInput (asDynamic t0) (asDynamic t1) (asDynamic t2) (asDynamic t3)

-- | Raw forward pass for the softshrink layer; the 'Double' is the shrinkage
-- threshold (lambda).
_softShrink_updateOutput :: Tensor d -> Tensor d -> Double -> IO ()
_softShrink_updateOutput t0 t1 = Dynamic._softShrink_updateOutput (asDynamic t0) (asDynamic t1)

-- | Raw backward pass for the softshrink layer.
_softShrink_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Double -> IO ()
_softShrink_updateGradInput t0 t1 t2 = Dynamic._softShrink_updateGradInput (asDynamic t0) (asDynamic t1) (asDynamic t2)

-- | Raw forward pass for the tanh layer.
_tanh_updateOutput :: Tensor d -> Tensor d -> IO ()
_tanh_updateOutput t0 t1 = Dynamic._tanh_updateOutput (asDynamic t0) (asDynamic t1)

-- | Raw backward pass for the tanh layer.
_tanh_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> IO ()
_tanh_updateGradInput t0 t1 t2 = Dynamic._tanh_updateGradInput (asDynamic t0) (asDynamic t1) (asDynamic t2)

-- | Raw forward pass for the hardtanh layer; the 'Double's are the min and
-- max clamp values and the 'Bool' is THNN's in-place flag.
_hardTanh_updateOutput :: Tensor d -> Tensor d -> Double -> Double -> Bool -> IO ()
_hardTanh_updateOutput t0 t1 = Dynamic._hardTanh_updateOutput (asDynamic t0) (asDynamic t1)

-- | Raw backward pass for the hardtanh layer.
_hardTanh_updateGradInput :: Tensor d -> Tensor d -> Tensor d -> Double -> Double -> Bool -> IO ()
_hardTanh_updateGradInput t0 t1 t2 = Dynamic._hardTanh_updateGradInput (asDynamic t0) (asDynamic t1) (asDynamic t2)
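
-- A sketch of the raw calling convention, wrapping '_hardTanh_updateOutput'
-- in an allocating helper ('new' builds a tensor of the statically-known
-- shape, as in the softmax internals above; 'hardTanhIO' is a hypothetical
-- name, not part of this module's API):
--
-- @
-- hardTanhIO :: Dimensions d => Tensor d -> IO (Tensor d)
-- hardTanhIO i = do
--   let o = new
--   _hardTanh_updateOutput i o (-1) 1 False  -- clamp to [-1, 1], out of place
--   pure o
-- @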