hasktorch-0.2.1.2: Haskell bindings to libtorch, supporting both typed and untyped tensors.
Safe Haskell: None
Language: Haskell2010

Torch.Typed.NN.DataParallel

Documentation

data ForwardConcurrentlyF Source #

Instances

HasForward model input output => Apply' ForwardConcurrentlyF (model, input) (Concurrently output) Source # 

Defined in Torch.Typed.NN.DataParallel

Methods

apply' :: ForwardConcurrentlyF -> (model, input) -> Concurrently output Source #

forwardConcurrently' :: forall {k} (devices' :: [(DeviceType, Nat)]) (device' :: k) (device :: (DeviceType, Nat)) model input output (models :: [Type]) (inputs :: [Type]) (outputs :: [Type]). ('Just device ~ GetDevice model, 'Just device ~ GetDevice input, HasScatter devices' device input inputs, HasReplicate devices' device model models, HZipWithM Concurrently ForwardConcurrentlyF models inputs outputs, HasGather device' devices' outputs output) => model -> input -> IO output Source #

forwardConcurrentlyStoch' :: forall {k} (devices' :: [(DeviceType, Nat)]) (device' :: k) (device :: (DeviceType, Nat)) model input output (models :: [Type]) (inputs :: [Type]) (outputs :: [Type]). ('Just device ~ GetDevice model, 'Just device ~ GetDevice input, HasScatter devices' device input inputs, HasReplicate devices' device model models, HZipWithM Concurrently ForwardConcurrentlyF models inputs outputs, HasGather device' devices' outputs output) => model -> input -> IO output Source #
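
Both functions wrap a full data-parallel forward pass, as their constraints spell out: the model is replicated onto each device in devices' (HasReplicate), the input is scattered across those devices (HasScatter), the per-device forward passes run concurrently (HZipWithM Concurrently ForwardConcurrentlyF), and the outputs are gathered back onto device' (HasGather); the Stoch variant runs each replica's stochastic forward pass (forwardStoch) instead of forward. A minimal usage sketch, assuming a machine with two CUDA devices (on a CPU-only machine, list '( 'CPU, 0) twice instead), that Torch.Typed re-exports sample and randn, and using an illustrative Linear layer and batch size:

  {-# LANGUAGE DataKinds #-}
  {-# LANGUAGE TypeApplications #-}

  import Torch.DType
  import Torch.Device
  import Torch.Typed
  import Torch.Typed.NN.DataParallel

  main :: IO ()
  main = do
    -- model and input batch both live on the first GPU
    model <- sample (LinearSpec :: LinearSpec 784 10 'Float '( 'CUDA, 0))
    input <- randn :: IO (Tensor '( 'CUDA, 0) 'Float '[64, 784])
    -- replicate the model, scatter the batch over two GPUs, run the
    -- replicas concurrently, and gather the [64, 10] output on the CPU
    output <-
      forwardConcurrently'
        @'[ '( 'CUDA, 0), '( 'CUDA, 1)] -- devices' the replicas run on
        @'( 'CPU, 0)                    -- device' the outputs are gathered onto
        model
        input
    print output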

forwardConcurrently :: forall {k} (models :: [k]) (inputs :: [k]) (outputs :: [k]). HZipWithM Concurrently ForwardConcurrentlyF models inputs outputs => HList models -> HList inputs -> Concurrently (HList outputs) Source #

forwardConcurrentlyStoch :: forall {k} (models :: [k]) (inputs :: [k]) (outputs :: [k]). HZipWithM Concurrently ForwardConcurrentlyF models inputs outputs => HList models -> HList inputs -> Concurrently (HList outputs) Source #
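
These operate one level below the primed variants above: they take an already-built HList of models and an HList of inputs, pair them up element-wise through the Apply' ForwardConcurrentlyF instance documented above, and run all forward passes inside a single Concurrently; nothing is moved between devices here. A sketch, assuming Torch.Typed re-exports sample, randn, and the Torch.HList constructors, with two purely illustrative Linear layers:

  {-# LANGUAGE DataKinds #-}

  import Control.Concurrent.Async (runConcurrently)
  import Torch.DType
  import Torch.Device
  import Torch.HList
  import Torch.Typed
  import Torch.Typed.NN.DataParallel

  pairwiseForward :: IO ()
  pairwiseForward = do
    modelA <- sample (LinearSpec :: LinearSpec 784 10 'Float '( 'CPU, 0))
    modelB <- sample (LinearSpec :: LinearSpec 784 10 'Float '( 'CPU, 0))
    inputA <- randn :: IO (Tensor '( 'CPU, 0) 'Float '[32, 784])
    inputB <- randn :: IO (Tensor '( 'CPU, 0) 'Float '[32, 784])
    -- zip the models with their inputs and run both forward passes at once
    outputs <-
      runConcurrently $
        forwardConcurrently (modelA :. modelB :. HNil) (inputA :. inputB :. HNil)
    case outputs of
      outA :. outB :. HNil -> do
        print outA
        print outB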

class HasGradConcurrently (device' :: k) (devices :: k1) (parameters :: [k2]) (losses :: [k3]) (gradients :: [k4]) | device' devices parameters losses -> gradients where Source #

Methods

gradConcurrently :: HList parameters -> HList losses -> Concurrently (HList gradients) Source #

Instances

(HZipWithM Concurrently GradConcurrentlyF parameters losses gradients', ReduceGradients device' devices gradients' gradients) => HasGradConcurrently (device' :: (DeviceType, Nat)) (devices :: [(DeviceType, Nat)]) (parameters :: [k1]) (losses :: [k1]) (gradients :: [k2]) Source # 

Defined in Torch.Typed.NN.DataParallel

Methods

gradConcurrently :: HList parameters -> HList losses -> Concurrently (HList gradients) Source #
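
gradConcurrently takes one flattened parameter list and one scalar loss per replica, computes each replica's gradients concurrently (each (parameters, loss) pair goes through the Apply' GradConcurrentlyF instance documented below), and reduces the per-replica gradients onto device' via ReduceGradients. A sketch of a data-parallel backward pass, kept on the CPU so it runs anywhere; the tiny Linear replicas and shapes are illustrative, and Torch.Typed is assumed to re-export sample, randn, sumAll, flattenParameters, and the Torch.HList constructors:

  {-# LANGUAGE DataKinds #-}
  {-# LANGUAGE TypeApplications #-}

  import Control.Concurrent.Async (runConcurrently)
  import Torch.DType
  import Torch.Device
  import Torch.HList
  import Torch.Typed
  import Torch.Typed.NN.DataParallel

  dataParallelStep :: IO ()
  dataParallelStep = do
    -- two replicas of a tiny layer; in a real setting they would live on
    -- different GPUs, here both stay on the CPU so the sketch runs anywhere
    replica0 <- sample (LinearSpec :: LinearSpec 4 1 'Float '( 'CPU, 0))
    replica1 <- sample (LinearSpec :: LinearSpec 4 1 'Float '( 'CPU, 0))
    x0 <- randn :: IO (Tensor '( 'CPU, 0) 'Float '[8, 4])
    x1 <- randn :: IO (Tensor '( 'CPU, 0) 'Float '[8, 4])
    let loss0 = sumAll (forward replica0 x0) -- one scalar loss per replica
        loss1 = sumAll (forward replica1 x1)
    -- one flattened parameter list and one loss per replica; the per-replica
    -- gradients are computed concurrently and summed onto '( 'CPU, 0)
    gradients <-
      runConcurrently $
        gradConcurrently @'( 'CPU, 0) @'[ '( 'CPU, 0), '( 'CPU, 0)]
          (flattenParameters replica0 :. flattenParameters replica1 :. HNil)
          (loss0 :. loss1 :. HNil)
    case gradients of
      gradWeight :. gradBias :. HNil -> do
        print gradWeight
        print gradBias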

data GradConcurrentlyF Source #

Constructors

GradConcurrentlyF 

Instances

(HasGrad (HList parameters) (HList gradients), Castable (HList gradients) [ATenTensor]) => Apply' GradConcurrentlyF (HList parameters, Loss device dtype) (Concurrently (HList gradients)) Source # 

Defined in Torch.Typed.NN.DataParallel

Methods

apply' :: GradConcurrentlyF -> (HList parameters, Loss device dtype) -> Concurrently (HList gradients) Source #

class ReduceGradients (device' :: (DeviceType, Nat)) (devices :: [(DeviceType, Nat)]) (xxs :: [k]) (ys :: [k1]) | device' devices xxs -> ys where Source #

Methods

reduceGradients :: HList xxs -> HList ys Source #

Instances

HasToDevice device' device (HList xs) (HList ys) => ReduceGradients device' '[device] ('[HList xs] :: [Type]) (ys :: [k]) Source # 

Defined in Torch.Typed.NN.DataParallel

Methods

reduceGradients :: HList '[HList xs] -> HList ys Source #

(HasToDevice device' device (HList xs) (HList ys), ReduceGradients device' devices xxs ys, HZipWith SumF ys ys ys, 1 <= ListLength xxs) => ReduceGradients device' (device ': devices) (HList xs ': xxs :: [Type]) (ys :: [k]) Source # 

Defined in Torch.Typed.NN.DataParallel

Methods

reduceGradients :: HList (HList xs ': xxs) -> HList ys Source #
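
As the instance constraints show, reduceGradients takes one gradient HList per device, moves each onto device' (HasToDevice), and sums them pointwise (HZipWith SumF). A sketch with two per-replica gradient lists that already live on the CPU; the function name, the weight/bias gradient shapes, and the assumption that Torch.Typed re-exports the Torch.HList constructors are all illustrative:

  {-# LANGUAGE DataKinds #-}
  {-# LANGUAGE TypeApplications #-}

  import Torch.DType
  import Torch.Device
  import Torch.HList
  import Torch.Typed
  import Torch.Typed.NN.DataParallel

  -- Sum two per-replica gradient lists (a weight gradient and a bias
  -- gradient, with made-up shapes) onto the CPU.
  accumulate ::
    HList '[Tensor '( 'CPU, 0) 'Float '[1, 4], Tensor '( 'CPU, 0) 'Float '[1]] ->
    HList '[Tensor '( 'CPU, 0) 'Float '[1, 4], Tensor '( 'CPU, 0) 'Float '[1]] ->
    HList '[Tensor '( 'CPU, 0) 'Float '[1, 4], Tensor '( 'CPU, 0) 'Float '[1]]
  accumulate grads0 grads1 =
    reduceGradients @'( 'CPU, 0) @'[ '( 'CPU, 0), '( 'CPU, 0)]
      (grads0 :. grads1 :. HNil)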

data SumF Source #

Constructors

SumF 

Instances

Num y => Apply' SumF (y, y) y Source # 

Defined in Torch.Typed.NN.DataParallel

Methods

apply' :: SumF -> (y, y) -> y Source #
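
Given the Num constraint, apply' SumF adds the two components of a pair; zipped over two equally typed HLists this is the pointwise gradient accumulation that ReduceGradients relies on (note the HZipWith SumF ys ys ys constraint above). A small sketch, assuming hzipWith is the function behind the HZipWith class in Torch.HList and using illustrative tensor shapes:

  {-# LANGUAGE DataKinds #-}

  import Torch.DType
  import Torch.Device
  import Torch.HList
  import Torch.Typed
  import Torch.Typed.NN.DataParallel

  -- Pointwise sum of two HLists of tensors, relying on the Num instance
  -- of typed tensors via the Apply' SumF instance.
  sumLists ::
    HList '[Tensor '( 'CPU, 0) 'Float '[2], Tensor '( 'CPU, 0) 'Float '[3]] ->
    HList '[Tensor '( 'CPU, 0) 'Float '[2], Tensor '( 'CPU, 0) 'Float '[3]] ->
    HList '[Tensor '( 'CPU, 0) 'Float '[2], Tensor '( 'CPU, 0) 'Float '[3]]
  sumLists = hzipWith SumF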