hasktorch-zoo-0.0.1.0: Neural architectures in hasktorch

Safe Haskell: None
Language: Haskell2010

Torch.Models.Vision.LeNet

Documentation

type Flattened ker = (16 * ker) * ker Source #
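
For the classic 5x5 kernel this works out to Flattened 5 = (16 * 5) * 5 = 400, the input width of the first fully connected layer (see fc1 below).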

data LeNet ch ker Source #

Constructors

LeNet 

Fields

_conv1 :: !(Conv2d ch 6 '(ker, ker))

_conv2 :: !(Conv2d 6 16 '(ker, ker))

_fc1 :: !(Linear (Flattened ker) 120)

_fc2 :: !(Linear 120 84)

_fc3 :: !(Linear 84 10)

Instances
(KnownDim (Flattened ker), KnownDim ch, KnownDim ker) => Show (LeNet ch ker) Source # 
Instance details

Defined in Torch.Models.Vision.LeNet

Methods

showsPrec :: Int -> LeNet ch ker -> ShowS #

show :: LeNet ch ker -> String #

showList :: [LeNet ch ker] -> ShowS #

Generic (LeNet ch ker) Source # 
Instance details

Defined in Torch.Models.Vision.LeNet

Associated Types

type Rep (LeNet ch ker) :: Type -> Type #

Methods

from :: LeNet ch ker -> Rep (LeNet ch ker) x #

to :: Rep (LeNet ch ker) x -> LeNet ch ker #

(KnownDim (Flattened ker), KnownDim ch, KnownDim ker) => Backprop (LeNet ch ker) Source # 
Instance details

Defined in Torch.Models.Vision.LeNet

Methods

zero :: LeNet ch ker -> LeNet ch ker

add :: LeNet ch ker -> LeNet ch ker -> LeNet ch ker

one :: LeNet ch ker -> LeNet ch ker

type Rep (LeNet ch ker) Source # 
Instance details

Defined in Torch.Models.Vision.LeNet

type Rep (LeNet ch ker) = D1 (MetaData "LeNet" "Torch.Models.Vision.LeNet" "hasktorch-zoo-0.0.1.0-inplace" False) (C1 (MetaCons "LeNet" PrefixI True) ((S1 (MetaSel (Just "_conv1") NoSourceUnpackedness SourceStrict DecidedStrict) (Rec0 (Conv2d ch 6 ((,) ker ker))) :*: S1 (MetaSel (Just "_conv2") NoSourceUnpackedness SourceStrict DecidedStrict) (Rec0 (Conv2d 6 16 ((,) ker ker)))) :*: (S1 (MetaSel (Just "_fc1") NoSourceUnpackedness SourceStrict DecidedStrict) (Rec0 (Linear (Flattened ker) 120)) :*: (S1 (MetaSel (Just "_fc2") NoSourceUnpackedness SourceStrict DecidedStrict) (Rec0 (Linear 120 84)) :*: S1 (MetaSel (Just "_fc3") NoSourceUnpackedness SourceStrict DecidedStrict) (Rec0 (Linear 84 10))))))

conv1 :: Lens' (LeNet ch ker) (Conv2d ch 6 '(ker, ker)) Source #

conv2 :: Lens' (LeNet ch ker) (Conv2d 6 16 '(ker, ker)) Source #

fc1 :: forall ch ker. Lens' (LeNet ch ker) (Linear (Flattened ker) 120) Source #

fc2 :: Lens' (LeNet ch ker) (Linear 120 84) Source #

fc3 :: Lens' (LeNet ch ker) (Linear 84 10) Source #
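
The lenses above compose with any lens-style library to read or replace an individual layer. A minimal sketch, assuming Control.Lens for view/set; the module paths for Conv2d and Linear are assumptions here:

  {-# LANGUAGE DataKinds #-}
  import Control.Lens (set, view)
  import Torch.Double.NN.Conv2d (Conv2d)   -- module path assumed
  import Torch.Double.NN.Linear (Linear)   -- module path assumed
  import Torch.Models.Vision.LeNet

  -- Read the first convolutional layer out of a network.
  firstConv :: LeNet 3 5 -> Conv2d 3 6 '(5, 5)
  firstConv = view conv1

  -- Swap in a replacement classification head.
  replaceHead :: Linear 84 10 -> LeNet 3 5 -> LeNet 3 5
  replaceHead = set fc3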

newLeNet :: All KnownDim '[ch, ker, Flattened ker, ker * ker] => Generator -> IO (LeNet ch ker) Source #
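
A minimal construction sketch. The channel count and kernel size are fixed by the type annotation; newRNG (assumed here to come from hasktorch's Torch.Core.Random) supplies the Generator:

  {-# LANGUAGE DataKinds #-}
  import Torch.Core.Random (newRNG)   -- assumed source of a Generator
  import Torch.Models.Vision.LeNet

  main :: IO ()
  main = do
    gen <- newRNG
    -- 3 input channels, 5x5 kernels: the classic 32x32 (CIFAR-10 style) configuration
    net <- (newLeNet gen :: IO (LeNet 3 5))
    print net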

update :: (KnownDim ch, KnownDim kW, KnownDim ((16 * kW) * kW)) => LeNet ch kW -> HsReal -> LeNet ch kW -> LeNet ch kW Source #

Update a LeNet network, returning the updated network.

update_ :: (KnownDim i, KnownDim kW, KnownDim ((16 * kW) * kW)) => LeNet i kW -> HsReal -> LeNet i kW -> IO () Source #

Update a LeNet network in place.
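
Read against the signatures above, these look like a plain SGD step; the sketch below assumes the HsReal is a learning rate and the trailing network is a gradient of the same shape. The Torch.Double module path for HsReal is also an assumption:

  {-# LANGUAGE DataKinds #-}
  import Torch.Double (HsReal)   -- module path assumed
  import Torch.Models.Vision.LeNet

  -- Pure step: returns the updated network.
  sgdStep :: LeNet 3 5 -> HsReal -> LeNet 3 5 -> LeNet 3 5
  sgdStep net lr grad = update net lr grad

  -- Destructive step: writes the update back into `net`.
  sgdStep_ :: LeNet 3 5 -> HsReal -> LeNet 3 5 -> IO ()
  sgdStep_ net lr grad = update_ net lr grad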

lenet :: (If (Case_6989586621679794375 (Mod (((h1 - ker) + 1) - 2) 2) 0 (CmpNat (Mod (((h1 - ker) + 1) - 2) 2) 0)) ((2 + Div (((h1 - ker) + 1) - 2) 2) ~ h2) ((1 + Div (((h1 - ker) + 1) - 2) 2) ~ h2), If (Case_6989586621679794375 (Mod (((w1 - ker) + 1) - 2) 2) 0 (CmpNat (Mod (((w1 - ker) + 1) - 2) 2) 0)) ((2 + Div (((w1 - ker) + 1) - 2) 2) ~ w2) ((1 + Div (((w1 - ker) + 1) - 2) 2) ~ w2), If (Case_6989586621679794375 (Mod (((h2 - ker) + 1) - 2) 2) 0 (CmpNat (Mod (((h2 - ker) + 1) - 2) 2) 0)) ((2 + Div (((h2 - ker) + 1) - 2) 2) ~ moh) ((1 + Div (((h2 - ker) + 1) - 2) 2) ~ moh), If (Case_6989586621679794375 (Mod (((w2 - ker) + 1) - 2) 2) 0 (CmpNat (Mod (((w2 - ker) + 1) - 2) 2) 0)) ((2 + Div (((w2 - ker) + 1) - 2) 2) ~ mow) ((1 + Div (((w2 - ker) + 1) - 2) 2) ~ mow), Reifies s W, KnownDim ((16 * ker) * ker), KnownDim ch, KnownDim ker, KnownDim ((h2 - ker) + 1), KnownDim (((h2 - ker) + 1) * ((w2 - ker) + 1)), KnownDim ((6 * ker) * ker), KnownDim ((ker * ker) * 6), KnownDim w2, KnownDim h2, KnownDim ((w1 - ker) + 1), KnownDim w1, KnownDim ((ker * ker) * ch), KnownDim ((ch * ker) * ker), KnownDim (((h1 - ker) + 1) * ((w1 - ker) + 1)), KnownDim h1, KnownDim ((h1 - ker) + 1), KnownDim ((w2 - ker) + 1), KnownDim moh, KnownDim mow, KnownDim ((16 * moh) * mow), Case_6989586621679794339 h1 ker (CmpNat h1 ker) ~# False, Case_6989586621679794339 h2 ker (CmpNat h2 ker) ~# False, Case_6989586621679794339 w1 ker (CmpNat w1 ker) ~# False, Case_6989586621679794375 ((w1 - ker) + 1) 0 (CmpNat ((w1 - ker) + 1) 0) ~# True, Case_6989586621679794375 ((h2 - ker) + 1) 0 (CmpNat ((h2 - ker) + 1) 0) ~# True, Case_6989586621679794375 ker 0 (CmpNat ker 0) ~# True, Case_6989586621679794375 ((w2 - ker) + 1) 0 (CmpNat ((w2 - ker) + 1) 0) ~# True, Case_6989586621679794375 ((h1 - ker) + 1) 0 (CmpNat ((h1 - ker) + 1) 0) ~# True, Case_6989586621679794339 w2 ker (CmpNat w2 ker) ~# False, ((16 * moh) * mow) ~# ((16 * ker) * ker)) => Double -> BVar s (LeNet ch ker) -> BVar s (Tensor (ch ': (h1 ': (w1 ': ([] :: [Nat]))))) -> BVar s (Tensor (10 ': ([] :: [Nat]))) Source #
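
Underneath the noisy inferred constraints, lenet is the end-to-end single-example network: a learning rate, the network, and a [ch, h, w] image go in; scores over 10 classes come out. A hedged forward/backward sketch using evalBP2 and backprop2 from the backprop package, on a 3-channel 32x32 input so that the pooled feature map lines up with Flattened 5 = 400. The constant-zero placeholder input and the Backprop instances for hasktorch's static tensors are assumptions here:

  {-# LANGUAGE DataKinds #-}
  import Numeric.Backprop (backprop2, evalBP2)
  import Torch.Double (Tensor, constant)   -- `constant` assumed as a placeholder-input builder
  import Torch.Models.Vision.LeNet

  forwardAndGrad :: LeNet 3 5 -> (Tensor '[10], LeNet 3 5)
  forwardAndGrad net = (scores, gradNet)
    where
      lr = 0.001
      x  = constant 0 :: Tensor '[3, 32, 32]           -- placeholder image
      scores            = evalBP2   (lenet lr) net x   -- forward pass only
      (_, (gradNet, _)) = backprop2 (lenet lr) net x   -- forward pass plus gradients

The LeNet-shaped gradient produced this way is the natural candidate for the final argument of update/update_ above.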

lenetLayer Source #

Arguments

:: Reifies s W 
=> All KnownDim '[inp, out, ker, (ker * ker) * inp] 
=> pad ~ 0 
=> step ~ 1 
=> SpatialConvolutionC inp h w ker ker step step pad pad oh ow 
=> SpatialDilationC oh ow 2 2 2 2 pad pad mow moh 1 1 True 
=> Double

learning rate for convolution layer

-> BVar s (Conv2d inp out '(ker, ker))

convolutional layer

-> BVar s (Tensor '[inp, h, w])

input

-> BVar s (Tensor '[out, moh, mow])

output
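
Concretely, lenetLayer is one convolution-plus-pooling stage: a stride-1 convolution with no padding, whose output is then pooled 2x2 with stride 2 (the SpatialConvolutionC and SpatialDilationC constraints pin these shapes). A hedged sketch of a single stage on a 1-channel 28x28 input with a 5x5 kernel, which comes out as [6, 12, 12]; the constant-zero input and the Conv2d module path are assumptions:

  {-# LANGUAGE DataKinds #-}
  import Numeric.Backprop (evalBP2)
  import Torch.Double (Tensor, constant)    -- `constant` assumed as a placeholder-input builder
  import Torch.Double.NN.Conv2d (Conv2d)    -- module path assumed
  import Torch.Models.Vision.LeNet

  -- One conv-plus-pool stage: [1, 28, 28] -> [6, 12, 12].
  oneStage :: Conv2d 1 6 '(5, 5) -> Tensor '[6, 12, 12]
  oneStage conv = evalBP2 (lenetLayer 0.001) conv input
    where
      input = constant 0 :: Tensor '[1, 28, 28]   -- placeholder image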

lenetBatch :: (If (Case_6989586621679794375 (Mod (((h1 - ker) + 1) - 2) 2) 0 (CmpNat (Mod (((h1 - ker) + 1) - 2) 2) 0)) ((2 + Div (((h1 - ker) + 1) - 2) 2) ~ h2) ((1 + Div (((h1 - ker) + 1) - 2) 2) ~ h2), If (Case_6989586621679794375 (Mod (((w1 - ker) + 1) - 2) 2) 0 (CmpNat (Mod (((w1 - ker) + 1) - 2) 2) 0)) ((2 + Div (((w1 - ker) + 1) - 2) 2) ~ w2) ((1 + Div (((w1 - ker) + 1) - 2) 2) ~ w2), If (Case_6989586621679794375 (Mod (((h2 - ker) + 1) - 2) 2) 0 (CmpNat (Mod (((h2 - ker) + 1) - 2) 2) 0)) ((2 + Div (((h2 - ker) + 1) - 2) 2) ~ moh) ((1 + Div (((h2 - ker) + 1) - 2) 2) ~ moh), If (Case_6989586621679794375 (Mod (((w2 - ker) + 1) - 2) 2) 0 (CmpNat (Mod (((w2 - ker) + 1) - 2) 2) 0)) ((2 + Div (((w2 - ker) + 1) - 2) 2) ~ mow) ((1 + Div (((w2 - ker) + 1) - 2) 2) ~ mow), Reifies s W, KnownDim ((16 * ker) * ker), KnownDim ch, KnownDim ker, KnownDim ((h2 - ker) + 1), KnownDim (((h2 - ker) + 1) * ((w2 - ker) + 1)), KnownDim ((6 * ker) * ker), KnownDim ((ker * ker) * 6), KnownDim w2, KnownDim h2, KnownDim ((w1 - ker) + 1), KnownDim w1, KnownDim b, KnownDim ((ker * ker) * ch), KnownDim ((ch * ker) * ker), KnownDim (((h1 - ker) + 1) * ((w1 - ker) + 1)), KnownDim h1, KnownDim ((h1 - ker) + 1), KnownDim ((w2 - ker) + 1), KnownDim moh, KnownDim mow, KnownDim ((16 * moh) * mow), Case_6989586621679794339 h1 ker (CmpNat h1 ker) ~# False, Case_6989586621679794339 w1 ker (CmpNat w1 ker) ~# False, Case_6989586621679794339 h2 ker (CmpNat h2 ker) ~# False, (((b * 16) * moh) * mow) ~# (b * ((16 * moh) * mow)), Case_6989586621679794375 ((h2 - ker) + 1) 0 (CmpNat ((h2 - ker) + 1) 0) ~# True, Case_6989586621679794375 ((w1 - ker) + 1) 0 (CmpNat ((w1 - ker) + 1) 0) ~# True, Case_6989586621679794375 ((h1 - ker) + 1) 0 (CmpNat ((h1 - ker) + 1) 0) ~# True, Case_6989586621679794375 ((w2 - ker) + 1) 0 (CmpNat ((w2 - ker) + 1) 0) ~# True, Case_6989586621679794375 ker 0 (CmpNat ker 0) ~# True, Case_6989586621679794339 w2 ker (CmpNat w2 ker) ~# False, ((16 * moh) * mow) ~# ((16 * ker) * ker)) => Double -> BVar s (LeNet ch ker) -> BVar s (Tensor (b ': (ch ': (h1 ': (w1 ': ([] :: [Nat])))))) -> BVar s (Tensor (b ': (10 ': ([] :: [Nat])))) Source #
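
lenetBatch is the batched form of lenet: the same learning rate and network, but a [b, ch, h, w] stack of images in and [b, 10] scores out. A forward-only sketch over a batch of four 3x32x32 images, with the same placeholder-input assumption as above:

  {-# LANGUAGE DataKinds #-}
  import Numeric.Backprop (evalBP2)
  import Torch.Double (Tensor, constant)   -- `constant` assumed as a placeholder-input builder
  import Torch.Models.Vision.LeNet

  batchScores :: LeNet 3 5 -> Tensor '[4, 10]
  batchScores net = evalBP2 (lenetBatch 0.001) net batch
    where
      batch = constant 0 :: Tensor '[4, 3, 32, 32]   -- placeholder batch of images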

lenetLayerBatch Source #

Arguments

:: Reifies s W 
=> All KnownDim '[batch, inp, out, ker, (ker * ker) * inp] 
=> pad ~ 0 
=> step ~ 1 
=> SpatialConvolutionC inp h w ker ker step step pad pad oh ow 
=> SpatialDilationC oh ow 2 2 2 2 pad pad mow moh 1 1 True 
=> Double

learning rate for convolution layer

-> BVar s (Conv2d inp out '(ker, ker))

convolutional layer

-> BVar s (Tensor '[batch, inp, h, w])

input

-> BVar s (Tensor '[batch, out, moh, mow])

output
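
lenetLayerBatch is lenetLayer with a leading batch dimension: under the same assumptions as the single-stage sketch above, a Tensor '[batch, 1, 28, 28] input pushed through a Conv2d 1 6 '(5, 5) layer comes out as Tensor '[batch, 6, 12, 12].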