-- Hoogle documentation, generated by Haddock
-- See Hoogle, http://www.haskell.org/hoogle/
-- | Create valid deep neural network architectures
--
-- TensorSafe provides a simple API for creating deep neural network
-- structures that are validated using dependent types. Given a list of
-- Layers and an initial Shape, TensorSafe can check and validate the
-- structure of the network. It is also possible to extract the network
-- definition and compile it to a target language such as Python or
-- JavaScript.
@package tensor-safe
@version 0.1.0.1
-- | This module provides simple IO helper functions for the command-line
-- programs.
module TensorSafe.Commands.Utils
-- | Transforms an InterpreterError into a string.
errorString :: InterpreterError -> String
-- | Lifts putStrLn to the Interpreter.
say :: String -> Interpreter ()
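-- | A minimal usage sketch (an illustration, not part of the package API):
-- it assumes hint's Language.Haskell.Interpreter for runInterpreter and only
-- shows how say and errorString fit together.
--
-- ```
-- import Language.Haskell.Interpreter (runInterpreter)
-- import TensorSafe.Commands.Utils (errorString, say)
--
-- main :: IO ()
-- main = do
--   -- run an interpreted action that logs via say, then report any failure
--   result <- runInterpreter (say "interpreter started")
--   case result of
--     Left err -> putStrLn (errorString err)
--     Right () -> putStrLn "done"
-- ```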
-- | This module provides compilation and interpretation functions using
-- the "hint" library.
module TensorSafe.Commands.Compile
-- | Compilation interface for the compile command, given a source path and
-- module name.
compile :: String -> String -> String -> Maybe FilePath -> IO ()
-- | This module provides checking and interpretation functions using the
-- "hint" library.
module TensorSafe.Commands.Check
-- | Checks if the file at the specified path compiles successfully.
check :: String -> IO ()
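-- | A usage sketch for check (the file path below is hypothetical):
--
-- ```
-- import TensorSafe.Commands.Check (check)
--
-- main :: IO ()
-- main = check "examples/MyModel.hs"  -- hypothetical path to a model file
-- ```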
-- | This module describes the expression structure of an INetwork instance.
-- An INetwork can be converted into a data structure called CNetwork,
-- which can later be compiled to external languages.
module TensorSafe.Compile.Expr
-- | Auxiliary data representation of Layers. IMPORTANT: if you add new
-- layer definitions to Layers, you should add the corresponding
-- constructor here for the same layer.
data DLayer
DActivation :: DLayer
DAdd :: DLayer
DBatchNormalization :: DLayer
DConv2D :: DLayer
DDense :: DLayer
DDropout :: DLayer
DFlatten :: DLayer
DGlobalAvgPooling2D :: DLayer
DInput :: DLayer
DLSTM :: DLayer
DMaxPooling :: DLayer
DRelu :: DLayer
DZeroPadding2D :: DLayer
-- | Defines the compilable representation of a network: a structure of
-- layers and their parameters from which code for a target language can be
-- generated.
data CNetwork
CNSequence :: CNetwork -> CNetwork
CNAdd :: CNetwork -> CNetwork -> CNetwork
CNCons :: CNetwork -> CNetwork -> CNetwork
CNLayer :: DLayer -> Map String String -> CNetwork
CNReturn :: CNetwork
CNNil :: CNetwork
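-- | A hand-built CNetwork sketch (illustrative only; the parameter map keys
-- are assumptions, not necessarily the keys TensorSafe itself emits):
--
-- ```
-- import qualified Data.Map as Map
-- import TensorSafe.Compile.Expr
--
-- -- a one-layer sequential network: a Dense layer followed by the end marker
-- denseNet :: CNetwork
-- denseNet =
--   CNSequence
--     (CNCons (CNLayer DDense (Map.fromList [("units", "10")]))
--             CNReturn)
-- ```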
-- | Support for JavaScript compilation
data JavaScript
JavaScript :: JavaScript
-- | Support for Python compilation
data Python
Python :: Python
-- | Class that defines which languages are supported for generating text
-- from CNetworks
class Generator l
-- | Adds support for a language: generates Text from a CNetwork
generate :: Generator l => l -> CNetwork -> Text
-- | Similar to generate, but also adds the header and module lines needed
-- to compile the CNetwork as a separate file.
generateFile :: Generator l => l -> CNetwork -> Text
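-- | A rendering sketch (the tiny CNetwork is built inline for illustration;
-- in practice it would usually come from toCNetwork):
--
-- ```
-- import qualified Data.Map as Map
-- import qualified Data.Text.IO as TIO
-- import TensorSafe.Compile.Expr
--
-- main :: IO ()
-- main = do
--   let cn = CNSequence (CNCons (CNLayer DFlatten Map.empty) CNReturn)
--   TIO.putStrLn (generate JavaScript cn)   -- network body only
--   TIO.putStrLn (generateFile Python cn)   -- body plus header and module lines
-- ```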
instance GHC.Show.Show TensorSafe.Compile.Expr.Python
instance GHC.Show.Show TensorSafe.Compile.Expr.JavaScript
instance GHC.Show.Show TensorSafe.Compile.Expr.CNetwork
instance GHC.Show.Show TensorSafe.Compile.Expr.DLayer
instance TensorSafe.Compile.Expr.Generator TensorSafe.Compile.Expr.JavaScript
instance TensorSafe.Compile.Expr.Generator TensorSafe.Compile.Expr.Python
instance TensorSafe.Compile.Expr.LayerGenerator TensorSafe.Compile.Expr.JavaScript
instance TensorSafe.Compile.Expr.LayerGenerator TensorSafe.Compile.Expr.Python
-- | This module adds some meaningful type operations that are used
-- throughout the project.
module TensorSafe.Core
-- | Multiplies all the numbers in a list of natural numbers
type family ShapeProduct (s :: [Nat]) :: Nat
-- | Compares two types at the kind level
type family TypeEquals (s1 :: Type) (s2 :: Type) :: Bool
-- | Compares two types at the kind level and raises a type error if they
-- don't match
type family TypeEquals' s1 s2 :: Type
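-- | A type-level usage sketch (reifying the reduction with natVal so it is
-- observable at the value level; the 28x28 example is an assumption):
--
-- ```
-- {-# LANGUAGE DataKinds #-}
-- import Data.Proxy (Proxy (..))
-- import GHC.TypeLits (natVal)
-- import TensorSafe.Core (ShapeProduct)
--
-- -- ShapeProduct '[28, 28] should reduce to 784, the flattened size of a
-- -- 28x28 input.
-- flattenedSize :: Integer
-- flattenedSize = natVal (Proxy :: Proxy (ShapeProduct '[28, 28]))  -- 784
-- ```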
-- | Wrapper for a Nat value
data R (n :: Nat)
[R] :: KnownNat n => R n
-- | Wrapper for a tuple of 2 Nat values
data L (m :: Nat) (n :: Nat)
[L] :: (KnownNat m, KnownNat n) => L m n
instance (GHC.TypeNats.KnownNat m, GHC.TypeNats.KnownNat n) => GHC.Show.Show (TensorSafe.Core.L m n)
instance GHC.TypeNats.KnownNat n => GHC.Show.Show (TensorSafe.Core.R n)
-- | This module defines the Layer class, of which all layers should be
-- instances.
module TensorSafe.Layer
-- | Auxiliary type for Input Shape parameter
type InputShape = Maybe String
-- | Defines that a type is a Layer. Each layer can be compiled into a
-- specific CNetwork expression, which can later be used to generate code
-- for a specific backend.
class Layer x
-- | Given the layer and an optional input shape, generates a CNetwork
-- structure
compile :: Layer x => x -> InputShape -> CNetwork
-- | The layer type
layer :: Layer x => x
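-- | A usage sketch: obtaining a layer value via layer and compiling it with
-- no explicit input shape (Flatten is just a convenient concrete instance):
--
-- ```
-- import TensorSafe.Compile.Expr (CNetwork)
-- import TensorSafe.Layer (Layer (..))
-- import TensorSafe.Layers.Flatten (Flatten)
--
-- flattenExpr :: CNetwork
-- flattenExpr = compile (layer :: Flatten) Nothing  -- no input shape supplied
-- ```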
-- | This module declares the Add layer data type.
module TensorSafe.Layers.Add
-- | Adds (element-wise) the outputs of two sub-networks with matching
-- output shapes, as in residual connections
data Add :: ls1 -> ls2 -> Type
instance forall ls1 (a :: ls1) ls2 (b :: ls2). GHC.Show.Show (TensorSafe.Layers.Add.Add a b)
instance forall ls4 ls5 (ls6 :: ls5) (ls7 :: ls4). TensorSafe.Layer.Layer (TensorSafe.Layers.Add.Add ls6 ls7)
-- | This module declares the BatchNormalization layer data type.
module TensorSafe.Layers.BatchNormalization
-- | A classic BatchNormalization layer with axis, momentum and epsilon
-- parameters
data BatchNormalization :: Nat -> Nat -> Nat -> Type
[BatchNormalization] :: BatchNormalization axis momentum epsilon
instance GHC.Show.Show (TensorSafe.Layers.BatchNormalization.BatchNormalization a b c)
instance (GHC.TypeNats.KnownNat axis, GHC.TypeNats.KnownNat momentum, GHC.TypeNats.KnownNat epsilon) => TensorSafe.Layer.Layer (TensorSafe.Layers.BatchNormalization.BatchNormalization axis momentum epsilon)
-- | This module declares the 2D convolutional layer data type.
module TensorSafe.Layers.Conv2D
-- | A 2D Convolutional layer
data Conv2D :: Nat -> Nat -> Nat -> Nat -> Nat -> Nat -> Type
[Conv2D] :: Conv2D channels filters kernelRows kernelColumns strideRows strideColumns
instance GHC.Show.Show (TensorSafe.Layers.Conv2D.Conv2D a b c d e f)
instance (GHC.TypeNats.KnownNat channels, GHC.TypeNats.KnownNat filters, GHC.TypeNats.KnownNat kernelRows, GHC.TypeNats.KnownNat kernelColumns, GHC.TypeNats.KnownNat strideRows, GHC.TypeNats.KnownNat strideColumns) => TensorSafe.Layer.Layer (TensorSafe.Layers.Conv2D.Conv2D channels filters kernelRows kernelColumns strideRows strideColumns)
-- | This module declares the Dense, a.k.a. FullyConnected, layer data
-- type.
module TensorSafe.Layers.Dense
-- | A classic Dense, or FullyConnected, layer with input and output
-- parameters.
data Dense :: Nat -> Nat -> Type
[Dense] :: Dense input output
instance GHC.Show.Show (TensorSafe.Layers.Dense.Dense a b)
instance (GHC.TypeNats.KnownNat input, GHC.TypeNats.KnownNat output) => TensorSafe.Layer.Layer (TensorSafe.Layers.Dense.Dense input output)
-- | This module declares the Dropout layer data type.
module TensorSafe.Layers.Dropout
-- | A Dropout layer with rate and seed arguments
data Dropout :: Nat -> Nat -> Type
instance GHC.Show.Show (TensorSafe.Layers.Dropout.Dropout a b)
instance (GHC.TypeNats.KnownNat rate, GHC.TypeNats.KnownNat seed) => TensorSafe.Layer.Layer (TensorSafe.Layers.Dropout.Dropout rate seed)
-- | This module declares the Flatten layer data type.
module TensorSafe.Layers.Flatten
-- | Flattens the dimensions of the input shape into a one-dimensional
-- shape (D1)
data Flatten
instance GHC.Show.Show TensorSafe.Layers.Flatten.Flatten
instance TensorSafe.Layer.Layer TensorSafe.Layers.Flatten.Flatten
-- | This module declares the GlobalAvgPooling2D layer data type.
module TensorSafe.Layers.GlobalAvgPooling2D
-- | A GlobalAvgPooling2D layer
data GlobalAvgPooling2D
instance GHC.Show.Show TensorSafe.Layers.GlobalAvgPooling2D.GlobalAvgPooling2D
instance TensorSafe.Layer.Layer TensorSafe.Layers.GlobalAvgPooling2D.GlobalAvgPooling2D
-- | This module declares the Input layer data type.
module TensorSafe.Layers.Input
-- | An Input layer that leaves the shape unchanged; useful as an identity
-- placeholder, e.g. for the identity branch of an Add layer
data Input
instance GHC.Show.Show TensorSafe.Layers.Input.Input
instance TensorSafe.Layer.Layer TensorSafe.Layers.Input.Input
-- | This module declares the classic LSTM layer data type.
module TensorSafe.Layers.LSTM
-- | An LSTM layer with a number of units and an option to return the
-- original sequences.
data LSTM :: Nat -> Bool -> Type
[LSTM] :: LSTM units returnSequences
instance GHC.Show.Show (TensorSafe.Layers.LSTM.LSTM a b)
instance GHC.TypeNats.KnownNat units => TensorSafe.Layer.Layer (TensorSafe.Layers.LSTM.LSTM units b)
-- | This module declares the 2D MaxPooling layer data type.
module TensorSafe.Layers.MaxPooling
-- | A 2D MaxPooling layer that works for D2 and D3 shapes
data MaxPooling :: Nat -> Nat -> Nat -> Nat -> Type
[MaxPooling] :: MaxPooling kernelRows kernelColumns strideRows strideColumns
instance GHC.Show.Show (TensorSafe.Layers.MaxPooling.MaxPooling a b c d)
instance (GHC.TypeNats.KnownNat kernelRows, GHC.TypeNats.KnownNat kernelColumns, GHC.TypeNats.KnownNat strideRows, GHC.TypeNats.KnownNat strideColumns) => TensorSafe.Layer.Layer (TensorSafe.Layers.MaxPooling.MaxPooling kernelRows kernelColumns strideRows strideColumns)
-- | This module declares the ReLu activation layer data type.
module TensorSafe.Layers.Relu
-- | A ReLu activation function
data Relu
instance GHC.Show.Show TensorSafe.Layers.Relu.Relu
instance TensorSafe.Layer.Layer TensorSafe.Layers.Relu.Relu
-- | This module declares the Sigmoid activation layer data type.
module TensorSafe.Layers.Sigmoid
-- | A Sigmoid activation function
data Sigmoid
instance GHC.Show.Show TensorSafe.Layers.Sigmoid.Sigmoid
instance TensorSafe.Layer.Layer TensorSafe.Layers.Sigmoid.Sigmoid
-- | This module declares the ZeroPadding2D layer data type.
module TensorSafe.Layers.ZeroPadding2D
-- | A ZeroPadding2D layer with padding_rows and padding_cols arguments
data ZeroPadding2D :: Nat -> Nat -> Type
instance GHC.Show.Show (TensorSafe.Layers.ZeroPadding2D.ZeroPadding2D a b)
instance (GHC.TypeNats.KnownNat padding_rows, GHC.TypeNats.KnownNat padding_cols) => TensorSafe.Layer.Layer (TensorSafe.Layers.ZeroPadding2D.ZeroPadding2D padding_rows padding_cols)
-- | This module re-exports all Layers declared under TensorSafe.Layers.
module TensorSafe.Layers
-- | Adds (element-wise) the outputs of two sub-networks with matching
-- output shapes, as in residual connections
data Add :: ls1 -> ls2 -> Type
-- | A classic BatchNormalization layer with axis, momentum and epsilon
-- parameters
data BatchNormalization :: Nat -> Nat -> Nat -> Type
-- | A 2D Convolutional layer
data Conv2D :: Nat -> Nat -> Nat -> Nat -> Nat -> Nat -> Type
-- | A classic Dense, or FullyConnected, layer with input and output
-- parameters.
data Dense :: Nat -> Nat -> Type
-- | A Dropout layer with rate and seed arguments
data Dropout :: Nat -> Nat -> Type
-- | Flattens the dimensions of the input shape into a one-dimensional
-- shape (D1)
data Flatten
-- | A GlobalAvgPooling2D layer
data GlobalAvgPooling2D
-- | An Input layer that leaves the shape unchanged; useful as an identity
-- placeholder, e.g. for the identity branch of an Add layer
data Input
-- | An LSTM layer with a number of units and an option to return the
-- original sequences.
data LSTM :: Nat -> Bool -> Type
-- | A 2D MaxPooling layer that works for D2 and D3 shapes
data MaxPooling :: Nat -> Nat -> Nat -> Nat -> Type
-- | A ReLu activation function
data Relu
-- | A Sigmoid activation function
data Sigmoid
-- | A ZeroPadding2D layer with padding_rows and padding_cols arguments
data ZeroPadding2D :: Nat -> Nat -> Type
-- | This module declares all Shape-related functions and data structures,
-- as well as all singleton instances for the Shape data type. This module
-- was highly influenced by Grenade, a Haskell library for deep learning
-- with dependent types. See:
-- https://github.com/HuwCampbell/grenade
module TensorSafe.Shape
-- | The current shapes we accept. At the moment these are just one-, two-,
-- and three-dimensional vectors/matrices.
--
-- These are only used with DataKinds, as Kind Shape, with Types
-- 'D1, 'D2, 'D3.
data Shape
-- | One dimensional vector
D1 :: Nat -> Shape
-- | Two dimensional matrix. Row, Column.
D2 :: Nat -> Nat -> Shape
-- | Three dimensional matrix. Row, Column, Channels.
D3 :: Nat -> Nat -> Nat -> Shape
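-- | Promoted-shape sketches (how these constructors are used at the type
-- level with DataKinds; the aliases are illustrative):
--
-- ```
-- {-# LANGUAGE DataKinds #-}
-- import TensorSafe.Shape (Shape (..))
--
-- type FlatImage  = 'D1 784      -- a flattened 28x28 image
-- type GrayImage  = 'D2 28 28    -- rows and columns
-- type ImageWithC = 'D3 28 28 1  -- rows, columns and a single channel
-- ```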
-- | Concrete data structures for a Shape.
--
-- All shapes are held in contiguous memory. 3D is held in a matrix
-- (usually row oriented) which has height depth * rows.
data S (n :: Shape)
[S1D] :: KnownNat len => R len -> S ( 'D1 len)
[S2D] :: (KnownNat rows, KnownNat columns) => L rows columns -> S ( 'D2 rows columns)
[S3D] :: (KnownNat rows, KnownNat columns, KnownNat depth, KnownNat (rows * depth)) => L (rows * depth) columns -> S ( 'D3 rows columns depth)
-- | Compares two Shapes at the kind level and returns a Bool kind
type family ShapeEquals (sIn :: Shape) (sOut :: Shape) :: Bool
-- | Same as ShapeEquals, which compares two Shapes at the kind level, but
-- raises a TypeError if the Shapes are not equal.
instance GHC.Show.Show (TensorSafe.Shape.S n)
instance GHC.TypeNats.KnownNat a => Data.Singletons.Internal.SingI ('TensorSafe.Shape.D1 a)
instance (GHC.TypeNats.KnownNat a, GHC.TypeNats.KnownNat b) => Data.Singletons.Internal.SingI ('TensorSafe.Shape.D2 a b)
instance (GHC.TypeNats.KnownNat a, GHC.TypeNats.KnownNat b, GHC.TypeNats.KnownNat c) => Data.Singletons.Internal.SingI ('TensorSafe.Shape.D3 a b c)
-- | This module is the core of TensorSafe. It defines all Network data
-- structures and the type functions that represent the shape
-- transformations performed by layers, as well as all the information
-- needed to compile Network structures to CNetworks for later code
-- generation.
module TensorSafe.Network
-- | A network that defines a specific sequence of layers
data Network :: [Type] -> Type
[NNil] :: Network '[]
[:~~] :: Layer x => !x -> !Network xs -> Network (x : xs)
infixr 5 :~~
-- | A network that defines a specific sequence of layers with the
-- corresponding shape transformations along the network. It is an instance
-- of a Network: given a Network and an initial Shape, this type structure
-- can be generated automatically using the type functions defined in
-- this module, such as Out and MkINetwork.
data INetwork :: [Type] -> [Shape] -> Type
[INNil] :: SingI i => INetwork '[] '[i]
[:~>] :: (SingI i, SingI h, Layer x) => !x -> !INetwork xs (h : hs) -> INetwork (x : xs) (i : (h : hs))
infixr 5 :~>
-- | Returns the result of applying all the layers' transformations to a
-- specific shape. Given a list of layers, this returns the expected
-- output of the computation, with the first layer transforming the
-- Shape s. For example, if the initial Shape is [28, 28] and the layers
-- are [Relu, Flatten], the result will be [784].
type family ComputeOut (layers :: [Type]) (s :: Shape) :: Shape
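-- | A sketch of the example above at the type level (Refl only typechecks if
-- the family really reduces as documented; module imports are assumptions):
--
-- ```
-- {-# LANGUAGE DataKinds, TypeOperators #-}
-- import Data.Type.Equality ((:~:) (..))
-- import TensorSafe.Layers (Flatten, Relu)
-- import TensorSafe.Network (ComputeOut)
-- import TensorSafe.Shape (Shape (..))
--
-- -- [28, 28] through [Relu, Flatten] should give [784]
-- computeOutExample :: ComputeOut '[Relu, Flatten] ('D2 28 28) :~: 'D1 784
-- computeOutExample = Refl
-- ```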
-- | Returns a list of shapes describing ALL the transformations applied to
-- a specific shape. Given a list of layers, this returns a type with all
-- the Shapes from the initial Shape to the last one. The last Shape
-- should be the same as the result of applying ComputeOut to the same
-- parameters.
type family ComposeOut' (layers :: [Type]) (s :: Shape) :: [Shape]
-- | Same as ComposeOut', but the Shape list includes the initial Shape
type family ComposeOut (layers :: [Type]) (s :: Shape) :: [Shape]
-- | Compares the layers' shape computation with the expected output
type family ValidateOutput (layers :: [Type]) (sIn :: Shape) (sOut :: Shape) :: Bool
-- | Creates an INetwork type; "unconstrained" means that the expected
-- output is not checked
type family MkINetworkUnconstrained (layers :: [Type]) (s :: Shape) :: Type
-- | If the second type argument is 'True, this returns the type t;
-- otherwise it returns a default type. Note that, when used together with
-- ValidateOutput, a type error is raised if the expected output and the
-- actual one do not match.
type family MaybeType (t :: Type) (b :: Bool) :: Type
-- | Creates an INetwork type, validating that the expected output and the
-- computed one match.
type family MkINetwork (layers :: [Type]) (sIn :: Shape) (sOut :: Shape) :: Type
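-- | A sketch of defining a validated network type and instantiating it
-- (assuming the usual shape semantics: Flatten maps 'D2 28 28 to 'D1 784 and
-- Dense 784 10 maps that to 'D1 10; compare myNet3 in
-- TensorSafe.Examples.SimpleExample):
--
-- ```
-- {-# LANGUAGE DataKinds #-}
-- import TensorSafe.Layers (Dense, Flatten, Sigmoid)
-- import TensorSafe.Network (MkINetwork, ValidNetwork (..))
-- import TensorSafe.Shape (Shape (..))
--
-- -- rejected at compile time if the layers do not map 'D2 28 28 to 'D1 10
-- type SmallNet = MkINetwork '[Flatten, Dense 784 10, Sigmoid] ('D2 28 28) ('D1 10)
--
-- smallNet :: SmallNet
-- smallNet = mkINetwork
-- ```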
type family MaybeShape (s :: Shape) (b :: Bool) :: Shape
type family Add' (layers :: [Type]) (layers2 :: [Type]) (shape :: Shape)
-- | Defines the expected output of a layer. This type function should be
-- instantiated for each of the defined Layers.
type family Out (l :: Type) (s :: Shape) :: Shape
-- | Instantiates a Network after its type has been defined, using
-- MkINetworkUnconstrained or MkINetwork, for example. After declaring a
-- variable with an INetwork type, you can instantiate that variable like
-- this: ``` myNet :: MNIST myNet = mkINetwork ```
class ValidNetwork (xs :: [Type]) (ss :: [Shape])
-- | Makes a valid instance of INetwork
mkINetwork :: ValidNetwork xs ss => INetwork xs ss
-- | Compilation: gets the initial shape using Singleton instances. Since
-- this is the function we run to transform an INetwork into a CNetwork,
-- the nested argument of toCNetwork' is set to False.
toCNetwork :: forall i x xs ss. (SingI i, Layer x, ValidNetwork (x : xs) (i : ss)) => INetwork (x : xs) (i : ss) -> CNetwork
-- | Helper function for toCNetwork
toCNetwork' :: INetwork xs ss -> Bool -> Maybe String -> CNetwork
instance TensorSafe.Network.ValidNetwork ls ss => TensorSafe.Layer.Layer (TensorSafe.Network.INetwork ls ss)
instance Data.Singletons.Internal.SingI i => TensorSafe.Network.ValidNetwork '[] '[i]
instance (Data.Singletons.Internal.SingI i, Data.Singletons.Internal.SingI o, TensorSafe.Layer.Layer x, TensorSafe.Network.ValidNetwork xs (o : rs), TensorSafe.Network.Out x i Data.Type.Equality.~ o) => TensorSafe.Network.ValidNetwork (x : xs) (i : o : rs)
instance GHC.Show.Show (TensorSafe.Network.INetwork '[] '[i])
instance (GHC.Show.Show x, GHC.Show.Show (TensorSafe.Network.INetwork xs rs)) => GHC.Show.Show (TensorSafe.Network.INetwork (x : xs) (i : rs))
instance GHC.Show.Show (TensorSafe.Network.Network '[])
instance (GHC.Show.Show x, GHC.Show.Show (TensorSafe.Network.Network xs)) => GHC.Show.Show (TensorSafe.Network.Network (x : xs))
-- | This module declares what is exposed when using TensorSafe as an API.
module TensorSafe
-- | Support for JavaScript compilation
data JavaScript
JavaScript :: JavaScript
-- | Support for Python compilation
data Python
Python :: Python
-- | Adds support for a language: generates Text from a CNetwork
generate :: Generator l => l -> CNetwork -> Text
-- | Similar to generate, but also adds the header and module lines needed
-- to compile the CNetwork as a separate file.
generateFile :: Generator l => l -> CNetwork -> Text
-- | A network that defines a specific sequence of layers with the
-- corresponding shape transformations along the network. It is an instance
-- of a Network: given a Network and an initial Shape, this type structure
-- can be generated automatically using the type functions defined in
-- TensorSafe.Network, such as Out and MkINetwork.
data INetwork :: [Type] -> [Shape] -> Type
-- | Creates an INetwork type, validating that the expected output and the
-- computed one match.
type family MkINetwork (layers :: [Type]) (sIn :: Shape) (sOut :: Shape) :: Type
-- | Makes a valid instance of INetwork
mkINetwork :: ValidNetwork xs ss => INetwork xs ss
-- | Compilation: gets the initial shape using Singleton instances. Since
-- this is the function we run to transform an INetwork into a CNetwork,
-- the nested argument of toCNetwork' is set to False.
toCNetwork :: forall i x xs ss. (SingI i, Layer x, ValidNetwork (x : xs) (i : ss)) => INetwork (x : xs) (i : ss) -> CNetwork
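-- | An end-to-end sketch through the exposed API, mirroring myNet3 from
-- TensorSafe.Examples.SimpleExample: define a validated network type,
-- instantiate it, lower it to a CNetwork and render JavaScript (Data.Text.IO
-- is used only to print the result):
--
-- ```
-- {-# LANGUAGE DataKinds #-}
-- import qualified Data.Text.IO as TIO
-- import TensorSafe
-- import TensorSafe.Layers (Dense, Flatten, MaxPooling, Relu, Sigmoid)
-- import TensorSafe.Shape (Shape (..))
--
-- type Net = MkINetwork '[MaxPooling 2 2 2 2, Flatten, Dense 196 10, Sigmoid, Relu]
--                       ('D2 28 28) ('D1 10)
--
-- main :: IO ()
-- main = do
--   let net = mkINetwork :: Net
--   TIO.putStrLn (generateFile JavaScript (toCNetwork net))
-- ```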
-- | This module implements a very simple example of a deep neural network.
module TensorSafe.Examples.SimpleExample
-- | Simple network example
myNet :: MyNet
-- | Simple network example
myNet2 :: MyNet2
-- | Simple network example
myNet3 :: MkINetwork '[MaxPooling 2 2 2 2, Flatten, Dense 196 10, Sigmoid, Relu] ( 'D2 28 28) ( 'D1 10)
-- | Simple LSTM network example
lstm :: MyLSTM
-- | This module implements the ResNet50 model using Convs with
-- BatchNormalization. This implementation is based on the Keras
-- application implementation:
-- https://github.com/keras-team/keras-applications/blob/master/keras_applications/resnet50.py
module TensorSafe.Examples.ResNet50Example
type IdentityBlock channels kernel_size filters1 filters2 filters3 = '[Conv2D channels filters1 1 1 1 1, BatchNormalization 3 99 1, Relu, Conv2D filters1 filters2 kernel_size kernel_size 1 1, BatchNormalization 3 99 1, ZeroPadding2D 1 1, Relu, Conv2D filters2 filters3 1 1 1 1, BatchNormalization 3 99 1]
type Shortcut channels stride_size filters3 = '[Conv2D channels filters3 1 1 stride_size stride_size, BatchNormalization 3 99 1]
type ConvBlock channels kernel_size stride_size filters1 filters2 filters3 = '[Conv2D channels filters1 1 1 stride_size stride_size, BatchNormalization 3 99 1, Relu, Conv2D filters1 filters2 kernel_size kernel_size 1 1, ZeroPadding2D 1 1, BatchNormalization 3 99 1, Relu, Conv2D filters2 filters3 1 1 1 1, BatchNormalization 3 99 1]
type ResNet50 img_size channels = MkINetwork '[Input, ZeroPadding2D 3 3, Conv2D channels 64 7 7 2 2, BatchNormalization 3 99 1, Relu, ZeroPadding2D 1 1, MaxPooling 3 3 2 2, Add (ConvBlock 64 3 1 64 64 256) (Shortcut 64 1 256), Relu, Add (IdentityBlock 256 3 64 64 256) '[Input], Relu, Add (IdentityBlock 256 3 64 64 256) '[Input], Relu, Add (ConvBlock 256 3 1 128 128 512) (Shortcut 256 1 512), Relu, Add (IdentityBlock 512 3 128 128 512) '[Input], Relu, Add (IdentityBlock 512 3 128 128 512) '[Input], Relu, Add (IdentityBlock 512 3 128 128 512) '[Input], Relu, Add (ConvBlock 512 3 1 256 256 1024) (Shortcut 512 1 1024), Relu, Add (IdentityBlock 1024 3 256 256 1024) '[Input], Relu, Add (IdentityBlock 1024 3 256 256 1024) '[Input], Relu, Add (IdentityBlock 1024 3 256 256 1024) '[Input], Relu, Add (IdentityBlock 1024 3 256 256 1024) '[Input], Relu, Add (IdentityBlock 1024 3 256 256 1024) '[Input], Relu, Add (ConvBlock 1024 3 1 512 512 2048) (Shortcut 1024 1 2048), Relu, Add (IdentityBlock 2048 3 512 512 2048) '[Input], Relu, Add (IdentityBlock 2048 3 512 512 2048) '[Input], Relu, GlobalAvgPooling2D, Dense 2048 1000] ( 'D3 img_size img_size channels) ( 'D1 1000)
resnet50 :: ResNet50 224 1
-- | This module implements the MNIST examples using Convs and Dense
-- layers.
module TensorSafe.Examples.MnistExample
-- | MNIST implementation using Convolutional layers
mnist :: MNIST
-- | MNIST implementation using just Dense layers
mnistDense :: MNISTDense
-- | This module wraps all examples in simple functions.
module TensorSafe.Examples.Examples
-- | Puts MNIST examples results to stdout
mnistExample :: IO ()
-- | Puts MNIST Dense examples results to stdout
mnistExampleDense :: IO ()
-- | Puts simple examples results to stdout
simpleExample :: IO ()
-- | This module implements the examples command for TensorSafe.
module TensorSafe.Commands.Examples
-- | Outputs to stdout the results of the examples
examples :: IO ()