{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}

-- Derived from AWS service descriptions, licensed under Apache 2.0.

-- |
-- Module      : Amazonka.SageMaker.Types.TransformInput
-- Copyright   : (c) 2013-2023 Brendan Hay
-- License     : Mozilla Public License, v. 2.0.
-- Maintainer  : Brendan Hay
-- Stability   : auto-generated
-- Portability : non-portable (GHC extensions)
module Amazonka.SageMaker.Types.TransformInput where

import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import Amazonka.SageMaker.Types.CompressionType
import Amazonka.SageMaker.Types.SplitType
import Amazonka.SageMaker.Types.TransformDataSource

-- | Describes the input source of a transform job and the way the transform
-- job consumes it.
--
-- /See:/ 'newTransformInput' smart constructor.
data TransformInput = TransformInput'
  { -- | If your transform data is compressed, specify the compression type.
    -- Amazon SageMaker automatically decompresses the data for the transform
    -- job accordingly. The default value is @None@.
    compressionType :: Prelude.Maybe CompressionType,
    -- | The multipurpose internet mail extension (MIME) type of the data. Amazon
    -- SageMaker uses the MIME type with each HTTP call to transfer data to the
    -- transform job.
    contentType :: Prelude.Maybe Prelude.Text,
    -- | The method to use to split the transform job\'s data files into smaller
    -- batches. Splitting is necessary when the total size of each object is
    -- too large to fit in a single request. You can also use data splitting to
    -- improve performance by processing multiple concurrent mini-batches. The
    -- default value for @SplitType@ is @None@, which indicates that input data
    -- files are not split, and request payloads contain the entire contents of
    -- an input object. Set the value of this parameter to @Line@ to split
    -- records on a newline character boundary. @SplitType@ also supports a
    -- number of record-oriented binary data formats. Currently, the supported
    -- record formats are:
    --
    -- -   RecordIO
    --
    -- -   TFRecord
    --
    -- When splitting is enabled, the size of a mini-batch depends on the
    -- values of the @BatchStrategy@ and @MaxPayloadInMB@ parameters. When the
    -- value of @BatchStrategy@ is @MultiRecord@, Amazon SageMaker sends the
    -- maximum number of records in each request, up to the @MaxPayloadInMB@
    -- limit. If the value of @BatchStrategy@ is @SingleRecord@, Amazon
    -- SageMaker sends individual records in each request.
    --
    -- Some data formats represent a record as a binary payload wrapped with
    -- extra padding bytes. When splitting is applied to a binary data format,
    -- padding is removed if the value of @BatchStrategy@ is set to
    -- @SingleRecord@. Padding is not removed if the value of @BatchStrategy@
    -- is set to @MultiRecord@.
    --
    -- For more information about @RecordIO@, see
    -- <https://mxnet.apache.org/api/faq/recordio Create a Dataset Using RecordIO>
    -- in the MXNet documentation. For more information about @TFRecord@, see
    -- <https://www.tensorflow.org/guide/data#consuming_tfrecord_data Consuming TFRecord data>
    -- in the TensorFlow documentation.
    splitType :: Prelude.Maybe SplitType,
    -- | Describes the location of the channel data, that is, the S3 location of
    -- the input data that the model can consume.
    dataSource :: TransformDataSource
  }
  deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)

-- |
-- Create a value of 'TransformInput' with all optional fields omitted.
--
-- Use <https://hackage.haskell.org/package/generic-lens generic-lens> or <https://hackage.haskell.org/package/optics optics> to modify other optional fields.
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
-- 'compressionType', 'transformInput_compressionType' - If your transform data is compressed, specify the compression type.
-- Amazon SageMaker automatically decompresses the data for the transform
-- job accordingly. The default value is @None@.
--
-- 'contentType', 'transformInput_contentType' - The multipurpose internet mail extension (MIME) type of the data. Amazon
-- SageMaker uses the MIME type with each HTTP call to transfer data to the
-- transform job.
--
-- 'splitType', 'transformInput_splitType' - The method to use to split the transform job\'s data files into smaller
-- batches. Splitting is necessary when the total size of each object is
-- too large to fit in a single request. You can also use data splitting to
-- improve performance by processing multiple concurrent mini-batches. The
-- default value for @SplitType@ is @None@, which indicates that input data
-- files are not split, and request payloads contain the entire contents of
-- an input object. Set the value of this parameter to @Line@ to split
-- records on a newline character boundary. @SplitType@ also supports a
-- number of record-oriented binary data formats. Currently, the supported
-- record formats are:
--
-- -   RecordIO
--
-- -   TFRecord
--
-- When splitting is enabled, the size of a mini-batch depends on the
-- values of the @BatchStrategy@ and @MaxPayloadInMB@ parameters. When the
-- value of @BatchStrategy@ is @MultiRecord@, Amazon SageMaker sends the
-- maximum number of records in each request, up to the @MaxPayloadInMB@
-- limit. If the value of @BatchStrategy@ is @SingleRecord@, Amazon
-- SageMaker sends individual records in each request.
--
-- Some data formats represent a record as a binary payload wrapped with
-- extra padding bytes. When splitting is applied to a binary data format,
-- padding is removed if the value of @BatchStrategy@ is set to
-- @SingleRecord@. Padding is not removed if the value of @BatchStrategy@
-- is set to @MultiRecord@.
--
-- For more information about @RecordIO@, see
-- <https://mxnet.apache.org/api/faq/recordio Create a Dataset Using RecordIO>
-- in the MXNet documentation. For more information about @TFRecord@, see
-- <https://www.tensorflow.org/guide/data#consuming_tfrecord_data Consuming TFRecord data>
-- in the TensorFlow documentation.
--
-- 'dataSource', 'transformInput_dataSource' - Describes the location of the channel data, that is, the S3 location of
-- the input data that the model can consume.
newTransformInput ::
  -- | 'dataSource'
  TransformDataSource ->
  TransformInput
newTransformInput pDataSource_ =
  TransformInput'
    { compressionType = Prelude.Nothing,
      contentType = Prelude.Nothing,
      splitType = Prelude.Nothing,
      dataSource = pDataSource_
    }
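
-- A minimal construction sketch: start from 'newTransformInput' and fill the
-- optional fields with a plain record update. @SplitType_Line@ and
-- @CompressionType_Gzip@ are assumed constructor names, following the usual
-- amazonka enum naming; check the generated 'SplitType' and 'CompressionType'
-- modules.
--
-- @
-- exampleInput :: TransformDataSource -> TransformInput
-- exampleInput ds =
--   (newTransformInput ds)
--     { contentType = Prelude.Just "text\/csv",
--       splitType = Prelude.Just SplitType_Line,
--       compressionType = Prelude.Just CompressionType_Gzip
--     }
-- @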

-- | If your transform data is compressed, specify the compression type.
-- Amazon SageMaker automatically decompresses the data for the transform
-- job accordingly. The default value is @None@.
transformInput_compressionType :: Lens.Lens' TransformInput (Prelude.Maybe CompressionType)
transformInput_compressionType = Lens.lens (\TransformInput' {compressionType} -> compressionType) (\s@TransformInput' {} a -> s {compressionType = a} :: TransformInput)

-- | The multipurpose internet mail extension (MIME) type of the data. Amazon
-- SageMaker uses the MIME type with each HTTP call to transfer data to the
-- transform job.
transformInput_contentType :: Lens.Lens' TransformInput (Prelude.Maybe Prelude.Text)
transformInput_contentType = Lens.lens (\TransformInput' {contentType} -> contentType) (\s@TransformInput' {} a -> s {contentType = a} :: TransformInput)

-- | The method to use to split the transform job\'s data files into smaller
-- batches. Splitting is necessary when the total size of each object is
-- too large to fit in a single request. You can also use data splitting to
-- improve performance by processing multiple concurrent mini-batches. The
-- default value for @SplitType@ is @None@, which indicates that input data
-- files are not split, and request payloads contain the entire contents of
-- an input object. Set the value of this parameter to @Line@ to split
-- records on a newline character boundary. @SplitType@ also supports a
-- number of record-oriented binary data formats. Currently, the supported
-- record formats are:
--
-- -   RecordIO
--
-- -   TFRecord
--
-- When splitting is enabled, the size of a mini-batch depends on the
-- values of the @BatchStrategy@ and @MaxPayloadInMB@ parameters. When the
-- value of @BatchStrategy@ is @MultiRecord@, Amazon SageMaker sends the
-- maximum number of records in each request, up to the @MaxPayloadInMB@
-- limit. If the value of @BatchStrategy@ is @SingleRecord@, Amazon
-- SageMaker sends individual records in each request.
--
-- Some data formats represent a record as a binary payload wrapped with
-- extra padding bytes. When splitting is applied to a binary data format,
-- padding is removed if the value of @BatchStrategy@ is set to
-- @SingleRecord@. Padding is not removed if the value of @BatchStrategy@
-- is set to @MultiRecord@.
--
-- For more information about @RecordIO@, see
-- <https://mxnet.apache.org/api/faq/recordio Create a Dataset Using RecordIO>
-- in the MXNet documentation. For more information about @TFRecord@, see
-- <https://www.tensorflow.org/guide/data#consuming_tfrecord_data Consuming TFRecord data>
-- in the TensorFlow documentation.
transformInput_splitType :: Lens.Lens' TransformInput (Prelude.Maybe SplitType)
transformInput_splitType = Lens.lens (\TransformInput' {splitType} -> splitType) (\s@TransformInput' {} a -> s {splitType = a} :: TransformInput)

-- | Describes the location of the channel data, that is, the S3 location of
-- the input data that the model can consume.
transformInput_dataSource :: Lens.Lens' TransformInput TransformDataSource
transformInput_dataSource = Lens.lens (\TransformInput' {dataSource} -> dataSource) (\s@TransformInput' {} a -> s {dataSource = a} :: TransformInput)

instance Data.FromJSON TransformInput where
  parseJSON =
    Data.withObject
      "TransformInput"
      ( \x ->
          TransformInput'
            Prelude.<$> (x Data..:? "CompressionType")
            Prelude.<*> (x Data..:? "ContentType")
            Prelude.<*> (x Data..:? "SplitType")
            Prelude.<*> (x Data..: "DataSource")
      )
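
-- Note that only @DataSource@ is parsed with 'Data..:', making it the sole
-- required key; the other three fields use 'Data..:?' and default to
-- 'Prelude.Nothing' when absent. A minimal document this parser accepts,
-- assuming the wire shape of 'TransformDataSource' (an @S3DataSource@ object
-- with @S3DataType@ and @S3Uri@ keys):
--
-- @
-- { "DataSource":
--     { "S3DataSource":
--         { "S3DataType": "S3Prefix", "S3Uri": "s3:\/\/my-bucket\/input\/" }
--     }
-- }
-- @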

instance Prelude.Hashable TransformInput where
  hashWithSalt _salt TransformInput' {..} =
    _salt
      `Prelude.hashWithSalt` compressionType
      `Prelude.hashWithSalt` contentType
      `Prelude.hashWithSalt` splitType
      `Prelude.hashWithSalt` dataSource

instance Prelude.NFData TransformInput where
  rnf TransformInput' {..} =
    Prelude.rnf compressionType
      `Prelude.seq` Prelude.rnf contentType
      `Prelude.seq` Prelude.rnf splitType
      `Prelude.seq` Prelude.rnf dataSource

instance Data.ToJSON TransformInput where
  toJSON TransformInput' {..} =
    Data.object
      ( Prelude.catMaybes
          [ ("CompressionType" Data..=)
              Prelude.<$> compressionType,
            ("ContentType" Data..=) Prelude.<$> contentType,
            ("SplitType" Data..=) Prelude.<$> splitType,
            Prelude.Just ("DataSource" Data..= dataSource)
          ]
      )
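
-- Since the three @Maybe@ fields pass through 'Prelude.catMaybes', 'toJSON'
-- omits any optional field that is 'Prelude.Nothing'. A fully populated value
-- serializes along these lines; the enum spellings @Gzip@ and @Line@ are
-- assumed from the SageMaker API, and the nested @DataSource@ shape comes
-- from 'TransformDataSource':
--
-- @
-- { "CompressionType": "Gzip",
--   "ContentType": "text\/csv",
--   "SplitType": "Line",
--   "DataSource":
--     { "S3DataSource":
--         { "S3DataType": "S3Prefix", "S3Uri": "s3:\/\/my-bucket\/input\/" }
--     }
-- }
-- @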