{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}

-- Derived from AWS service descriptions, licensed under Apache 2.0.

-- |
-- Module      : Amazonka.Kendra.Types.WebCrawlerConfiguration
-- Copyright   : (c) 2013-2023 Brendan Hay
-- License     : Mozilla Public License, v. 2.0.
-- Maintainer  : Brendan Hay
-- Stability   : auto-generated
-- Portability : non-portable (GHC extensions)
module Amazonka.Kendra.Types.WebCrawlerConfiguration where

import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import Amazonka.Kendra.Types.AuthenticationConfiguration
import Amazonka.Kendra.Types.ProxyConfiguration
import Amazonka.Kendra.Types.Urls
import qualified Amazonka.Prelude as Prelude

-- | Provides the configuration information required for Amazon Kendra Web
-- Crawler.
--
-- /See:/ 'newWebCrawlerConfiguration' smart constructor.
data WebCrawlerConfiguration = WebCrawlerConfiguration'
  { -- | Configuration information required to connect to websites using
    -- authentication.
    --
    -- You can connect to websites using basic authentication of user name and
    -- password. You use a secret in
    -- <https://docs.aws.amazon.com/secretsmanager/latest/userguide/intro.html Secrets Manager>
    -- to store your authentication credentials.
    --
    -- You must provide the website host name and port number. For example, the
    -- host name of https:\/\/a.example.com\/page1.html is \"a.example.com\"
    -- and the port is 443, the standard port for HTTPS.
    authenticationConfiguration :: Prelude.Maybe AuthenticationConfiguration,
    -- | Specifies the number of levels in a website that you want to crawl.
    --
    -- The first level begins from the website seed or starting point URL. For
    -- example, if a website has 3 levels – index level (i.e. seed in this
    -- example), sections level, and subsections level – and you are only
    -- interested in crawling information up to the sections level (i.e. levels
    -- 0-1), you can set your depth to 1.
    --
    -- The default crawl depth is set to 2.
    crawlDepth :: Prelude.Maybe Prelude.Natural,
    -- | The maximum size (in MB) of a webpage or attachment to crawl.
    --
    -- Files larger than this size (in MB) are skipped\/not crawled.
    --
    -- The default maximum size of a webpage or attachment is set to 50 MB.
    maxContentSizePerPageInMegaBytes :: Prelude.Maybe Prelude.Double,
    -- | The maximum number of URLs on a webpage to include when crawling a
    -- website. This number is per webpage.
    --
    -- As a website’s webpages are crawled, any URLs the webpages link to are
    -- also crawled. URLs on a webpage are crawled in order of appearance.
    --
    -- The default maximum links per page is 100.
    maxLinksPerPage :: Prelude.Maybe Prelude.Natural,
    -- | The maximum number of URLs crawled per website host per minute.
    --
    -- A minimum of one URL is required.
    --
    -- The default maximum number of URLs crawled per website host per minute
    -- is 300.
    maxUrlsPerMinuteCrawlRate :: Prelude.Maybe Prelude.Natural,
    -- | Configuration information required to connect to your internal websites
    -- via a web proxy.
    --
    -- You must provide the website host name and port number. For example, the
    -- host name of https:\/\/a.example.com\/page1.html is \"a.example.com\"
    -- and the port is 443, the standard port for HTTPS.
    --
    -- Web proxy credentials are optional and you can use them to connect to a
    -- web proxy server that requires basic authentication. To store web proxy
    -- credentials, you use a secret in
    -- <https://docs.aws.amazon.com/secretsmanager/latest/userguide/intro.html Secrets Manager>.
    proxyConfiguration :: Prelude.Maybe ProxyConfiguration,
    -- | A list of regular expression patterns to exclude certain URLs from crawling.
    -- URLs that match the patterns are excluded from the index. URLs that
    -- don\'t match the patterns are included in the index. If a URL matches
    -- both an inclusion and exclusion pattern, the exclusion pattern takes
    -- precedence and the URL file isn\'t included in the index.
    urlExclusionPatterns :: Prelude.Maybe [Prelude.Text],
    -- | A list of regular expression patterns to include certain URLs in the crawl.
    -- URLs that match the patterns are included in the index. URLs that don\'t
    -- match the patterns are excluded from the index. If a URL matches both an
    -- inclusion and exclusion pattern, the exclusion pattern takes precedence
    -- and the URL file isn\'t included in the index.
    urlInclusionPatterns :: Prelude.Maybe [Prelude.Text],
    -- | Specifies the seed or starting point URLs of the websites or the sitemap
    -- URLs of the websites you want to crawl.
    --
    -- You can include website subdomains. You can list up to 100 seed URLs and
    -- up to three sitemap URLs.
    --
    -- You can only crawl websites that use the secure communication protocol,
    -- Hypertext Transfer Protocol Secure (HTTPS). If you receive an error when
    -- crawling a website, it could be that the website is blocked from
    -- crawling.
    --
    -- /When selecting websites to index, you must adhere to the
    -- <https://aws.amazon.com/aup/ Amazon Acceptable Use Policy> and all other
    -- Amazon terms. Remember that you must only use Amazon Kendra Web Crawler
    -- to index your own webpages, or webpages that you have authorization to
    -- index./
    urls :: Urls
  }
  deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)

-- |
-- Create a value of 'WebCrawlerConfiguration' with all optional fields omitted.
--
-- Use <https://hackage.haskell.org/package/generic-lens generic-lens> or <https://hackage.haskell.org/package/optics optics> to modify other optional fields.
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
-- 'authenticationConfiguration', 'webCrawlerConfiguration_authenticationConfiguration' - Configuration information required to connect to websites using
-- authentication.
--
-- You can connect to websites using basic authentication of user name and
-- password. You use a secret in
-- <https://docs.aws.amazon.com/secretsmanager/latest/userguide/intro.html Secrets Manager>
-- to store your authentication credentials.
--
-- You must provide the website host name and port number. For example, the
-- host name of https:\/\/a.example.com\/page1.html is \"a.example.com\"
-- and the port is 443, the standard port for HTTPS.
--
-- 'crawlDepth', 'webCrawlerConfiguration_crawlDepth' - Specifies the number of levels in a website that you want to crawl.
--
-- The first level begins from the website seed or starting point URL. For
-- example, if a website has 3 levels – index level (i.e. seed in this
-- example), sections level, and subsections level – and you are only
-- interested in crawling information up to the sections level (i.e. levels
-- 0-1), you can set your depth to 1.
--
-- The default crawl depth is set to 2.
--
-- 'maxContentSizePerPageInMegaBytes', 'webCrawlerConfiguration_maxContentSizePerPageInMegaBytes' - The maximum size (in MB) of a webpage or attachment to crawl.
--
-- Files larger than this size (in MB) are skipped\/not crawled.
--
-- The default maximum size of a webpage or attachment is set to 50 MB.
--
-- 'maxLinksPerPage', 'webCrawlerConfiguration_maxLinksPerPage' - The maximum number of URLs on a webpage to include when crawling a
-- website. This number is per webpage.
--
-- As a website’s webpages are crawled, any URLs the webpages link to are
-- also crawled. URLs on a webpage are crawled in order of appearance.
--
-- The default maximum links per page is 100.
--
-- 'maxUrlsPerMinuteCrawlRate', 'webCrawlerConfiguration_maxUrlsPerMinuteCrawlRate' - The maximum number of URLs crawled per website host per minute.
--
-- A minimum of one URL is required.
--
-- The default maximum number of URLs crawled per website host per minute
-- is 300.
--
-- 'proxyConfiguration', 'webCrawlerConfiguration_proxyConfiguration' - Configuration information required to connect to your internal websites
-- via a web proxy.
--
-- You must provide the website host name and port number. For example, the
-- host name of https:\/\/a.example.com\/page1.html is \"a.example.com\"
-- and the port is 443, the standard port for HTTPS.
--
-- Web proxy credentials are optional and you can use them to connect to a
-- web proxy server that requires basic authentication. To store web proxy
-- credentials, you use a secret in
-- <https://docs.aws.amazon.com/secretsmanager/latest/userguide/intro.html Secrets Manager>.
--
-- 'urlExclusionPatterns', 'webCrawlerConfiguration_urlExclusionPatterns' - A list of regular expression patterns to exclude certain URLs from crawling.
-- URLs that match the patterns are excluded from the index. URLs that
-- don\'t match the patterns are included in the index. If a URL matches
-- both an inclusion and exclusion pattern, the exclusion pattern takes
-- precedence and the URL file isn\'t included in the index.
--
-- 'urlInclusionPatterns', 'webCrawlerConfiguration_urlInclusionPatterns' - A list of regular expression patterns to include certain URLs in the crawl.
-- URLs that match the patterns are included in the index. URLs that don\'t
-- match the patterns are excluded from the index. If a URL matches both an
-- inclusion and exclusion pattern, the exclusion pattern takes precedence
-- and the URL file isn\'t included in the index.
--
-- 'urls', 'webCrawlerConfiguration_urls' - Specifies the seed or starting point URLs of the websites or the sitemap
-- URLs of the websites you want to crawl.
--
-- You can include website subdomains. You can list up to 100 seed URLs and
-- up to three sitemap URLs.
--
-- You can only crawl websites that use the secure communication protocol,
-- Hypertext Transfer Protocol Secure (HTTPS). If you receive an error when
-- crawling a website, it could be that the website is blocked from
-- crawling.
--
-- /When selecting websites to index, you must adhere to the
-- <https://aws.amazon.com/aup/ Amazon Acceptable Use Policy> and all other
-- Amazon terms. Remember that you must only use Amazon Kendra Web Crawler
-- to index your own webpages, or webpages that you have authorization to
-- index./
newWebCrawlerConfiguration ::
  -- | 'urls'
  Urls ->
  WebCrawlerConfiguration
newWebCrawlerConfiguration pUrls_ =
  WebCrawlerConfiguration'
    { authenticationConfiguration =
        Prelude.Nothing,
      crawlDepth = Prelude.Nothing,
      maxContentSizePerPageInMegaBytes = Prelude.Nothing,
      maxLinksPerPage = Prelude.Nothing,
      maxUrlsPerMinuteCrawlRate = Prelude.Nothing,
      proxyConfiguration = Prelude.Nothing,
      urlExclusionPatterns = Prelude.Nothing,
      urlInclusionPatterns = Prelude.Nothing,
      urls = pUrls_
    }
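
-- A construction sketch (not part of the generated module). It assumes
-- 'newUrls' from "Amazonka.Kendra.Types.Urls", which builds a 'Urls' value
-- with all optional fields omitted, and overrides two defaults via record
-- update:
--
-- @
-- import Amazonka.Kendra.Types.Urls (newUrls)
--
-- exampleConfig :: WebCrawlerConfiguration
-- exampleConfig =
--   (newWebCrawlerConfiguration newUrls)
--     { crawlDepth = Prelude.Just 1,       -- crawl the seed level plus one level down
--       maxLinksPerPage = Prelude.Just 50  -- follow at most 50 links per page
--     }
-- @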

-- | Configuration information required to connect to websites using
-- authentication.
--
-- You can connect to websites using basic authentication of user name and
-- password. You use a secret in
-- <https://docs.aws.amazon.com/secretsmanager/latest/userguide/intro.html Secrets Manager>
-- to store your authentication credentials.
--
-- You must provide the website host name and port number. For example, the
-- host name of https:\/\/a.example.com\/page1.html is \"a.example.com\"
-- and the port is 443, the standard port for HTTPS.
webCrawlerConfiguration_authenticationConfiguration :: Lens.Lens' WebCrawlerConfiguration (Prelude.Maybe AuthenticationConfiguration)
webCrawlerConfiguration_authenticationConfiguration = Lens.lens (\WebCrawlerConfiguration' {authenticationConfiguration} -> authenticationConfiguration) (\s@WebCrawlerConfiguration' {} a -> s {authenticationConfiguration = a} :: WebCrawlerConfiguration)

-- | Specifies the number of levels in a website that you want to crawl.
--
-- The first level begins from the website seed or starting point URL. For
-- example, if a website has 3 levels – index level (i.e. seed in this
-- example), sections level, and subsections level – and you are only
-- interested in crawling information up to the sections level (i.e. levels
-- 0-1), you can set your depth to 1.
--
-- The default crawl depth is set to 2.
webCrawlerConfiguration_crawlDepth :: Lens.Lens' WebCrawlerConfiguration (Prelude.Maybe Prelude.Natural)
webCrawlerConfiguration_crawlDepth = Lens.lens (\WebCrawlerConfiguration' {crawlDepth} -> crawlDepth) (\s@WebCrawlerConfiguration' {} a -> s {crawlDepth = a} :: WebCrawlerConfiguration)
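
-- A usage sketch for the generated lenses, assuming the @(&)@ and @(?~)@
-- operators from the lens (or microlens) package:
--
-- @
-- import Control.Lens ((&), (?~))
--
-- -- Limit the crawl to the seed pages plus one level of links.
-- shallowCrawl :: WebCrawlerConfiguration -> WebCrawlerConfiguration
-- shallowCrawl cfg = cfg & webCrawlerConfiguration_crawlDepth ?~ 1
-- @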

-- | The maximum size (in MB) of a webpage or attachment to crawl.
--
-- Files larger than this size (in MB) are skipped\/not crawled.
--
-- The default maximum size of a webpage or attachment is set to 50 MB.
webCrawlerConfiguration_maxContentSizePerPageInMegaBytes :: Lens.Lens' WebCrawlerConfiguration (Prelude.Maybe Prelude.Double)
webCrawlerConfiguration_maxContentSizePerPageInMegaBytes = Lens.lens (\WebCrawlerConfiguration' {maxContentSizePerPageInMegaBytes} -> maxContentSizePerPageInMegaBytes) (\s@WebCrawlerConfiguration' {} a -> s {maxContentSizePerPageInMegaBytes = a} :: WebCrawlerConfiguration)

-- | The maximum number of URLs on a webpage to include when crawling a
-- website. This number is per webpage.
--
-- As a website’s webpages are crawled, any URLs the webpages link to are
-- also crawled. URLs on a webpage are crawled in order of appearance.
--
-- The default maximum links per page is 100.
webCrawlerConfiguration_maxLinksPerPage :: Lens.Lens' WebCrawlerConfiguration (Prelude.Maybe Prelude.Natural)
webCrawlerConfiguration_maxLinksPerPage = Lens.lens (\WebCrawlerConfiguration' {maxLinksPerPage} -> maxLinksPerPage) (\s@WebCrawlerConfiguration' {} a -> s {maxLinksPerPage = a} :: WebCrawlerConfiguration)

-- | The maximum number of URLs crawled per website host per minute.
--
-- A minimum of one URL is required.
--
-- The default maximum number of URLs crawled per website host per minute
-- is 300.
webCrawlerConfiguration_maxUrlsPerMinuteCrawlRate :: Lens.Lens' WebCrawlerConfiguration (Prelude.Maybe Prelude.Natural)
webCrawlerConfiguration_maxUrlsPerMinuteCrawlRate = Lens.lens (\WebCrawlerConfiguration' {maxUrlsPerMinuteCrawlRate} -> maxUrlsPerMinuteCrawlRate) (\s@WebCrawlerConfiguration' {} a -> s {maxUrlsPerMinuteCrawlRate = a} :: WebCrawlerConfiguration)

-- | Configuration information required to connect to your internal websites
-- via a web proxy.
--
-- You must provide the website host name and port number. For example, the
-- host name of https:\/\/a.example.com\/page1.html is \"a.example.com\"
-- and the port is 443, the standard port for HTTPS.
--
-- Web proxy credentials are optional and you can use them to connect to a
-- web proxy server that requires basic authentication. To store web proxy
-- credentials, you use a secret in
-- <https://docs.aws.amazon.com/secretsmanager/latest/userguide/intro.html Secrets Manager>.
webCrawlerConfiguration_proxyConfiguration :: Lens.Lens' WebCrawlerConfiguration (Prelude.Maybe ProxyConfiguration)
webCrawlerConfiguration_proxyConfiguration = Lens.lens (\WebCrawlerConfiguration' {proxyConfiguration} -> proxyConfiguration) (\s@WebCrawlerConfiguration' {} a -> s {proxyConfiguration = a} :: WebCrawlerConfiguration)

-- | A list of regular expression patterns to exclude certain URLs from crawling.
-- URLs that match the patterns are excluded from the index. URLs that
-- don\'t match the patterns are included in the index. If a URL matches
-- both an inclusion and exclusion pattern, the exclusion pattern takes
-- precedence and the URL file isn\'t included in the index.
webCrawlerConfiguration_urlExclusionPatterns :: Lens.Lens' WebCrawlerConfiguration (Prelude.Maybe [Prelude.Text])
webCrawlerConfiguration_urlExclusionPatterns = Lens.lens (\WebCrawlerConfiguration' {urlExclusionPatterns} -> urlExclusionPatterns) (\s@WebCrawlerConfiguration' {} a -> s {urlExclusionPatterns = a} :: WebCrawlerConfiguration) Prelude.. Lens.mapping Lens.coerced
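
-- Note: the trailing @Prelude.. Lens.mapping Lens.coerced@ composes the plain
-- record lens with an iso that maps under the 'Prelude.Maybe' and coerces the
-- inner list, so the lens can expose the field at any representationally
-- equal list type.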

-- | A list of regular expression patterns to include certain URLs in the crawl.
-- URLs that match the patterns are included in the index. URLs that don\'t
-- match the patterns are excluded from the index. If a URL matches both an
-- inclusion and exclusion pattern, the exclusion pattern takes precedence
-- and the URL file isn\'t included in the index.
webCrawlerConfiguration_urlInclusionPatterns :: Lens.Lens' WebCrawlerConfiguration (Prelude.Maybe [Prelude.Text])
webCrawlerConfiguration_urlInclusionPatterns = Lens.lens (\WebCrawlerConfiguration' {urlInclusionPatterns} -> urlInclusionPatterns) (\s@WebCrawlerConfiguration' {} a -> s {urlInclusionPatterns = a} :: WebCrawlerConfiguration) Prelude.. Lens.mapping Lens.coerced

-- | Specifies the seed or starting point URLs of the websites or the sitemap
-- URLs of the websites you want to crawl.
--
-- You can include website subdomains. You can list up to 100 seed URLs and
-- up to three sitemap URLs.
--
-- You can only crawl websites that use the secure communication protocol,
-- Hypertext Transfer Protocol Secure (HTTPS). If you receive an error when
-- crawling a website, it could be that the website is blocked from
-- crawling.
--
-- /When selecting websites to index, you must adhere to the
-- <https://aws.amazon.com/aup/ Amazon Acceptable Use Policy> and all other
-- Amazon terms. Remember that you must only use Amazon Kendra Web Crawler
-- to index your own webpages, or webpages that you have authorization to
-- index./
webCrawlerConfiguration_urls :: Lens.Lens' WebCrawlerConfiguration Urls
webCrawlerConfiguration_urls = Lens.lens (\WebCrawlerConfiguration' {urls} -> urls) (\s@WebCrawlerConfiguration' {} a -> s {urls = a} :: WebCrawlerConfiguration)

instance Data.FromJSON WebCrawlerConfiguration where
  parseJSON =
    Data.withObject
      "WebCrawlerConfiguration"
      ( \x ->
          WebCrawlerConfiguration'
            Prelude.<$> (x Data..:? "AuthenticationConfiguration")
            Prelude.<*> (x Data..:? "CrawlDepth")
            Prelude.<*> (x Data..:? "MaxContentSizePerPageInMegaBytes")
            Prelude.<*> (x Data..:? "MaxLinksPerPage")
            Prelude.<*> (x Data..:? "MaxUrlsPerMinuteCrawlRate")
            Prelude.<*> (x Data..:? "ProxyConfiguration")
            Prelude.<*> ( x
                            Data..:? "UrlExclusionPatterns"
                            Data..!= Prelude.mempty
                        )
            Prelude.<*> ( x
                            Data..:? "UrlInclusionPatterns"
                            Data..!= Prelude.mempty
                        )
            Prelude.<*> (x Data..: "Urls")
      )
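
-- A decoding sketch, assuming aeson's 'Data.Aeson.eitherDecode' and an
-- OverloadedStrings bytestring literal. Only the @Urls@ key is required; the
-- other keys parsed above are optional:
--
-- @
-- import qualified Data.Aeson as Aeson
--
-- decoded :: Either String WebCrawlerConfiguration
-- decoded = Aeson.eitherDecode "{\"Urls\": {}, \"CrawlDepth\": 2}"
-- @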

instance Prelude.Hashable WebCrawlerConfiguration where
  hashWithSalt _salt WebCrawlerConfiguration' {..} =
    _salt
      `Prelude.hashWithSalt` authenticationConfiguration
      `Prelude.hashWithSalt` crawlDepth
      `Prelude.hashWithSalt` maxContentSizePerPageInMegaBytes
      `Prelude.hashWithSalt` maxLinksPerPage
      `Prelude.hashWithSalt` maxUrlsPerMinuteCrawlRate
      `Prelude.hashWithSalt` proxyConfiguration
      `Prelude.hashWithSalt` urlExclusionPatterns
      `Prelude.hashWithSalt` urlInclusionPatterns
      `Prelude.hashWithSalt` urls

instance Prelude.NFData WebCrawlerConfiguration where
  rnf WebCrawlerConfiguration' {..} =
    Prelude.rnf authenticationConfiguration
      `Prelude.seq` Prelude.rnf crawlDepth
      `Prelude.seq` Prelude.rnf maxContentSizePerPageInMegaBytes
      `Prelude.seq` Prelude.rnf maxLinksPerPage
      `Prelude.seq` Prelude.rnf maxUrlsPerMinuteCrawlRate
      `Prelude.seq` Prelude.rnf proxyConfiguration
      `Prelude.seq` Prelude.rnf urlExclusionPatterns
      `Prelude.seq` Prelude.rnf urlInclusionPatterns
      `Prelude.seq` Prelude.rnf urls

instance Data.ToJSON WebCrawlerConfiguration where
  toJSON WebCrawlerConfiguration' {..} =
    Data.object
      ( Prelude.catMaybes
          [ ("AuthenticationConfiguration" Data..=)
              Prelude.<$> authenticationConfiguration,
            ("CrawlDepth" Data..=) Prelude.<$> crawlDepth,
            ("MaxContentSizePerPageInMegaBytes" Data..=)
              Prelude.<$> maxContentSizePerPageInMegaBytes,
            ("MaxLinksPerPage" Data..=)
              Prelude.<$> maxLinksPerPage,
            ("MaxUrlsPerMinuteCrawlRate" Data..=)
              Prelude.<$> maxUrlsPerMinuteCrawlRate,
            ("ProxyConfiguration" Data..=)
              Prelude.<$> proxyConfiguration,
            ("UrlExclusionPatterns" Data..=)
              Prelude.<$> urlExclusionPatterns,
            ("UrlInclusionPatterns" Data..=)
              Prelude.<$> urlInclusionPatterns,
            Prelude.Just ("Urls" Data..= urls)
          ]
      )
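
-- An encoding sketch: fields left as 'Prelude.Nothing' are dropped by the
-- 'Prelude.catMaybes' above, so a default configuration serializes to just
-- the required @Urls@ key (assuming 'newUrls' as in the constructor example,
-- and that 'Urls' likewise drops its omitted fields):
--
-- @
-- import qualified Data.Aeson as Aeson
-- import Amazonka.Kendra.Types.Urls (newUrls)
--
-- encoded :: Aeson.Value
-- encoded = Aeson.toJSON (newWebCrawlerConfiguration newUrls)
-- -- expected: {\"Urls\": {}}
-- @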