Safe Haskell: None
---|
- type Robot = ([([UserAgent], [Directive])], [Unparsable])
- type Unparsable = ByteString
- data UserAgent
- type Path = ByteString
- type TimeInterval = (DiffTime, DiffTime)
- data Directive
- subParser :: Parser a -> ByteString -> Parser a
- safeParseRational :: Parser Rational
- dropUTF8BOM :: ByteString -> ByteString
- parseHourMinute :: Parser (Integer, Integer)
- parseTimeInterval :: Parser TimeInterval
- allDay :: TimeInterval
- parseRequestRate :: Parser Directive
- parseVisitTime :: Parser Directive
- parseCrawlDelay :: Parser Directive
- strip :: ByteString -> ByteString
- parseRobots :: ByteString -> Either String Robot
- robotP :: Parser Robot
- unparsableP :: Parser ByteString
- agentDirectiveP :: Parser ([UserAgent], [Directive])
- skipSpace :: Parser ()
- directiveP :: Parser Directive
- agentP :: Parser UserAgent
- commentsP :: Parser ()
- tokenP :: Parser ByteString
- tokenWithSpacesP :: Parser ByteString
- canAccess :: ByteString -> Robot -> Path -> Bool
Documentation
type Robot = ([([UserAgent], [Directive])], [Unparsable])
type Unparsable = ByteString
type Path = ByteString
type TimeInterval = (DiffTime, DiffTime)
subParser :: Parser a -> ByteString -> Parser a
strip :: ByteString -> ByteString
parseRobots :: ByteString -> Either String Robot
parseRobots is the main entry point for parsing a robots.txt file.
agentDirectiveP :: Parser ([UserAgent], [Directive])