| Safe Haskell | None      |
|--------------|-----------|
| Language     | Haskell98 |
Synopsis
- type Robot = ([([UserAgent], [Directive])], [Unparsable])
- type Unparsable = ByteString
- data UserAgent
- type Path = ByteString
- type TimeInterval = (DiffTime, DiffTime)
- data Directive
- subParser :: Parser a -> ByteString -> Parser a
- safeParseRational :: Parser Rational
- dropUTF8BOM :: ByteString -> ByteString
- parseHourMinute :: Parser (Integer, Integer)
- parseTimeInterval :: Parser TimeInterval
- allDay :: TimeInterval
- parseRequestRate :: Parser Directive
- parseVisitTime :: Parser Directive
- parseCrawlDelay :: Parser Directive
- strip :: ByteString -> ByteString
- parseRobots :: ByteString -> Either String Robot
- robotP :: Parser Robot
- unparsableP :: Parser ByteString
- agentDirectiveP :: Parser ([UserAgent], [Directive])
- skipSpace :: Parser ()
- directiveP :: Parser Directive
- agentP :: Parser UserAgent
- commentsP :: Parser ()
- tokenP :: Parser ByteString
- tokenWithSpacesP :: Parser ByteString
- canAccess :: ByteString -> Robot -> Path -> Bool
Documentation
type Unparsable = ByteString Source #
Instances
type Path = ByteString Source #
type TimeInterval = (DiffTime, DiffTime) Source #
Instances
dropUTF8BOM :: ByteString -> ByteString Source #
strip :: ByteString -> ByteString Source #
parseRobots :: ByteString -> Either String Robot Source #
`parseRobots` is the main entry point for parsing a robots.txt file, returning either a parse-error message or the parsed `Robot` structure.