-- Hoogle documentation, generated by Haddock -- See Hoogle, http://www.haskell.org/hoogle/ -- | A regex lexer -- @package tokenify @version 0.1.2.0 -- | An abstraction over string-like data types to prevent restricting -- the tokenizer to a particular data type, as there are many string -- types in Haskell. module Text.Tokenify.CharSeq -- | This typeclass is designed to make the tokenizer more polymorphic, -- which is why it's a superset of both Monoid and Eq. class (Monoid a, Eq a) => CharSeq a head :: CharSeq a => a -> Maybe Char tail :: CharSeq a => a -> a cons :: CharSeq a => Char -> a -> a snoc :: CharSeq a => a -> Char -> a null :: CharSeq a => a -> Bool singleton :: CharSeq a => Char -> a lineInfo :: CharSeq a => a -> [Int] -- | Enables Text to be used for the tokenizer -- | the main purpose of this implementation is to make testing in the repl -- non-painful instance CharSeq [Char] instance CharSeq Text -- | Common types used by Text.Tokenify module Text.Tokenify.Types -- | A series of Tokens which will match in sequential order type Tokenizer s a = [Token s a] -- | Defines what is matched, and how to respond to said match type Token s a = (Regex s, Response s a) -- | The type for a token position in a file type Pos = (Int, Int) -- | The DSL for creating a grammar/tokenizer definition for the -- tokenizer module Text.Tokenify.DSL -- | Creates a response which will fail on a regex fails :: Regex s -> Token s a -- | Creates a response which will ignore a regex ignore :: Regex s -> Token s a -- | Creates a response which consumes the text position insert :: Regex s -> (Pos -> a) -> Token s a -- | Creates a response which consumes the captured CharSeq and -- the text position evaluate :: Regex s -> (s -> Pos -> a) -> Token s a -- | Creates a regex that matches a string string :: s -> Regex s -- | Creates a regex that matches a char char :: Char -> Regex s -- | Creates a regex that will match a range of characters range :: Char -> Char -> Regex s -- | Creates a regex that 
will attempt to match the regex on the left, if -- that fails it will attempt to match the regex on the right alt :: Regex s -> Regex s -> Regex s -- | Creates a regex that will attempt to match a Sequence of regexes in a -- sequential order any :: [Regex s] -> Regex s -- | Create a regex that appends the result of two regexes append :: Regex s -> Regex s -> Regex s -- | Create a regex that appends the result of a sequence of regexes concat :: [Regex s] -> Regex s -- | Create a regex that may or may not match a regex option :: Regex s -> Regex s -- | Create a regex that matches zero or more of a regex repeat :: Regex s -> Regex s -- | Create a regex that matches one or more of a regex repeat1 :: Regex s -> Regex s -- | Tokenify is a module used for generating a tokenizer from a -- regex-based grammar module Text.Tokenify -- | tokenize will transform a CharSeq into a sequence of -- tokens tokenize :: CharSeq s => Tokenizer s a -> s -> Either String (Seq a) -- | Attempts to match the front of a CharSeq with a Regex, if -- successful, it returns a tuple containing -- -- matchHead :: CharSeq s => Regex s -> s -> Maybe (s, s, Int)