module Text.Highlighter.Lexers.S (lexer) where

import Text.Regex.PCRE.Light
import Text.Highlighter.Types

-- | Lexer definition for the S language family (S, S-plus, R).
-- Entry state is 'root''; regexes run in multiline mode so that
-- @$@ anchors match at end-of-line (used by the comment rule).
lexer :: Lexer
lexer = Lexer
    { lName = "S"
    , lAliases = ["splus", "s", "r"]
    , lExtensions = [".S", ".R"]
    , lMimetypes = ["text/S-plus", "text/S", "text/R"]
    , lStart = root'
    , lFlags = [multiline]
    }

-- | Matcher for general S/R statements, tried in priority order:
-- comments first, then whitespace, then string openers (which switch
-- into a dedicated quote state), then the remaining token categories.
statements' :: TokenMatcher
statements' =
    [ anyOf comments'
    , tok "\\s+" (Arbitrary "Text")
      -- A quote starts a string literal; scan its body in a sub-state.
    , tokNext "\\'" (Arbitrary "Literal" :. Arbitrary "String") (GoTo string_squote')
    , tokNext "\\\"" (Arbitrary "Literal" :. Arbitrary "String") (GoTo string_dquote')
    , anyOf builtin_symbols'
    , anyOf numbers'
    , anyOf keywords'
    , anyOf punctuation'
    , anyOf operators'
    , anyOf valid_name'
    ]

-- | Inside a double-quoted string: consume everything up to and
-- including the closing quote, then pop back to the caller state.
-- NOTE(review): no escape handling — a backslash-escaped quote would
-- terminate the string early; this mirrors the original rule.
string_dquote' :: TokenMatcher
string_dquote' =
    [ tokNext "[^\\\"]*\\\"" (Arbitrary "Literal" :. Arbitrary "String") Pop
    ]

-- | S/R operators, grouped as in the original: assignment/comparison
-- and logical operators, then arithmetic, then %-delimited operators.
operators' :: TokenMatcher
operators' =
    [ tok "<-|-|==|<=|>=|<|>|&&|&|!=|\\|\\|?" (Arbitrary "Operator")
    , tok "\\*|\\+|\\^|/|%%|%/%|=" (Arbitrary "Operator")
    , tok "%in%|%*%" (Arbitrary "Operator")
    ]

-- | Reserved words of S/R. Lookahead/lookbehind guards keep plain
-- identifiers (e.g. a variable named @forx@) from matching: control
-- keywords must be followed by an opening paren, @else@/@break@ must
-- be preceded by whitespace.
keywords' :: TokenMatcher
keywords' =
    [ tok "for(?=\\s*\\()|while(?=\\s*\\()|if(?=\\s*\\()|(?<=\\s)else|(?<=\\s)break(?=;|$)|return(?=\\s*\\()|function(?=\\s*\\()" (Arbitrary "Keyword" :. Arbitrary "Reserved")
    ]

-- | Numeric literals. The first rule uses a negative lookbehind so a
-- leading sign is only taken as part of the number when not preceded
-- by an identifier character or a closing bracket/quote (where @-@
-- would be a binary operator); the second handles numbers that start
-- with a bare decimal point.
numbers' :: TokenMatcher
numbers' =
    [ tok "(?<![0-9a-zA-Z\\)\\}\\]`\\\"])(?=\\s*)[-\\+]?[0-9]+(\\.[0-9]*)?(E[0-9][-\\+]?(\\.[0-9]*)?)?" (Arbitrary "Literal" :. Arbitrary "Number")
    , tok "\\.[0-9]*(E[0-9][-\\+]?(\\.[0-9]*)?)?" (Arbitrary "Literal" :. Arbitrary "Number")
    ]

-- | Ordinary identifiers (letter followed by letters, digits, dots or
-- underscores) and backtick-quoted names.
valid_name' :: TokenMatcher
valid_name' =
    [ tok "[a-zA-Z][0-9a-zA-Z\\._]+" (Arbitrary "Text")
    , tok "`.+`" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Backtick")
    ]

-- | Built-in constants of S/R, plus the short logical aliases @T@ and
-- @F@ (which, unlike @TRUE@/@FALSE@, are ordinary re-bindable
-- variables in R — hence the Variable token type).
builtin_symbols' :: TokenMatcher
builtin_symbols' =
    [ tok "(NULL|NA|TRUE|FALSE|NaN)\\b" (Arbitrary "Keyword" :. Arbitrary "Constant")
    , tok "(T|F)\\b" (Arbitrary "Keyword" :. Arbitrary "Variable")
    ]

-- | Brackets, parens, indexing/slot operators and separators.
punctuation' :: TokenMatcher
punctuation' =
    [ tok "\\[|\\]|\\[\\[|\\]\\]|\\$|\\(|\\)|@|:::?|;|," (Arbitrary "Punctuation")
    ]

-- | Line comments: @#@ through end of line (relies on the lexer's
-- multiline flag so @$@ matches at each newline).
comments' :: TokenMatcher
comments' =
    [ tok "#.*$" (Arbitrary "Comment" :. Arbitrary "Single")
    ]

-- | Root state: defer to the statement rules, then handle braces and
-- finally fall back to consuming any single character as plain text.
root' :: TokenMatcher
root' =
    [ anyOf statements'
    , tok "\\{|\\}" (Arbitrary "Punctuation")
    , tok "." (Arbitrary "Text")
    ]

-- | Inside a single-quoted string: consume everything up to and
-- including the closing quote, then pop back to the caller state.
-- NOTE(review): no escape handling, mirroring the original rule.
string_squote' :: TokenMatcher
string_squote' =
    [ tokNext "[^\\']*\\'" (Arbitrary "Literal" :. Arbitrary "String") Pop
    ]