module Text.Highlighter.Lexers.Cython (lexer) where

import Text.Regex.PCRE.Light
import Text.Highlighter.Types

lexer :: Lexer
lexer = Lexer
    { lName = "Cython"
    , lAliases = ["cython", "pyx"]
    , lExtensions = [".pyx", ".pxd", ".pxi"]
    , lMimetypes = ["text/x-cython", "application/x-cython"]
    , lStart = root'
    , lFlags = [multiline]
    }

-- Triple single-quoted strings: pop on the closing ''', otherwise consume
-- ordinary string content and newlines.
tsqs' :: TokenMatcher
tsqs' =
    [ tokNext "'''" (Arbitrary "Literal" :. Arbitrary "String") Pop
    , anyOf strings'
    , anyOf nl'
    ]

-- Top-level state: <type> casts, operators, keywords, declarations,
-- imports, string prefixes, names and numbers.
root' :: TokenMatcher
root' =
    [ tok "(<)([a-zA-Z0-9.?]+)(>)" (ByGroups [(Arbitrary "Punctuation"), (Arbitrary "Keyword" :. Arbitrary "Type"), (Arbitrary "Punctuation")])
    , tok "!=|==|<<|>>|[-\126+/*%=<>&^|.?]" (Arbitrary "Operator")
    , tok "(from)(\\d+)(<=)(\\s+)(<)(\\d+)(:)" (ByGroups [(Arbitrary "Keyword"), (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Integer"), (Arbitrary "Operator"), (Arbitrary "Name"), (Arbitrary "Operator"), (Arbitrary "Name"), (Arbitrary "Punctuation")])
    , anyOf keywords'
    , tokNext "(def|property)(\\s+)" (ByGroups [(Arbitrary "Keyword"), (Arbitrary "Text")]) (GoTo funcname')
    , tokNext "(cp?def)(\\s+)" (ByGroups [(Arbitrary "Keyword"), (Arbitrary "Text")]) (GoTo cdef')
    , tokNext "(class|struct)(\\s+)" (ByGroups [(Arbitrary "Keyword"), (Arbitrary "Text")]) (GoTo classname')
    , tokNext "(from)(\\s+)" (ByGroups [(Arbitrary "Keyword"), (Arbitrary "Text")]) (GoTo fromimport')
    , tokNext "(c?import)(\\s+)" (ByGroups [(Arbitrary "Keyword"), (Arbitrary "Text")]) (GoTo import')
    , anyOf builtins'
    , anyOf backtick'
    , tokNext "(?:[rR]|[uU][rR]|[rR][uU])\"\"\"" (Arbitrary "Literal" :. Arbitrary "String") (GoTo tdqs')
    , tokNext "(?:[rR]|[uU][rR]|[rR][uU])'''" (Arbitrary "Literal" :. Arbitrary "String") (GoTo tsqs')
    , tokNext "(?:[rR]|[uU][rR]|[rR][uU])\"" (Arbitrary "Literal" :. Arbitrary "String") (GoTo dqs')
    , tokNext "(?:[rR]|[uU][rR]|[rR][uU])'" (Arbitrary "Literal" :. Arbitrary "String") (GoTo sqs')
    , tokNext "[uU]?\"\"\"" (Arbitrary "Literal" :. Arbitrary "String") (Combined [stringescape', tdqs'])
    , tokNext "[uU]?'''" (Arbitrary "Literal" :. Arbitrary "String") (Combined [stringescape', tsqs'])
    , tokNext "[uU]?\"" (Arbitrary "Literal" :. Arbitrary "String") (Combined [stringescape', dqs'])
    , tokNext "[uU]?'" (Arbitrary "Literal" :. Arbitrary "String") (Combined [stringescape', sqs'])
    , anyOf name'
    , anyOf numbers'
    ]

-- Shared string-body rules: printf-style interpolation specifiers, plain
-- character runs, quote/backslash characters, and a bare '%'.
strings' :: TokenMatcher
strings' =
    [ tok "%(\\([a-zA-Z0-9]+\\))?[-#0 +]*([0-9]+|[*])?(\\.([0-9]+|[*]))?[hlL]?[diouxXeEfFgGcrs%]" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Interpol")
    , tok "[^\\\\\\'\"%\\n]+" (Arbitrary "Literal" :. Arbitrary "String")
    , tok "[\\'\"\\\\]" (Arbitrary "Literal" :. Arbitrary "String")
    , tok "%" (Arbitrary "Literal" :. Arbitrary "String")
    ]

-- After 'from': an 'import'/'cimport' keyword pops back to the caller;
-- dotted module names are tagged as namespaces.
fromimport' :: TokenMatcher
fromimport' =
    [ tokNext "(\\s+)(c?import)\\b" (ByGroups [(Arbitrary "Text"), (Arbitrary "Keyword")]) Pop
    , tok "[a-zA-Z_.][a-zA-Z0-9_.]*" (Arbitrary "Name" :. Arbitrary "Namespace")
    , tokNext "" (Arbitrary "Text") Pop
    ]
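
-- A minimal usage sketch, kept in comments so nothing here depends on it.
-- It assumes the surrounding text-highlighter package exposes a 'runLexer'
-- driver of roughly the shape 'Lexer -> ByteString -> Either String [Token]';
-- check the installed version's Text.Highlighter exports before relying on
-- this, as the exact name and signature are assumptions, not guarantees.
--
--   import qualified Data.ByteString.Char8 as BS
--   import Text.Highlighter (runLexer)          -- assumed export
--   import Text.Highlighter.Lexers.Cython (lexer)
--
--   tokens :: Either String [Token]
--   tokens = runLexer lexer
--       (BS.pack "cdef int add(int a, int b):\n    return a + b\n")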