-- This Happy file was machine-generated by the BNF converter

{
{-# OPTIONS -fno-warn-incomplete-patterns -fno-warn-overlapping-patterns #-}
module GFCC.Par where
import GFCC.Abs
import GFCC.Lex
import GFCC.ErrM
}

-- One parser entry point per category of the abstract syntax.
%name pGrammar Grammar
%name pHeader Header
%name pAbstract Abstract
%name pConcrete Concrete
%name pAbsDef AbsDef
%name pCncDef CncDef
%name pType Type
%name pExp Exp
%name pAtom Atom
%name pTerm Term
%name pTokn Tokn
%name pVariant Variant
%name pListConcrete ListConcrete
%name pListAbsDef ListAbsDef
%name pListCncDef ListCncDef
%name pListCId ListCId
%name pListTerm ListTerm
%name pListExp ListExp
%name pListString ListString
%name pListVariant ListVariant

-- no lexer declaration
%monad { Err } { thenM } { returnM }
%tokentype { Token }

%token
  ';'        { PT _ (TS ";") }
  '('        { PT _ (TS "(") }
  ')'        { PT _ (TS ")") }
  '{'        { PT _ (TS "{") }
  '}'        { PT _ (TS "}") }
  ':'        { PT _ (TS ":") }
  '='        { PT _ (TS "=") }
  '->'       { PT _ (TS "->") }
  '?'        { PT _ (TS "?") }
  '['        { PT _ (TS "[") }
  ']'        { PT _ (TS "]") }
  '!'        { PT _ (TS "!") }
  '$'        { PT _ (TS "$") }
  '[|'       { PT _ (TS "[|") }
  '|]'       { PT _ (TS "|]") }
  '+'        { PT _ (TS "+") }
  '@'        { PT _ (TS "@") }
  '#'        { PT _ (TS "#") }
  '/'        { PT _ (TS "/") }
  ','        { PT _ (TS ",") }
  'abstract' { PT _ (TS "abstract") }
  'concrete' { PT _ (TS "concrete") }
  'grammar'  { PT _ (TS "grammar") }
  'pre'      { PT _ (TS "pre") }
  L_quoted   { PT _ (TL $$) }
  L_integ    { PT _ (TI $$) }
  L_doubl    { PT _ (TD $$) }
  L_CId      { PT _ (T_CId $$) }
  L_err      { _ }

%%

-- Literal token categories.
String  :: { String }  : L_quoted { $1 }
Integer :: { Integer } : L_integ  { (read $1) :: Integer }
Double  :: { Double }  : L_doubl  { (read $1) :: Double }
CId     :: { CId }     : L_CId    { CId ($1) }

-- A grammar is a header, an abstract syntax, and the concrete syntaxes.
Grammar :: { Grammar }
Grammar : Header ';' Abstract ';' ListConcrete { Grm $1 $3 (reverse $5) }

Header :: { Header }
Header : 'grammar' CId '(' ListCId ')' { Hdr $2 $4 }

Abstract :: { Abstract }
Abstract : 'abstract' '{' ListAbsDef '}' { Abs (reverse $3) }

Concrete :: { Concrete }
Concrete : 'concrete' CId '{' ListCncDef '}' { Cnc $2 (reverse $4) }

AbsDef :: { AbsDef }
AbsDef : CId ':' Type '=' Exp { Fun $1 $3 $5 }

CncDef :: { CncDef }
CncDef : CId '=' Term { Lin $1 $3 }

Type :: { Type }
Type : ListCId '->' CId { Typ $1 $3 }

Exp :: { Exp }
Exp : '(' Atom ListExp ')' { Tr $2 (reverse $3) }
    | Atom                 { trA_ $1 }

Atom :: { Atom }
Atom : CId     { AC $1 }
     | String  { AS $1 }
     | Integer { AI $1 }
     | Double  { AF $1 }
     | '?'     { AM }

Term :: { Term }
Term : '[' ListTerm ']'        { R $2 }
     | '(' Term '!' Term ')'   { P $2 $4 }
     | '(' ListTerm ')'        { S $2 }
     | Tokn                    { K $1 }
     | '$' Integer             { V $2 }
     | Integer                 { C $1 }
     | CId                     { F $1 }
     | '[|' ListTerm '|]'      { FV $2 }
     | '(' String '+' Term ')' { W $2 $4 }
     | '(' Term '@' Term ')'   { RP $2 $4 }
     | '?'                     { TM }
     | '(' CId '->' Term ')'   { L $2 $4 }
     | '#' CId                 { BV $2 }

Tokn :: { Tokn }
Tokn : String                                       { KS $1 }
     | '[' 'pre' ListString '[' ListVariant ']' ']' { KP (reverse $3) $5 }

Variant :: { Variant }
Variant : ListString '/' ListString { Var (reverse $1) (reverse $3) }

-- Left-recursive lists accumulate in reverse; callers re-reverse them.
ListConcrete :: { [Concrete] }
ListConcrete : {- empty -}               { [] }
             | ListConcrete Concrete ';' { flip (:) $1 $2 }

ListAbsDef :: { [AbsDef] }
ListAbsDef : {- empty -}           { [] }
           | ListAbsDef AbsDef ';' { flip (:) $1 $2 }

ListCncDef :: { [CncDef] }
ListCncDef : {- empty -}           { [] }
           | ListCncDef CncDef ';' { flip (:) $1 $2 }

-- Comma-separated lists are right-recursive and built in order.
ListCId :: { [CId] }
ListCId : {- empty -}     { [] }
        | CId             { (:[]) $1 }
        | CId ',' ListCId { (:) $1 $3 }

ListTerm :: { [Term] }
ListTerm : {- empty -}       { [] }
         | Term              { (:[]) $1 }
         | Term ',' ListTerm { (:) $1 $3 }

ListExp :: { [Exp] }
ListExp : {- empty -} { [] }
        | ListExp Exp { flip (:) $1 $2 }

ListString :: { [String] }
ListString : {- empty -}       { [] }
           | ListString String { flip (:) $1 $2 }

ListVariant :: { [Variant] }
ListVariant : {- empty -}             { [] }
            | Variant                 { (:[]) $1 }
            | Variant ',' ListVariant { (:) $1 $3 }

{
-- | Lift a value into the parse monad.
returnM :: a -> Err a
returnM = return

-- | Sequence two computations in the parse monad.
thenM :: Err a -> (a -> Err b) -> Err b
thenM = (>>=)

-- | Report a syntax error at the position of the offending token,
-- distinguishing end of input and lexer errors from ordinary mismatches.
happyError :: [Token] -> Err a
happyError ts =
  Bad $ "syntax error at " ++ tokenPos ts ++
  case ts of
    []      -> []
    [Err _] -> " due to lexer error"
    _       -> " before " ++ unwords (map prToken (take 4 ts))

myLexer = tokens

-- Wrap a bare atom as an application tree with no arguments.
trA_ a_ = Tr a_ []
}