language-python-0.5.3: Parsing and pretty printing of Python code.

Copyright: (c) 2009 Bernie Pope
License: BSD-style
Maintainer: bjpop@csse.unimelb.edu.au
Stability: experimental
Portability: ghc
Safe Haskell: Safe
Language: Haskell98

Language.Python.Common.Token

Contents

Description

Lexical tokens for the Python lexer. Contains the superset of tokens from version 2 and version 3 of Python (they are mostly the same).

Synopsis

The tokens

data Token Source #

Lexical tokens.

Constructors

IndentToken

Indentation: increase.

Fields

DedentToken

Indentation: decrease.

Fields

NewlineToken

Newline.

Fields

LineJoinToken

Line join (backslash at end of line).

Fields

CommentToken

Single line comment.

IdentifierToken

Identifier.

StringToken

Literal: string.

ByteStringToken

Literal: byte string.

UnicodeStringToken

Literal: unicode string, version 2 only.

IntegerToken

Literal: integer.

LongIntegerToken

Literal: long integer. Version 2 only.

FloatToken

Literal: floating point.

ImaginaryToken

Literal: imaginary number.

DefToken

Keyword: 'def'.

Fields

WhileToken

Keyword: 'while'.

Fields

IfToken

Keyword: 'if'.

Fields

TrueToken

Keyword: 'True'.

Fields

FalseToken

Keyword: 'False'.

Fields

ReturnToken

Keyword: 'return'.

Fields

TryToken

Keyword: 'try'.

Fields

ExceptToken

Keyword: 'except'.

Fields

RaiseToken

Keyword: 'raise'.

Fields

InToken

Keyword: 'in'.

Fields

IsToken

Keyword: 'is'.

Fields

LambdaToken

Keyword: 'lambda'.

Fields

ClassToken

Keyword: 'class'.

Fields

FinallyToken

Keyword: 'finally'.

Fields

NoneToken

Keyword: 'None'.

Fields

ForToken

Keyword: 'for'.

Fields

FromToken

Keyword: 'from'.

Fields

GlobalToken

Keyword: 'global'.

Fields

WithToken

Keyword: 'with'.

Fields

AsToken

Keyword: 'as'.

Fields

ElifToken

Keyword: 'elif'.

Fields

YieldToken

Keyword: 'yield'.

Fields

AssertToken

Keyword: 'assert'.

Fields

ImportToken

Keyword: 'import'.

Fields

PassToken

Keyword: 'pass'.

Fields

BreakToken

Keyword: 'break'.

Fields

ContinueToken

Keyword: 'continue'.

Fields

DeleteToken

Keyword: 'del'.

Fields

ElseToken

Keyword: 'else'.

Fields

NotToken

Keyword: 'not'.

Fields

AndToken

Keyword: boolean conjunction 'and'.

Fields

OrToken

Keyword: boolean disjunction 'or'.

Version 3.x only:

Fields

NonLocalToken

Keyword: 'nonlocal'. (Python 3.x only)

Version 2.x only:

Fields

PrintToken

Keyword: 'print'. (Python 2.x only)

Fields

ExecToken

Keyword: 'exec'. (Python 2.x only)

Fields

AtToken

Delimiter: at sign '@'.

Fields

LeftRoundBracketToken

Delimiter: left round bracket '('.

Fields

RightRoundBracketToken

Delimiter: right round bracket ')'.

Fields

LeftSquareBracketToken

Delimiter: left square bracket '['.

Fields

RightSquareBracketToken

Delimiter: right square bracket ']'.

Fields

LeftBraceToken

Delimiter: left curly bracket '{'.

Fields

RightBraceToken

Delimiter: right curly bracket '}'.

Fields

DotToken

Delimiter: dot (full stop) '.'.

Fields

CommaToken

Delimiter: comma ','.

Fields

SemiColonToken

Delimiter: semicolon ';'.

Fields

ColonToken

Delimiter: colon ':'.

Fields

EllipsisToken

Delimiter: ellipsis (three dots) '...'.

Fields

RightArrowToken

Delimiter: right facing arrow '->'.

Fields

AssignToken

Delimiter: assignment '='.

Fields

PlusAssignToken

Delimiter: plus assignment '+='.

Fields

MinusAssignToken

Delimiter: minus assignment '-='.

Fields

MultAssignToken

Delimiter: multiply assignment '*='.

Fields

DivAssignToken

Delimiter: divide assignment '/='.

Fields

ModAssignToken

Delimiter: modulus assignment '%='.

Fields

PowAssignToken

Delimiter: power assignment '**='.

Fields

BinAndAssignToken

Delimiter: binary-and assignment '&='.

Fields

BinOrAssignToken

Delimiter: binary-or assignment '|='.

Fields

BinXorAssignToken

Delimiter: binary-xor assignment '^='.

Fields

LeftShiftAssignToken

Delimiter: binary-left-shift assignment '<<='.

Fields

RightShiftAssignToken

Delimiter: binary-right-shift assignment '>>='.

Fields

FloorDivAssignToken

Delimiter: floor-divide assignment '//='.

Fields

BackQuoteToken

Delimiter: back quote character '`'.

Fields

PlusToken

Operator: plus '+'.

Fields

MinusToken

Operator: minus '-'.

Fields

MultToken

Operator: multiply '*'.

Fields

DivToken

Operator: divide '/'.

Fields

GreaterThanToken

Operator: greater-than '>'.

Fields

LessThanToken

Operator: less-than '<'.

Fields

EqualityToken

Operator: equals '=='.

Fields

GreaterThanEqualsToken

Operator: greater-than-or-equals '>='.

Fields

LessThanEqualsToken

Operator: less-than-or-equals '<='.

Fields

ExponentToken

Operator: exponential '**'.

Fields

BinaryOrToken

Operator: binary-or '|'.

Fields

XorToken

Operator: binary-xor '^'.

Fields

BinaryAndToken

Operator: binary-and '&'.

Fields

ShiftLeftToken

Operator: binary-shift-left '<<'.

Fields

ShiftRightToken

Operator: binary-shift-right '>>'.

Fields

ModuloToken

Operator: modulus '%'.

Fields

FloorDivToken

Operator: floor-divide '//'.

Fields

TildeToken

Operator: tilde '~'.

Fields

NotEqualsToken

Operator: not-equals '!='.

Fields

NotEqualsV2Token

Operator: not-equals '<>'. Version 2 only.

Fields

EOFToken

End of file.

Fields

Instances

Eq Token Source # 

Methods

(==) :: Token -> Token -> Bool #

(/=) :: Token -> Token -> Bool #

Data Token Source # 

Methods

gfoldl :: (forall d b. Data d => c (d -> b) -> d -> c b) -> (forall g. g -> c g) -> Token -> c Token #

gunfold :: (forall b r. Data b => c (b -> r) -> c r) -> (forall r. r -> c r) -> Constr -> c Token #

toConstr :: Token -> Constr #

dataTypeOf :: Token -> DataType #

dataCast1 :: Typeable (* -> *) t => (forall d. Data d => c (t d)) -> Maybe (c Token) #

dataCast2 :: Typeable (* -> * -> *) t => (forall d e. (Data d, Data e) => c (t d e)) -> Maybe (c Token) #

gmapT :: (forall b. Data b => b -> b) -> Token -> Token #

gmapQl :: (r -> r' -> r) -> r -> (forall d. Data d => d -> r') -> Token -> r #

gmapQr :: (r' -> r -> r) -> r -> (forall d. Data d => d -> r') -> Token -> r #

gmapQ :: (forall d. Data d => d -> u) -> Token -> [u] #

gmapQi :: Int -> (forall d. Data d => d -> u) -> Token -> u #

gmapM :: Monad m => (forall d. Data d => d -> m d) -> Token -> m Token #

gmapMp :: MonadPlus m => (forall d. Data d => d -> m d) -> Token -> m Token #

gmapMo :: MonadPlus m => (forall d. Data d => d -> m d) -> Token -> m Token #

Ord Token Source # 

Methods

compare :: Token -> Token -> Ordering #

(<) :: Token -> Token -> Bool #

(<=) :: Token -> Token -> Bool #

(>) :: Token -> Token -> Bool #

(>=) :: Token -> Token -> Bool #

max :: Token -> Token -> Token #

min :: Token -> Token -> Token #

Show Token Source # 

Methods

showsPrec :: Int -> Token -> ShowS #

show :: Token -> String #

showList :: [Token] -> ShowS #

Span Token Source # 

String conversion

debugTokenString :: Token -> String Source #

Produce a string from a token containing detailed information. Mainly intended for debugging.

tokenString :: Token -> String Source #

Produce a string from a token which is suitable for printing as Python concrete syntax. Invisible tokens yield an empty string.

Classification

hasLiteral :: Token -> Bool Source #

Test if a token contains its literal source text.