Documentation ¶
Index ¶
- Constants
- Variables
- func Code(g Grammar, t Tokener) string
- func FileLine(doc []Token, t Tokener) (string, int)
- func List(a, b Grammar) *grList
- func List0(a, b Grammar) *grList
- func Repeat1(g Grammar) *grRepeat1
- func Source(doc []Token, t Tokener) (text []byte)
- type AutoKwScanner
- type CompileRet
- type Compiler
- type Context
- type GrNamed
- type GrVar
- type Grammar
- func Action(g Grammar, act func(tokens []Token, g Grammar)) Grammar
- func And(gs ...Grammar) Grammar
- func Clone(gs []Grammar) []Grammar
- func Gr(tok uint) Grammar
- func Not(g Grammar) Grammar
- func Or(gs ...Grammar) Grammar
- func Peek(g Grammar) Grammar
- func Repeat0(g Grammar) Grammar
- func Repeat01(g Grammar) Grammar
- func Transaction(g Grammar, begin func() interface{}, end func(trans interface{}, err error)) Grammar
- type MatchError
- type Matcher
- func (p *Matcher) Code() string
- func (p *Matcher) Eval(src string) (err error)
- func (p *Matcher) Match(src []byte, fname string) (next []Token, err error)
- func (p *Matcher) MatchExactly(src []byte, fname string) (err error)
- func (p *Matcher) Tokener() Tokener
- func (p *Matcher) Tokenize(src []byte, fname string) (tokens []Token, err error)
- type ScanErrorHandler
- type ScanMode
- type Scanner
- type Token
- type TokenSource
- type Tokener
- type TokenizeError
Constants ¶
const (
    ILLEGAL uint = iota
    EOF
    COMMENT
    IDENT  // main
    INT    // 12345
    FLOAT  // 123.45
    IMAG   // 123.45i
    CHAR   // 'a'
    STRING // "abc"

    ADD       = '+'
    SUB       = '-'
    MUL       = '*'
    QUO       = '/'
    REM       = '%'
    AND       = '&'
    OR        = '|'
    XOR       = '^'
    LT        = '<'
    GT        = '>'
    ASSIGN    = '='
    NOT       = '!'
    LPAREN    = '('
    LBRACK    = '['
    LBRACE    = '{'
    COMMA     = ','
    PERIOD    = '.'
    RPAREN    = ')'
    RBRACK    = ']'
    RBRACE    = '}'
    SEMICOLON = ';'
    COLON     = ':'
    QUESTION  = '?'
    TILDE     = '~'
    AT        = '@'
)
const (
    SHL uint       // <<
    SHR            // >>
    AND_NOT        // &^
    ADD_ASSIGN     // +=
    SUB_ASSIGN     // -=
    MUL_ASSIGN     // *=
    QUO_ASSIGN     // /=
    REM_ASSIGN     // %=
    AND_ASSIGN     // &=
    OR_ASSIGN      // |=
    XOR_ASSIGN     // ^=
    SHL_ASSIGN     // <<=
    SHR_ASSIGN     // >>=
    AND_NOT_ASSIGN // &^=
    LAND           // &&
    LOR            // ||
    ARROW          // <-
    INC            // ++
    DEC            // --
    EQ             // ==
    NE             // !=
    LE             // <=
    GE             // >=
    DEFINE         // :=
    ELLIPSIS       // ...

    USER_TOKEN_BEGIN uint = 0xb0
)
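USER_TOKEN_BEGIN marks the end of the predefined token space, so a client that teaches the scanner extra token kinds would presumably number them from that value upward. A minimal sketch of such a declaration; the keyword names are hypothetical, and the convention that custom kinds start at USER_TOKEN_BEGIN is an assumption, not something this documentation states:

package mylang

// USER_TOKEN_BEGIN mirrors the constant above (0xb0); real code would
// reference the package's own constant instead of redeclaring it here.
const USER_TOKEN_BEGIN uint = 0xb0

// Hypothetical application-specific token kinds, numbered from
// USER_TOKEN_BEGIN so they cannot collide with the predefined kinds.
const (
    KW_IF uint = USER_TOKEN_BEGIN + iota
    KW_ELSE
    KW_FOR
)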
Variables ¶
var (
    ErrVarNotAssigned = errors.New("variable is not assigned")
    ErrVarAssigned    = errors.New("variable is already assigned")
)
var (
ErrNoDoc = errors.New("no doc")
)
var (
ErrNoGrammar = errors.New("no grammar")
)
Functions ¶
Types ¶
type AutoKwScanner ¶
type AutoKwScanner struct {
    Scanner
    // contains filtered or unexported fields
}
func (*AutoKwScanner) Init ¶
func (p *AutoKwScanner) Init(file *token.File, src []byte, err ScanErrorHandler, mode ScanMode)
func (*AutoKwScanner) Ltot ¶
func (p *AutoKwScanner) Ltot(lit string) (tok uint)
func (*AutoKwScanner) Scan ¶
func (p *AutoKwScanner) Scan() (t Token)
func (*AutoKwScanner) Ttol ¶
func (p *AutoKwScanner) Ttol(tok uint) (lit string)
type CompileRet ¶
func (CompileRet) EvalSub ¶
func (p CompileRet) EvalSub(name string, src interface{}) error
type Compiler ¶
type Compiler struct {
    Grammar  []byte
    Marker   func(g Grammar, mark string) Grammar
    Init     func()
    Scanner  Tokener
    ScanMode ScanMode
}
func (*Compiler) Cl ¶
func (p *Compiler) Cl() (ret CompileRet, err error)
term = factor *(
    '%' factor/list | "%=" factor/list0 | '/' IDENT/mark
)

expr = +(term | '!'/nil)/and

grammar = expr % '|'/or

doc = +((IDENT '=' grammar ';')/assign)

factor =
    IDENT/ident |
    CHAR/gr | STRING/gr | INT/true |
    '*' factor/repeat0 | '+' factor/repeat1 | '?' factor/repeat01 |
    '~' factor/not | '@' factor/peek |
    '(' grammar ')'
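The rules above describe a grammar-definition notation whose marks (/list, /or, /gr, /repeat0, ...) line up with the constructor functions in the index, which suggests that Cl compiles the text held in the Compiler's Grammar field into a Grammar value. A usage sketch under those assumptions; the import path, the use of *AutoKwScanner as the Tokener, and the exact semantics of Marker and CompileRet are not confirmed by this documentation:

package main

import (
    "log"

    "example.com/your/tpl" // hypothetical import path for this package
)

func main() {
    compiler := &tpl.Compiler{
        // A tiny grammar written in the notation documented above:
        // a document is one or more `IDENT '=' STRING ';'` assignments.
        Grammar: []byte(`doc = +((IDENT '=' STRING ';')/assign)`),

        // Marker is assumed to be consulted for user marks such as /assign;
        // here it attaches an action that reports every matched assignment.
        Marker: func(g tpl.Grammar, mark string) tpl.Grammar {
            if mark == "assign" {
                return tpl.Action(g, func(tokens []tpl.Token, g tpl.Grammar) {
                    log.Println("matched assignment:", tokens)
                })
            }
            return g
        },

        // Assumes *AutoKwScanner satisfies the Tokener interface.
        Scanner: new(tpl.AutoKwScanner),
    }

    if _, err := compiler.Cl(); err != nil {
        log.Fatalln("compile grammar failed:", err)
    }
}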
type Grammar ¶
type Grammar interface {
    Match(src []Token, ctx Context) (n int, err error)
    Marshal(b []byte, t Tokener, lvlParent int) []byte
    Len() int // returns -1 if this Grammar is not an array-style grammar
}
var GrEOF Grammar = grEOF{}
var GrTrue Grammar = grTrue{}
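Grammar values can also be composed directly from the constructor functions listed in the index (Gr, And, Or, Repeat0, List, ...). A composition-only sketch, assuming the package is imported as tpl; how a composed grammar is then driven (Matcher, Context, token input) is not shown here:

package mylang

import "example.com/your/tpl" // hypothetical import path for this package

// identList builds a grammar for a comma-separated identifier list,
// i.e. IDENT *(',' IDENT), using only constructors whose signatures
// appear in the index above.
func identList() tpl.Grammar {
    return tpl.And(
        tpl.Gr(tpl.IDENT),
        tpl.Repeat0(tpl.And(tpl.Gr(tpl.COMMA), tpl.Gr(tpl.IDENT))),
    )
}

Judging by its signature, List(a, b Grammar) presumably expresses the same "a separated by b" pattern directly, which is what the /list mark in the grammar notation above appears to map to.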
func Transaction ¶

func Transaction(g Grammar, begin func() interface{}, end func(trans interface{}, err error)) Grammar
type MatchError ¶
func (*MatchError) Error ¶
func (p *MatchError) Error() string
type ScanErrorHandler ¶
A ScanErrorHandler may be provided to Scanner.Init. If a syntax error is encountered and a handler was installed, the handler is called with a position and an error message. The position points to the beginning of the offending token.
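The exact definition of ScanErrorHandler is not shown here; if it mirrors go/scanner.ErrorHandler, it is a function taking a position and a message, which matches the description above. A sketch under that assumption:

package mylang

import (
    "go/token"
    "log"
)

// reportError sketches a scan error handler as described above: it receives
// the position of the offending token and an error message. The signature is
// assumed to mirror go/scanner.ErrorHandler (func(token.Position, string));
// verify it against the package's actual ScanErrorHandler type before use.
func reportError(pos token.Position, msg string) {
    log.Printf("%s: %s", pos, msg)
}

Such a handler would then be passed as the err argument of Init (see AutoKwScanner.Init above for the parameter order).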
type ScanMode ¶
type ScanMode uint
A ScanMode value is a set of flags (or 0). They control scanner behavior.
type Scanner ¶
type Scanner struct {

    // public state - ok to modify
    ErrorCount int // number of errors encountered
    // contains filtered or unexported fields
}
A Scanner holds the scanner's internal state while processing a given text. It can be allocated as part of another data structure but must be initialized via Init before use.
func (*Scanner) Init ¶
Init prepares the scanner s to tokenize the text src by setting the scanner at the beginning of src. The scanner uses the file set file for position information and it adds line information for each line. It is ok to re-use the same file when re-scanning the same file, as line information that is already present is ignored. Init causes a panic if the file size does not match the src size.
Calls to Scan will invoke the error handler err if they encounter a syntax error and err is not nil. Also, for each error encountered, the Scanner field ErrorCount is incremented by one. The mode parameter determines how comments are handled.
Note that Init may call err if there is an error in the first character of the file.
func (*Scanner) Scan ¶
Scan scans the next token and returns the token position, the token, and its literal string if applicable. The source end is indicated by EOF.
If the returned token is a literal (IDENT, INT, FLOAT, IMAG, CHAR, STRING) or COMMENT, the literal string has the corresponding value.
If the returned token is SEMICOLON, the corresponding literal string is ";" if the semicolon was present in the source, and "\n" if the semicolon was inserted because of a newline or at EOF.
If the returned token is ILLEGAL, the literal string is the offending character.
In all other cases, Scan returns an empty literal string.
For more tolerant parsing, Scan will return a valid token if possible even if a syntax error was encountered. Thus, even if the resulting token sequence contains no illegal tokens, a client may not assume that no error occurred. Instead it must check the scanner's ErrorCount or the number of calls of the error handler, if there was one installed.
Scan adds line information to the file added to the file set with Init. Token positions are relative to that file and thus relative to the file set.
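Putting Init and Scan together, a client loop would scan until the EOF token and then check ErrorCount, as the paragraphs above require. The sketch below uses AutoKwScanner, whose Init and Scan signatures are documented earlier; the import path and the assumption that Token exposes its kind in a field named Kind are not confirmed by this documentation:

package main

import (
    "go/token"
    "log"

    "example.com/your/tpl" // hypothetical import path for this package
)

func main() {
    src := []byte(`x = 1 + 2`)

    fset := token.NewFileSet()
    file := fset.AddFile("example.tpl", fset.Base(), len(src))

    var s tpl.AutoKwScanner
    // nil error handler: errors are still counted in s.ErrorCount.
    s.Init(file, src, nil, 0)

    for {
        t := s.Scan()
        if t.Kind == tpl.EOF { // assumes a Kind field; the real name may differ
            break
        }
        log.Printf("token: %v", t)
    }
    if s.ErrorCount > 0 {
        log.Fatalf("%d scan error(s)", s.ErrorCount)
    }
}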
type TokenSource ¶
type TokenizeError ¶
func (*TokenizeError) Error ¶
func (p *TokenizeError) Error() string