Versions in this module:
- v0.2.0-alpha.1 — Dec 16, 2021
- v0.1.0 — Nov 13, 2021

Changes in this version:
+ const Char
+ const Comment
+ const EOF
+ const Float
+ const Ident
+ const Int
+ const RawString
+ const String
+ var AnyToken []int = nil
+ func Lexeme(token interface{}) string
+ func MakeToken(name string, id int) lex.Action
+ func Skip(*lex.Scanner, *machines.Match) (interface{}, error)
+ type DefaultTokenizer struct
    + Error func(error)
    + func GoTokenizer(sourceID string, input io.Reader, opts ...Option) *DefaultTokenizer
    + func (t *DefaultTokenizer) NextToken(exp []int) (int, interface{}, uint64, uint64)
    + func (t *DefaultTokenizer) SetErrorHandler(h func(error))
+ type LMAdapter struct
    + Lexer *lex.Lexer
    + func NewLMAdapter(init func(*lex.Lexer), literals []string, keywords []string, ...) (*LMAdapter, error)
    + func (lm *LMAdapter) Scanner(input string) (*LMScanner, error)
+ type LMScanner struct
    + Error func(error)
    + func (lms *LMScanner) NextToken(expected []int) (int, interface{}, uint64, uint64)
    + func (lms *LMScanner) SetErrorHandler(h func(error))
+ type Option func(p *DefaultTokenizer)
    + func SkipComments(b bool) Option
    + func UnifyStrings(b bool) Option
+ type Tokenizer interface
    + NextToken func(expected []int) (tokval int, token interface{}, start, len uint64)
    + SetErrorHandler func(func(error))