Documentation
Index
- Variables
- func EndsWith(ts []Token, pat []Token) bool
- func HasPattern(ts []Token, pat []TokenType) bool
- func Literals(ts []Token) string
- func Split(ts []Token, tt TokenType) [][]Token
- func SplitOnFirst(ts []Token, tt TokenType) ([]Token, []Token)
- func StartsWith(ts []Token, pat []Token) bool
- type Lexer
- type Token
- type TokenReader
- type TokenStream
- type TokenType
Constants
This section is empty.
Variables
View Source
var (
	TOKEN_EOF            = Token{Type: TYPE_EOF}
	TOKEN_EOS            = Token{Type: TYPE_EOS}
	TOKEN_NEWLINE        = Token{Type: TYPE_NEWLINE, Literal: "\n"}
	TOKEN_DOUBLE_NEWLINE = Token{Type: TYPE_NEWLINE, Literal: "\n\n"}
	TOKEN_ASTERISK       = Token{Type: TYPE_ASTERISK, Literal: "*"}
)
View Source
var TokenTypeMap = map[TokenType][]rune{
	TYPE_EOF:          {eof},
	TYPE_WHITESPACE:   {' ', '\t'},
	TYPE_NEWLINE:      {'\n'},
	TYPE_EQUALSIGN:    {'='},
	TYPE_BACKTICK:     {'`'},
	TYPE_DASH:         {'-'},
	TYPE_COLON:        {':'},
	TYPE_ASTERISK:     {'*'},
	TYPE_UNDERSCORE:   {'_'},
	TYPE_BRACKETOPEN:  {'['},
	TYPE_BRACKETCLOSE: {']'},
}
Functions
Types
type Lexer
type Lexer struct {
// contains filtered or unexported fields
}
type Token
type TokenReader
type TokenReader struct {
// contains filtered or unexported fields
}
func NewTokenReader
func NewTokenReader(in chan Token) *TokenReader
func (*TokenReader) Discard
func (tr *TokenReader) Discard()
func (*TokenReader) Read
func (tr *TokenReader) Read(n int) ([]Token, bool)
func (*TokenReader) Unread
func (tr *TokenReader) Unread(n int) bool
type TokenStream
type TokenStream struct {
// contains filtered or unexported fields
}
func NewTokenStream
func NewTokenStream(toks []Token) *TokenStream
func (*TokenStream) Out
func (s *TokenStream) Out() chan Token
Click to show internal directories.
Click to hide internal directories.