Documentation

Index
- Variables
- func OptimizeAst(root ast.Node) ast.Node
- type Error
- type InfixFn
- type PipeParser
- type PostLexer
- func (p *PostLexer) All() []*tokens.Token
- func (p *PostLexer) EatToken() *tokens.Token
- func (p *PostLexer) Errors() []lexers.LexerError
- func (p *PostLexer) HasErrors() bool
- func (p *PostLexer) Next() (token *tokens.Token, eof bool)
- func (p *PostLexer) Optimize() error
- func (p *PostLexer) PeekToken() *tokens.Token
- func (p *PostLexer) PeekTokenAt(i int) *tokens.Token
- func (p *PostLexer) PrevToken() *tokens.Token
- func (p *PostLexer) PrevTokenAt(i int) *tokens.Token
- type PostfixFn
- type PreLexer
- type PrefixFn
- type Stack
Constants
This section is empty.
Variables
var (
	T_EOF     = tokens.EOF
	T_INVALID = tokens.INVALID

	T_EOE        tokens.TokenType = "eoe" // end of expression
	T_KEYWORD    tokens.TokenType = "keyword"
	T_OPERATOR   tokens.TokenType = "operator"
	T_ASSIGNMENT tokens.TokenType = "assignment"
	T_IDENTIFIER tokens.TokenType = "identifier"
	T_NUMBER     tokens.TokenType = "number"
	T_HEX_NUMBER tokens.TokenType = "hex_number"
	T_BIN_NUMBER tokens.TokenType = "bin_number"
	T_OCT_NUMBER tokens.TokenType = "oct_number"
	T_STRING     tokens.TokenType = "string"
	T_BOOLEAN    tokens.TokenType = "boolean"
	T_COMMENT    tokens.TokenType = "comment"
	T_SPREAD     tokens.TokenType = "spread"
	T_COMMA      tokens.TokenType = "comma"
	T_UNWRAP     tokens.TokenType = "unwrap"
	T_WRAP       tokens.TokenType = "wrap"
	T_ACCESS     tokens.TokenType = "access"
	T_LAMBDA     tokens.TokenType = "lambda"
	T_PIPE       tokens.TokenType = "pipe"
	T_LPAREN     tokens.TokenType = "lparen"
	T_RPAREN     tokens.TokenType = "rparen"
	T_LBRACK     tokens.TokenType = "lbrack"
	T_RBRACK     tokens.TokenType = "rbrack"
	T_LBRACE     tokens.TokenType = "lbrace"
	T_RBRACE     tokens.TokenType = "rbrace"
)
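For illustration, a minimal sketch dispatching on these token types; describe is a hypothetical helper, not part of the package:

// describe is a hypothetical helper mapping token types to labels.
func describe(t tokens.TokenType) string {
	switch t {
	case T_NUMBER, T_HEX_NUMBER, T_BIN_NUMBER, T_OCT_NUMBER:
		return "numeric literal"
	case T_STRING:
		return "string literal"
	case T_LPAREN, T_RPAREN, T_LBRACK, T_RBRACK, T_LBRACE, T_RBRACE:
		return "bracket"
	default:
		// The declarations above imply tokens.TokenType has a string
		// underlying type, so this conversion is valid.
		return string(t)
	}
}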
var Assignments = []string{
":=",
"=",
"+=",
"-=",
"*=",
"/=",
"%=",
"^=",
"..=",
}
var InfixOperators = []string{
"+",
"-",
"*",
"/",
"%",
"^",
"==",
"!=",
"<",
">",
"<=",
">=",
"<=>",
"??",
"..",
"and",
"or",
"xor",
}
var Keywords = []string{
"if",
"else",
"for",
"with",
"match",
"return",
"raise",
"yield",
"break",
"continue",
"defer",
"true",
"false",
"data",
"fn",
"as",
"is",
"in",
}
var Operators = slices.Concat(PrefixOperators, InfixOperators)
var PrefixOperators = []string{
"+",
"-",
"not",
}
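Since these tables are plain []string values, membership checks are one-liners with the standard library's slices.Contains (Go 1.21+, already imported by this package for slices.Concat); the helper names below are illustrative, not part of the package:

// isOperator reports whether lit appears in the combined operator table.
func isOperator(lit string) bool {
	return slices.Contains(Operators, lit)
}

// isAssignment reports whether lit is one of the assignment operators.
func isAssignment(lit string) bool {
	return slices.Contains(Assignments, lit)
}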
Functions

func OptimizeAst

func OptimizeAst(root ast.Node) ast.Node
Types
type PipeParser
type PipeParser struct {
	*parsers.BaseParser
	Lexer *PostLexer
	Log   *log.Logger
	// contains filtered or unexported fields
}
func NewPipeParser
func NewPipeParser(lexer *PostLexer) *PipeParser
func (*PipeParser) ExpectTypes
func (p *PipeParser) ExpectTypes(expected ...tokens.TokenType) bool
Checks if the next token is one of the given types.
func (*PipeParser) NotExpectTypes
func (p *PipeParser) NotExpectTypes(unexpected ...tokens.TokenType) bool
Checks if the next token is not one of the given types.
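A hedged sketch of the guard pattern these two checks enable inside a parse step; parseGuarded is a hypothetical helper, and whether the checks consume the token is not specified on this page:

// parseGuarded is a hypothetical helper showing the guard pattern.
func parseGuarded(p *PipeParser) bool {
	// Proceed only when the upcoming token can plausibly start an expression.
	if !p.ExpectTypes(T_IDENTIFIER, T_NUMBER, T_LPAREN) {
		return false
	}
	// Bail out once the expression stream has ended.
	return p.NotExpectTypes(T_EOF, T_EOE)
}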
func (*PipeParser) Parse
func (p *PipeParser) Parse() ast.Node
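A minimal sketch wiring the documented pieces together, using only the signatures shown on this page (constructing the *PostLexer itself is elided, since NewPostLexer's signature is not listed):

// parseAndOptimize parses the token stream and runs the package's
// post-parse AST pass; parseAndOptimize itself is illustrative.
func parseAndOptimize(lx *PostLexer) ast.Node {
	p := NewPipeParser(lx)
	root := p.Parse()
	// OptimizeAst(root ast.Node) ast.Node, per this package's index.
	return OptimizeAst(root)
}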
type PostLexer
type PostLexer struct {
// contains filtered or unexported fields
}
func NewPostLexer
func (*PostLexer) All

func (p *PostLexer) All() []*tokens.Token

OPTIMIZATION ---------------------------------------------------------------
func (*PostLexer) Errors
func (p *PostLexer) Errors() []lexers.LexerError
INTERFACE ------------------------------------------------------------------
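Per the index, Next has the signature Next() (token *tokens.Token, eof bool); a sketch draining the stream one token at a time (All appears to return the same tokens in a single call), with drain as a hypothetical helper:

// drain collects tokens from the lexer until it reports end of input.
func drain(lx *PostLexer) []*tokens.Token {
	var toks []*tokens.Token
	for {
		tok, eof := lx.Next()
		if eof {
			break
		}
		toks = append(toks, tok)
	}
	return toks
}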
type PreLexer
func NewPreLexer
Source Files