package internal

v0.0.0-...-b738e08
Published: Apr 28, 2024 License: Apache-2.0 Imports: 11 Imported by: 0

Documentation

Constants

This section is empty.

Variables

var (
	T_EOF     = tokens.EOF
	T_INVALID = tokens.INVALID

	T_EOE        tokens.TokenType = "eoe" // end of expression
	T_KEYWORD    tokens.TokenType = "keyword"
	T_OPERATOR   tokens.TokenType = "operator"
	T_ASSIGNMENT tokens.TokenType = "assignment"
	T_IDENTIFIER tokens.TokenType = "identifier"
	T_NUMBER     tokens.TokenType = "number"
	T_HEX_NUMBER tokens.TokenType = "hex_number"
	T_BIN_NUMBER tokens.TokenType = "bin_number"
	T_OCT_NUMBER tokens.TokenType = "oct_number"
	T_STRING     tokens.TokenType = "string"
	T_BOOLEAN    tokens.TokenType = "boolean"
	T_COMMENT    tokens.TokenType = "comment"

	T_SPREAD tokens.TokenType = "spread"
	T_COMMA  tokens.TokenType = "comma"
	T_UNWRAP tokens.TokenType = "unwrap"
	T_WRAP   tokens.TokenType = "wrap"
	T_ACCESS tokens.TokenType = "access"
	T_LAMBDA tokens.TokenType = "lambda"
	T_PIPE   tokens.TokenType = "pipe"

	T_LPAREN tokens.TokenType = "lparen"
	T_RPAREN tokens.TokenType = "rparen"
	T_LBRACK tokens.TokenType = "lbrack"
	T_RBRACK tokens.TokenType = "rbrack"
	T_LBRACE tokens.TokenType = "lbrace"
	T_RBRACE tokens.TokenType = "rbrace"
)
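
The token types above are ordinary tokens.TokenType values, so they can be compared and switched on directly. A minimal, purely illustrative sketch (written as if inside this package) that groups them into coarse categories:

// classify groups the package-level T_* token types into coarse categories.
// Hypothetical helper; the lexer's real classification logic is unexported.
func classify(tt tokens.TokenType) string {
	switch tt {
	case T_NUMBER, T_HEX_NUMBER, T_BIN_NUMBER, T_OCT_NUMBER:
		return "numeric literal"
	case T_STRING, T_BOOLEAN:
		return "other literal"
	case T_KEYWORD, T_OPERATOR, T_ASSIGNMENT, T_IDENTIFIER:
		return "word or operator"
	case T_EOF, T_EOE, T_INVALID:
		return "control"
	default:
		return "punctuation or other"
	}
}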

var Assignments = []string{
	":=",
	"=",
	"+=",
	"-=",
	"*=",
	"/=",
	"%=",
	"^=",
	"..=",
}

var InfixOperators = []string{
	"+",
	"-",
	"*",
	"/",
	"%",
	"^",

	"==",
	"!=",
	"<",
	">",
	"<=",
	">=",
	"<=>",

	"??",
	"..",

	"and",
	"or",
	"xor",
}

var Keywords = []string{
	"if",
	"else",
	"for",
	"with",
	"match",

	"return",
	"raise",
	"yield",
	"break",
	"continue",
	"defer",

	"true",
	"false",

	"data",
	"fn",
	"as",
	"is",
	"in",
}

var PrefixOperators = []string{
	"+",
	"-",
	"not",
}
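
These string tables presumably drive how the lexer classifies words and operators; checking membership in them is straightforward. A minimal sketch, written as if inside the package and using slices.Contains from the Go standard library (the helper names are hypothetical):

// isKeyword reports whether s is one of the language keywords listed above.
func isKeyword(s string) bool { return slices.Contains(Keywords, s) }

// isInfixOperator reports whether s appears in the InfixOperators table.
func isInfixOperator(s string) bool { return slices.Contains(InfixOperators, s) }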

Functions

func OptimizeAst

func OptimizeAst(root ast.Node) ast.Node

Types

type Error

type Error struct {
	Message string
	Node    ast.Node
	Stack   []ast.Node
}

func NewError

func NewError(message string, stack []ast.Node) *Error

func (*Error) At

func (e *Error) At() (line, column int)

func (*Error) Error

func (e *Error) Error() string

func (*Error) Range

func (e *Error) Range() (fromLine, fromCol, toLine, toCol int)
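
A short sketch of consuming an *Error through the methods documented here. Written as if inside this package with fmt imported; the helper name and the exact meaning of the positions returned by At and Range are assumptions:

// reportParseError renders e with its position information for display.
func reportParseError(e *Error) string {
	line, col := e.At()
	fromLine, fromCol, toLine, toCol := e.Range()
	return fmt.Sprintf("error at %d:%d (span %d:%d-%d:%d): %s",
		line, col, fromLine, fromCol, toLine, toCol, e.Error())
}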

type InfixFn

type InfixFn func(left ast.Node) ast.Node

type PipeParser

type PipeParser struct {
	*parsers.BaseParser
	Lexer *PostLexer
	Log   *log.Logger
	// contains filtered or unexported fields
}

func NewPipeParser

func NewPipeParser(lexer *PostLexer) *PipeParser

func (*PipeParser) ExpectTypes

func (p *PipeParser) ExpectTypes(expected ...tokens.TokenType) bool

Reports whether the next token is one of the given types.

func (*PipeParser) NotExpectTypes

func (p *PipeParser) NotExpectTypes(unexpected ...tokens.TokenType) bool

Reports whether the next token is none of the given types.

func (*PipeParser) Parse

func (p *PipeParser) Parse() ast.Node
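
Taken together, the documented constructors and methods suggest a post-lex, parse, then optimize pipeline. A minimal sketch under those assumptions, written as if inside this package with fmt imported; how the initial token slice is produced (presumably by PreLexer), and whether Optimize must be called explicitly before parsing, are not documented on this page:

// parseTokens is a hypothetical driver for the documented pipeline.
func parseTokens(toks []*tokens.Token) (ast.Node, error) {
	lexer := NewPostLexer(toks)

	// Assumption: the caller runs the post-lexer optimization pass itself.
	if err := lexer.Optimize(); err != nil {
		return nil, err
	}
	if lexer.HasErrors() {
		return nil, fmt.Errorf("post-lexer reported %d error(s)", len(lexer.Errors()))
	}

	parser := NewPipeParser(lexer)
	root := parser.Parse()

	// OptimizeAst returns a (possibly rewritten) tree; what it optimizes
	// is not described on this page.
	return OptimizeAst(root), nil
}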

type PostLexer

type PostLexer struct {
	// contains filtered or unexported fields
}

func NewPostLexer

func NewPostLexer(tokens []*tokens.Token) *PostLexer

func (*PostLexer) All

func (p *PostLexer) All() []*tokens.Token

func (*PostLexer) EatToken

func (p *PostLexer) EatToken() *tokens.Token

func (*PostLexer) Errors

func (p *PostLexer) Errors() []lexers.LexerError

func (*PostLexer) HasErrors

func (p *PostLexer) HasErrors() bool

func (*PostLexer) Next

func (p *PostLexer) Next() (token *tokens.Token, eof bool)

func (*PostLexer) Optimize

func (p *PostLexer) Optimize() error

func (*PostLexer) PeekToken

func (p *PostLexer) PeekToken() *tokens.Token

func (*PostLexer) PeekTokenAt

func (p *PostLexer) PeekTokenAt(i int) *tokens.Token

func (*PostLexer) PrevToken

func (p *PostLexer) PrevToken() *tokens.Token

func (*PostLexer) PrevTokenAt

func (p *PostLexer) PrevTokenAt(i int) *tokens.Token
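
Next reports an eof flag alongside each token, which allows a simple drain loop. A sketch written as if inside this package; the helper name is hypothetical, and whether a final token accompanies eof is an assumption (it is discarded here):

// drainTokens consumes the remaining stream via Next.
func drainTokens(p *PostLexer) []*tokens.Token {
	var out []*tokens.Token
	for {
		tok, eof := p.Next()
		if eof {
			return out
		}
		out = append(out, tok)
	}
}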

type PostfixFn

type PostfixFn func(left ast.Node) ast.Node

type PreLexer

type PreLexer struct {
	*lexers.BaseLexer
	// contains filtered or unexported fields
}

func NewPreLexer

func NewPreLexer(input []byte) *PreLexer

func (*PreLexer) EatChars

func (p *PreLexer) EatChars(n int) []tokens.Char

type PrefixFn

type PrefixFn func() ast.Node
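
Together with InfixFn and PostfixFn above, this signature looks like the handler types of a Pratt-style (precedence-climbing) parser, where each token type maps to a prefix, infix, or postfix handler. How PipeParser actually registers and dispatches its handlers is unexported; the tables below are purely hypothetical illustration:

// Hypothetical dispatch tables keyed by token type; not part of the
// documented API.
var (
	prefixFns  map[tokens.TokenType]PrefixFn
	infixFns   map[tokens.TokenType]InfixFn
	postfixFns map[tokens.TokenType]PostfixFn
)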

type Stack

type Stack[T any] struct {
	// contains filtered or unexported fields
}

Stack is a general-purpose, generic stack data structure.

func NewStack

func NewStack[T any]() *Stack[T]

func (*Stack[T]) Len

func (s *Stack[T]) Len() int

func (*Stack[T]) Peek

func (s *Stack[T]) Peek() T

func (*Stack[T]) PeekOr

func (s *Stack[T]) PeekOr(v T) T

func (*Stack[T]) Pop

func (s *Stack[T]) Pop() T

func (*Stack[T]) Push

func (s *Stack[T]) Push(e T)

func (*Stack[T]) Set

func (s *Stack[T]) Set(e T)
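
A short usage sketch, written as if inside this package with fmt imported. The behaviour of Peek and Pop on an empty stack is not documented, which is presumably what PeekOr's fallback covers:

func stackDemo() {
	s := NewStack[int]()
	s.Push(1)
	s.Push(2)

	fmt.Println(s.Peek())    // 2 (stack unchanged)
	fmt.Println(s.Pop())     // 2 (stack now holds only 1)
	fmt.Println(s.PeekOr(0)) // 1; would be 0 on an empty stack
	fmt.Println(s.Len())     // 1
}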
