Documentation ¶
Overview ¶
AUTOGENERATED. DO NOT EDIT.
Index ¶
- Constants
- Variables
- func AssertCodeErrors(t *testing.T, errString, code string) bool
- func AssertCodeEvalsToGrammar(t *testing.T, expected parser.Grammar, code string)
- func AssertCodeEvalsToType(t *testing.T, expected interface{}, code string) bool
- func AssertCodePanics(t *testing.T, code string) bool
- func AssertCodesEvalToSameValue(t *testing.T, expected, code string) bool
- func AssertEvalExprString(t *testing.T, expected, source string) bool
- func AssertScan(t *testing.T, l *Lexer, tok Token, intf interface{}, lexeme string) bool
- func Compile(filepath, source string) (_ rel.Expr, err error)
- func EvalWithScope(path, source string, scope rel.Scope) (rel.Value, error)
- func EvaluateExpr(path, source string) (rel.Value, error)
- func FixFuncs() (rel.Value, rel.Value)
- func GetBuildInfo(version, date, fullCommit, tags, os, arch, goVersion string) rel.Value
- func MustCompile(filePath, source string) rel.Expr
- func NewPackageExpr(scanner parser.Scanner, a rel.Expr) rel.Expr
- func ParseArraiString(lexeme []byte) string
- func PrettifyString(val rel.Value, indentsNum int) (string, error)
- func RequireCodesEvalToSameValue(t *testing.T, expected string, code string)
- func StdScope() rel.Scope
- func TokenRepr(token Token) string
- type Enumerable
- type FilePos
- type FileRange
- type Lexer
- func (l *Lexer) Data() interface{}
- func (l *Lexer) Error() error
- func (l *Lexer) Fail(err error) Token
- func (l *Lexer) Failf(fmtStr string, args ...interface{}) Token
- func (l *Lexer) FileRange() FileRange
- func (l *Lexer) InState(state lexerState, f func())
- func (l *Lexer) Lexeme() []byte
- func (l *Lexer) Offset() int
- func (l *Lexer) Peek() Token
- func (l *Lexer) PopState()
- func (l *Lexer) PushState(state lexerState)
- func (l *Lexer) Reader() io.Reader
- func (l *Lexer) Scan(expected ...Token) bool
- func (l *Lexer) ScanOperator(operatorsRe *regexp.Regexp) (Token, interface{})
- func (l *Lexer) ScanOperatorOrSymbol(operatorsRe *regexp.Regexp, symbols []LexerSymbol) (Token, interface{})
- func (l *Lexer) ScanSymbol(symbols []LexerSymbol) (Token, interface{})
- func (l *Lexer) String() string
- func (l *Lexer) Tail() []byte
- func (l *Lexer) Token() Token
- func (l *Lexer) Value() rel.Value
- type LexerSymbol
- type Macro
- type MacroValue
- type PackageExpr
- type ParseContext
- func (pc ParseContext) CompileExpr(b ast.Branch) rel.Expr
- func (pc ParseContext) MustParse(s *parser.Scanner) ast.Branch
- func (pc ParseContext) MustParseString(s string) ast.Branch
- func (pc ParseContext) Parse(s *parser.Scanner) (ast.Branch, error)
- func (pc ParseContext) ParseString(s string) (ast.Branch, error)
- type Token
Constants ¶
const NoPath = "\000"
Variables ¶
var BuildInfo rel.Value
BuildInfo represents arr.ai build information.
var RunOmitted = false
Functions ¶
func AssertCodeErrors ¶
AssertCodeErrors asserts that code fails with a certain message when executed.
func AssertCodeEvalsToGrammar ¶ added in v0.73.0
AssertCodeEvalsToGrammar asserts that code evaluates to a grammar equal to expected.
func AssertCodeEvalsToType ¶
AssertCodeEvalsToType asserts that code evaluates to the same type as expected.
func AssertCodePanics ¶
AssertCodePanics asserts that code panics when executed. TODO: Remove this. Should only intentionally panic for implementation bugs.
func AssertCodesEvalToSameValue ¶
AssertCodesEvalToSameValue asserts that code evaluates to the same value as expected.
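For example, a minimal sketch of a test using this helper (the import path github.com/arr-ai/arrai/syntax and the external test package are assumptions, not taken from this page):

package syntax_test

import (
	"testing"

	"github.com/arr-ai/arrai/syntax"
)

// TestArithmetic checks that two arr.ai expressions evaluate to the same value.
func TestArithmetic(t *testing.T) {
	// Both arguments are arr.ai source strings; each is compiled and
	// evaluated, and the resulting values are compared.
	syntax.AssertCodesEvalToSameValue(t, `6`, `2 * 3`)
	syntax.AssertCodesEvalToSameValue(t, `{1, 2, 3}`, `{3, 1, 2}`)
}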
func AssertEvalExprString ¶ added in v0.3.0
AssertEvalExprString asserts Expr string.
func AssertScan ¶
AssertScan asserts that a lexer's next produced token is as expected.
func GetBuildInfo ¶ added in v0.103.0
GetBuildInfo returns arr.ai build information.
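A sketch of a call with placeholder build metadata (all string values below are hypothetical; real builds typically inject them at link time, and the import path is assumed):

package main

import (
	"fmt"
	"runtime"

	"github.com/arr-ai/arrai/syntax"
)

func main() {
	// All literal arguments here are placeholder values.
	info := syntax.GetBuildInfo(
		"v0.0.0-dev", "1970-01-01T00:00:00Z", "0000000", "dev",
		runtime.GOOS, runtime.GOARCH, runtime.Version(),
	)
	fmt.Println(info) // info is a rel.Value describing the build
}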
func MustCompile ¶
func NewPackageExpr ¶
NewPackageExpr returns an expression that evaluates to !a.
func ParseArraiString ¶
ParseArraiString parses an arr.ai string.
func PrettifyString ¶ added in v0.102.0
PrettifyString returns a string which represents `rel.Value` in a more readable format. For example, `{b: 2, a: 1, c: (a: 2, b: {aa: {bb: (a: 22, d: {3, 1, 2})}})}` is formatted to:
{
  b: 2,
  a: 1,
  c: (
    a: 2,
    b: {
      aa: {
        bb: (
          a: 22,
          d: {3, 1, 2}
        )
      }
    }
  )
}
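A sketch that evaluates a small expression and prettifies the result (the import path github.com/arr-ai/arrai/syntax is an assumption):

package main

import (
	"fmt"
	"log"

	"github.com/arr-ai/arrai/syntax"
)

func main() {
	// NoPath marks source that is not backed by a file.
	val, err := syntax.EvaluateExpr(syntax.NoPath, `{b: 2, a: 1, c: (a: 2)}`)
	if err != nil {
		log.Fatal(err)
	}

	// Prettify with an initial indent level of 0.
	pretty, err := syntax.PrettifyString(val, 0)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pretty)
}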
func RequireCodesEvalToSameValue ¶
RequireCodesEvalToSameValue requires that code evaluates to the same value as expected.
Types ¶
type Enumerable ¶ added in v0.117.0
type Enumerable interface {
	ArrayEnumerator() (rel.OffsetValueEnumerator, bool)
}
type FilePos ¶
FilePos represents a position in a line-based file.
type FileRange ¶
FileRange represents a range of characters in a line-based file.
type Lexer ¶
type Lexer struct {
	// contains filtered or unexported fields
}
Lexer extracts a stream of tokens from an input file.
func NewLexerWithPrefix ¶
NewLexerWithPrefix returns a new Lexer for the given input.
func NewStringLexer ¶
NewStringLexer returns a new Lexer for the given input.
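A sketch that tokenises a short input (it assumes NewStringLexer takes the input as a plain string, which is not spelled out on this page, and the import path is also an assumption):

package main

import (
	"fmt"

	"github.com/arr-ai/arrai/syntax"
)

func main() {
	l := syntax.NewStringLexer(`1 + 2`) // signature assumed: input as string

	// Scan with no expected tokens accepts any token; stop at EOF.
	for l.Scan() && l.Token() != syntax.EOF {
		fmt.Printf("%s %q\n", syntax.TokenRepr(l.Token()), l.Lexeme())
	}
	if err := l.Error(); err != nil {
		fmt.Println("lex error:", err)
	}
}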
func (*Lexer) InState ¶
func (l *Lexer) InState(state lexerState, f func())
InState wraps a lambda in PushState()/PopState().
func (*Lexer) Offset ¶
Offset returns the current scanning position as an offset from the start of the input.
func (*Lexer) Peek ¶
Peek peeks at the next token. It first scans the next token if Peek() has not been called since the last call to Scan().
func (*Lexer) PopState ¶
func (l *Lexer) PopState()
PopState pops the most recently pushed state off the stack and makes it current.
func (*Lexer) PushState ¶
func (l *Lexer) PushState(state lexerState)
PushState pushes the current state onto the stack, and makes the given state current.
func (*Lexer) Scan ¶
Scan scans the next token if it is one of the expected tokens; otherwise the lexer stays put. If no expected tokens are given, any token is scanned. Returns true iff a token was scanned.
func (*Lexer) ScanOperator ¶
ScanOperator tries to recognise an operator or returns NULL.
func (*Lexer) ScanOperatorOrSymbol ¶
func (l *Lexer) ScanOperatorOrSymbol(operatorsRe *regexp.Regexp, symbols []LexerSymbol) (Token, interface{})
ScanOperatorOrSymbol tries to scan an operator or a symbol, or returns NULL.
func (*Lexer) ScanSymbol ¶
func (l *Lexer) ScanSymbol(symbols []LexerSymbol) (Token, interface{})
ScanSymbol tries to scan each given symbol or returns NULL.
func (*Lexer) String ¶
String produces a formatted string representation of the lexer with a line marker.
type LexerSymbol ¶
type LexerSymbol struct {
	// contains filtered or unexported fields
}
LexerSymbol is a convenience structure for defining token recognition regexes and handler functions.
type Macro ¶ added in v0.73.0
type Macro struct {
	// contains filtered or unexported fields
}
Macro represents the metadata of a macro invocation: the grammar and rule to parse with, and the transform to apply to the parsed body.
type MacroValue ¶ added in v0.73.0
MacroValue is an Extra node with an Expr value and a Scanner for the macro source.
func NewMacroValue ¶ added in v0.73.0
func NewMacroValue(expr rel.Expr, scanner parser.Scanner) MacroValue
NewMacroValue returns a MacroValue with a given Expr and Scanner.
func (MacroValue) Scanner ¶ added in v0.73.0
func (m MacroValue) Scanner() parser.Scanner
Scanner returns a scanner of the source that was replaced by the macro.
func (MacroValue) SubExpr ¶ added in v0.73.0
func (m MacroValue) SubExpr() rel.Expr
SubExpr returns the Expr resulting from evaluating the macro.
type PackageExpr ¶
type PackageExpr struct {
	rel.ExprScanner
	// contains filtered or unexported fields
}
PackageExpr represents a package reference expression.
func (PackageExpr) String ¶
func (e PackageExpr) String() string
String returns a string representation of the expression.
type ParseContext ¶
type ParseContext struct {
	SourceDir string
}
func (ParseContext) CompileExpr ¶
func (pc ParseContext) CompileExpr(b ast.Branch) rel.Expr
func (ParseContext) MustParse ¶
func (pc ParseContext) MustParse(s *parser.Scanner) ast.Branch
MustParse parses input and returns the parsed Expr or panics.
func (ParseContext) MustParseString ¶
func (pc ParseContext) MustParseString(s string) ast.Branch
MustParseString parses input string and returns the parsed Expr or panics.
func (ParseContext) ParseString ¶
func (pc ParseContext) ParseString(s string) (ast.Branch, error)
ParseString parses input string and returns the parsed Expr or an error.
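A sketch of parsing and compiling a small expression (the import path github.com/arr-ai/arrai/syntax is an assumption); the resulting rel.Expr can then be evaluated, for example against StdScope():

package main

import (
	"fmt"
	"log"

	"github.com/arr-ai/arrai/syntax"
)

func main() {
	pc := syntax.ParseContext{SourceDir: "."}

	// Parse arr.ai source into an AST branch.
	branch, err := pc.ParseString(`6 * 7`)
	if err != nil {
		log.Fatal(err)
	}

	// Compile the branch into a rel.Expr.
	expr := pc.CompileExpr(branch)
	fmt.Println(expr)
}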
type Token ¶
type Token rune
Token represents a lexical token.
const (
	NULL  Token = 0
	ERROR Token = 255 + iota
	EOF
	NUMBER
	IDENT
	STRING
	XML
	AND
	AS
	ELSE
	EXCEPT
	FOR
	IF
	IN
	MAX
	MEAN
	MEDIAN
	MIN
	NEST
	OR
	ORDER
	UNNEST
	WHERE
	WITH
	WITHOUT
	COUNT
	SUM
	ARROW   // ->
	ARROWST // ->*
	ATARROW // @>
	CSET    // |}
	DARROW  // =>
	GEQ     // >=
	DSLASH  // //
	JOIN    // <&>
	LEQ     // <=
	NEQ     // !=
	OSET    // {|
	PI      // π
	SQRT    // √
	SUBMOD  // -%
)
Non-character tokens
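A sketch that prints the representations of a few non-character tokens via TokenRepr (the import path is an assumption):

package main

import (
	"fmt"

	"github.com/arr-ai/arrai/syntax"
)

func main() {
	for _, tok := range []syntax.Token{syntax.EOF, syntax.NUMBER, syntax.ARROW, syntax.DARROW} {
		fmt.Println(syntax.TokenRepr(tok))
	}
}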
func LexerInitState ¶
LexerInitState recognises the next input Token.
Source Files ¶
- compile.go
- compile_xstr.go
- eval.go
- expr_package.go
- expr_set_compare.go
- file_pos.go
- import.go
- import_cache.go
- lex.go
- logging.go
- parse.go
- parse_macro.go
- parse_string.go
- parse_xml.go
- parser.go
- prettify.go
- std.go
- std_archive.go
- std_bits.go
- std_encoding.go
- std_encoding_json.go
- std_encoding_yaml.go
- std_eval.go
- std_fmt.go
- std_func.go
- std_net.go
- std_os.go
- std_os_nonwasm.go
- std_re.go
- std_reflect.go
- std_rel.go
- std_runtime.go
- std_seq.go
- std_seq_array_helper.go
- std_seq_bytes_helper.go
- std_str.go
- std_tst.go
- test_helpers.go
- util.go