syntax

package
v0.153.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Aug 31, 2020 License: Apache-2.0 Imports: 36 Imported by: 7

Documentation

Overview

AUTOGENERATED. DO NOT EDIT.

Index

Constants

View Source
const ModuleRootSentinel = "go.mod"

ModuleRootSentinel is a file which marks the module root of a project.

View Source
const NoPath = "\000"

Variables

View Source
var BuildInfo rel.Value = rel.EmptyTuple

BuildInfo represents arr.ai build information.

View Source
var RunOmitted = false

Functions

func AssertCodeErrors

func AssertCodeErrors(t *testing.T, errString, code string) bool

AssertCodeErrors asserts that code fails with a certain message when executed.

func AssertCodeEvalsToGrammar added in v0.73.0

func AssertCodeEvalsToGrammar(t *testing.T, expected parser.Grammar, code string)

AssertCodeEvalsToGrammar asserts that code evaluates to a grammar equal to expected.

func AssertCodeEvalsToType

func AssertCodeEvalsToType(t *testing.T, expected interface{}, code string) bool

AssertCodeEvalsToType asserts that code evaluates to the same type as expected.

func AssertCodePanics

func AssertCodePanics(t *testing.T, code string) bool

AssertCodePanics asserts that code panics when executed. TODO: Remove this. Should only intentionally panic for implementation bugs.

func AssertCodesEvalToSameValue

func AssertCodesEvalToSameValue(t *testing.T, expected, code string) bool

AssertCodesEvalToSameValue asserts that code evaluates to the same value as expected.

func AssertEvalExprString added in v0.3.0

func AssertEvalExprString(t *testing.T, expected, source string) bool

AssertEvalExprString asserts Expr string.

func AssertScan

func AssertScan(t *testing.T, l *Lexer, tok Token, intf interface{}, lexeme string) bool

AssertScan asserts that a lexer's next produced token is as expected.

func Compile

func Compile(ctx context.Context, filePath, source string) (rel.Expr, error)

Compile compiles source string.

func EvalWithScope

func EvalWithScope(ctx context.Context, path, source string, scope rel.Scope) (rel.Value, error)

func EvaluateExpr

func EvaluateExpr(ctx context.Context, path, source string) (rel.Value, error)

func FixFuncs added in v0.3.0

func FixFuncs() (rel.Value, rel.Value)

func GetBuildInfo added in v0.103.0

func GetBuildInfo(version, date, fullCommit, tags, os, arch, goVersion string) rel.Value

GetBuildInfo returns arr.ai build information.

func MustCompile

func MustCompile(ctx context.Context, filePath, source string) rel.Expr

func NewPackageExpr

func NewPackageExpr(scanner parser.Scanner, a rel.Expr) rel.Expr

NewPackageExpr evaluates to !a.

func ParseArraiString

func ParseArraiString(lexeme []byte) string

ParseArraiString parses an arr.ai string.

func PrettifyString added in v0.102.0

func PrettifyString(val rel.Value, indentsNum int) (string, error)

PrettifyString returns a string which represents `rel.Value` in a more readable format. For example, `{b: 2, a: 1, c: (a: 2, b: {aa: {bb: (a: 22, d: {3, 1, 2})}})}` is formatted to:

{
	b: 2,
	a: 1,
	c: (
		a: 2,
		b: {
			aa: {
				bb: (
					a: 22,
					d: {3, 1, 2}
				)
			}
		}
	)
}

func RequireCodesEvalToSameValue

func RequireCodesEvalToSameValue(t *testing.T, expected string, code string)

RequireCodesEvalToSameValue requires that code evaluates to the same value as expected.

func StdScope

func StdScope() rel.Scope

func TokenRepr

func TokenRepr(token Token) string

TokenRepr returns a string representation of a token value.

Types

type Enumerable added in v0.117.0

type Enumerable interface {
	ArrayEnumerator() (rel.OffsetValueEnumerator, bool)
}

type FilePos

type FilePos struct {
	Line   int
	Column int
}

FilePos represents a position in a line-based file.

func (FilePos) Advance

func (fp FilePos) Advance(b []byte) FilePos

Advance returns a FilePos advanced by the given bytes.

func (FilePos) Less

func (fp FilePos) Less(fp2 FilePos) bool

Less returns true iff fp comes before fp2.

func (FilePos) String

func (fp FilePos) String() string

String returns a string representation of a FilePos.

type FileRange

type FileRange struct {
	Start FilePos
	End   FilePos
}

FileRange represents a range of characters in a line-based file.

func (FileRange) String

func (fr FileRange) String() string

String returns a string representation of a FileRange.

func (FileRange) Union

func (fr FileRange) Union(fr2 FileRange) FileRange

Union returns the minimal range that covers fr and fr2.

type ImportExpr added in v0.146.0

type ImportExpr struct {
	rel.ExprScanner
	// contains filtered or unexported fields
}

func NewImportExpr added in v0.146.0

func NewImportExpr(scanner parser.Scanner, imported rel.Expr, path string) ImportExpr

func (ImportExpr) Eval added in v0.146.0

func (i ImportExpr) Eval(ctx context.Context, _ rel.Scope) (rel.Value, error)

func (ImportExpr) String added in v0.146.0

func (i ImportExpr) String() string

type Lexer

type Lexer struct {
	// contains filtered or unexported fields
}

Lexer extracts a stream of tokens from an input file.

func NewLexer

func NewLexer(reader io.Reader) *Lexer

NewLexer returns a new Lexer for the given input.

func NewLexerWithPrefix

func NewLexerWithPrefix(prefix *bytes.Buffer, reader io.Reader) *Lexer

NewLexerWithPrefix returns a new Lexer for the given input.

func NewStringLexer

func NewStringLexer(input string) *Lexer

NewStringLexer returns a new Lexer for the given input.

func (*Lexer) Data

func (l *Lexer) Data() interface{}

Data returns the current data.

func (*Lexer) Error

func (l *Lexer) Error() error

Error returns the error for the most recently failed scan, if any.

func (*Lexer) Fail

func (l *Lexer) Fail(err error) Token

Fail sets an error and returns the ERROR token.

func (*Lexer) Failf

func (l *Lexer) Failf(fmtStr string, args ...interface{}) Token

Failf produces a formatted error with a line marker.

func (*Lexer) FileRange

func (l *Lexer) FileRange() FileRange

FileRange returns the FileRange for the most recently recognized token.

func (*Lexer) InState

func (l *Lexer) InState(state lexerState, f func())

InState wraps a lambda in PushState()/PopState().

func (*Lexer) Lexeme

func (l *Lexer) Lexeme() []byte

Lexeme returns the lexeme for the most recently recognized token.

func (*Lexer) Offset

func (l *Lexer) Offset() int

Offset returns the current scanning position as an offset from the start of the input.

func (*Lexer) Peek

func (l *Lexer) Peek() Token

Peek peeks at the next token. First scans the next token if Peek() has not been called since the last call to Lex().

func (*Lexer) PopState

func (l *Lexer) PopState()

PopState pops the current state off the stack and makes it current.

func (*Lexer) PushState

func (l *Lexer) PushState(state lexerState)

PushState pushes the current state onto the stack, and makes the given state current.

func (*Lexer) Reader

func (l *Lexer) Reader() io.Reader

Reader returns the lexer's input reader.

func (*Lexer) Scan

func (l *Lexer) Scan(expected ...Token) bool

Scan scans the expected tokens, otherwise stays put. If no expected tokens are given, scans any token. Returns true iff a token was scanned.

func (*Lexer) ScanOperator

func (l *Lexer) ScanOperator(operatorsRe *regexp.Regexp) (Token, interface{})

ScanOperator tries to recognise an operator or returns NULL.

func (*Lexer) ScanOperatorOrSymbol

func (l *Lexer) ScanOperatorOrSymbol(
	operatorsRe *regexp.Regexp, symbols []LexerSymbol,
) (Token, interface{})

ScanOperatorOrSymbol tries to scan an operator or a symbol, or returns NULL.

func (*Lexer) ScanSymbol

func (l *Lexer) ScanSymbol(symbols []LexerSymbol) (Token, interface{})

ScanSymbol tries to scan each given symbol or returns NULL.

func (*Lexer) String

func (l *Lexer) String() string

String produces a formatted string representation of the lexer with a line marker.

func (*Lexer) Tail

func (l *Lexer) Tail() []byte

Tail returns the unconsumed portion of the buffer.

func (*Lexer) Token

func (l *Lexer) Token() Token

Token returns the most recently recognized token.

func (*Lexer) Value

func (l *Lexer) Value() rel.Value

Value returns the Value for the most recently recognized token.

type LexerSymbol

type LexerSymbol struct {
	// contains filtered or unexported fields
}

LexerSymbol is a convenience structure for defining token recognition regexes and handler functions.

type Macro added in v0.73.0

type Macro struct {
	// contains filtered or unexported fields
}

Macro represents the metadata of a macro invocation: the grammar and rule to parse with, and the transform to apply to the parsed body.

type MacroValue added in v0.73.0

type MacroValue struct {
	ast.Extra
	// contains filtered or unexported fields
}

MacroValue is an Extra node with an Expr value and a Scanner for the macro source.

func NewMacroValue added in v0.73.0

func NewMacroValue(expr rel.Expr, scanner parser.Scanner) MacroValue

NewMacroValue returns a MacroValue with a given Expr and Scanner.

func (MacroValue) Scanner added in v0.73.0

func (m MacroValue) Scanner() parser.Scanner

Scanner returns a scanner of the source that was replaced by the macro.

func (MacroValue) SubExpr added in v0.73.0

func (m MacroValue) SubExpr() rel.Expr

SubExpr returns the Expr resulting from evaluating the macro.

type PackageExpr

type PackageExpr struct {
	rel.ExprScanner
	// contains filtered or unexported fields
}

PackageExpr represents a range of operators.

func (PackageExpr) Arg

func (e PackageExpr) Arg() rel.Expr

Arg returns the PackageExpr's arg.

func (PackageExpr) Eval

func (e PackageExpr) Eval(ctx context.Context, _ rel.Scope) (rel.Value, error)

Eval returns the subject

func (PackageExpr) String

func (e PackageExpr) String() string

String returns a string representation of the expression.

type ParseContext

type ParseContext struct {
	SourceDir string
}

func (ParseContext) CompileExpr

func (pc ParseContext) CompileExpr(ctx context.Context, b ast.Branch) (rel.Expr, error)

func (ParseContext) MustParse

func (pc ParseContext) MustParse(ctx context.Context, s *parser.Scanner) ast.Branch

MustParse parses input and returns the parsed Expr or panics.

func (ParseContext) MustParseString

func (pc ParseContext) MustParseString(ctx context.Context, s string) ast.Branch

MustParseString parses input string and returns the parsed Expr or panics.

func (ParseContext) Parse

func (pc ParseContext) Parse(ctx context.Context, s *parser.Scanner) (ast.Branch, error)

Parse parses input and returns the parsed Expr or an error.

func (ParseContext) ParseString

func (pc ParseContext) ParseString(ctx context.Context, s string) (ast.Branch, error)

ParseString parses input string and returns the parsed Expr or an error.

type Token

type Token rune

Token represents a lexical token.

const (
	NULL  Token = 0
	ERROR Token = 255 + iota
	EOF

	NUMBER
	IDENT
	STRING
	XML

	AND
	AS
	ELSE
	EXCEPT
	FOR
	IF
	IN
	MAX
	MEAN
	MEDIAN
	MIN
	NEST
	OR
	ORDER
	UNNEST
	WHERE
	WITH
	WITHOUT
	COUNT
	SUM

	ARROW   // ->
	ARROWST // ->*
	ATARROW // @>
	CSET    // |}
	DARROW  // =>
	GEQ     // >=
	DSLASH  // //
	JOIN    // <&>
	LEQ     // <=
	NEQ     // !=
	OSET    // {|
	PI      // π
	SQRT    // √
	SUBMOD  // -%
)

Non-character tokens

func LexerInitState

func LexerInitState(l *Lexer) (Token, interface{})

LexerInitState recognises the next input Token.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL