package syntax

v0.45.0
Published: Jun 7, 2020 · License: Apache-2.0 · Imports: 32 · Imported by: 7

Documentation

Overview

AUTOGENERATED. DO NOT EDIT.

Index

Constants

const NoPath = "\000"

Variables

var RunOmitted = false

Functions

func AssertCodeErrors

func AssertCodeErrors(t *testing.T, code, errString string) bool

AssertCodeErrors asserts that code fails with the given error message when executed.

func AssertCodeEvalsToType

func AssertCodeEvalsToType(t *testing.T, expected interface{}, code string) bool

AssertCodeEvalsToType asserts that code evaluates to the same type as expected.

func AssertCodePanics

func AssertCodePanics(t *testing.T, code string) bool

AssertCodePanics asserts that code panics when executed.

func AssertCodesEvalToSameValue

func AssertCodesEvalToSameValue(t *testing.T, expected, code string) bool

AssertCodesEvalToSameValue asserts that code evaluates to the same value as expected.
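
A rough sketch of how these helpers might be used in a test; the import path and the arr.ai snippets are illustrative assumptions:

	package syntax_test

	import (
		"testing"

		"github.com/arr-ai/arrai/syntax"
	)

	func TestArithmetic(t *testing.T) {
		// Each argument is arr.ai source; both sides are evaluated and compared.
		syntax.AssertCodesEvalToSameValue(t, `3`, `1 + 2`)

		// Same check, but the test stops immediately on failure.
		syntax.RequireCodesEvalToSameValue(t, `6`, `2 * 3`)
	}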

func AssertEvalExprString added in v0.3.0

func AssertEvalExprString(t *testing.T, expected, source string) bool

AssertEvalExprString asserts that source evaluates to the expected Expr string.

func AssertScan

func AssertScan(t *testing.T, l *Lexer, tok Token, intf interface{}, lexeme string) bool

AssertScan asserts that a lexer's next produced token is as expected.

func Compile

func Compile(filepath, source string) (_ rel.Expr, err error)

Compile compiles the given source string into a rel.Expr.
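
A minimal sketch of compiling a snippet; the file name is only illustrative, and error handling is shown explicitly (MustCompile below is the panicking variant):

	package main

	import (
		"fmt"
		"log"

		"github.com/arr-ai/arrai/syntax"
	)

	func main() {
		expr, err := syntax.Compile("example.arrai", `{1, 2, 3}`)
		if err != nil {
			log.Fatal(err) // a syntax error in the source
		}
		fmt.Println(expr) // the compiled rel.Expr
	}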

func EvalWithScope

func EvalWithScope(path, source string, scope rel.Scope) (rel.Value, error)

func EvaluateExpr

func EvaluateExpr(path, source string) (rel.Value, error)
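
Continuing the sketch above (same imports), a one-shot evaluation plus the scope-explicit variant; whether EvaluateExpr itself uses StdScope is an assumption:

	value, err := syntax.EvaluateExpr("example.arrai", `6 * 7`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(value) // the resulting rel.Value

	// The same evaluation with an explicit scope (assumption: StdScope is the
	// standard environment that EvaluateExpr would otherwise use).
	value, err = syntax.EvalWithScope("example.arrai", `6 * 7`, syntax.StdScope())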

func FixFuncs added in v0.3.0

func FixFuncs() (rel.Value, rel.Value)

func MustCompile

func MustCompile(filePath, source string) rel.Expr

MustCompile is like Compile but panics if compilation fails.

func NewPackageExpr

func NewPackageExpr(scanner parser.Scanner, a rel.Expr) rel.Expr

NewPackageExpr returns a package expression that evaluates to !a.

func ParseArraiString

func ParseArraiString(lexeme []byte) string

ParseArraiString parses an arr.ai string.

func RequireCodesEvalToSameValue

func RequireCodesEvalToSameValue(t *testing.T, expected string, code string)

RequireCodesEvalToSameValue requires that code evaluates to the same value as expected.

func StdScope

func StdScope() rel.Scope

func TokenRepr

func TokenRepr(token Token) string

TokenRepr returns a string representation of a token value.

Types

type FilePos

type FilePos struct {
	Line   int
	Column int
}

FilePos represents a position in a line-based file.

func (FilePos) Advance

func (fp FilePos) Advance(b []byte) FilePos

Advance returns a FilePos advanced by the given bytes.

func (FilePos) Less

func (fp FilePos) Less(fp2 FilePos) bool

Less returns true iff fp comes before fp2.

func (FilePos) String

func (fp FilePos) String() string

String returns a string representation of a FilePos.
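
A small illustrative sketch (assuming fmt and the syntax import as above; the newline handling noted in the comment is an assumption):

	start := syntax.FilePos{Line: 1, Column: 1}
	next := start.Advance([]byte("ab\ncd")) // assumption: '\n' begins a new line
	fmt.Println(start.Less(next))           // true
	fmt.Println(next)                       // next's String() form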

type FileRange

type FileRange struct {
	Start FilePos
	End   FilePos
}

FileRange represents a range of characters in a line-based file.

func (FileRange) String

func (fr FileRange) String() string

String returns a string representation of a FileRange.

func (FileRange) Union

func (fr FileRange) Union(fr2 FileRange) FileRange

Union returns the minimal range that covers fr and fr2.
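
For example (same assumed imports):

	a := syntax.FileRange{
		Start: syntax.FilePos{Line: 1, Column: 1},
		End:   syntax.FilePos{Line: 1, Column: 5},
	}
	b := syntax.FileRange{
		Start: syntax.FilePos{Line: 2, Column: 3},
		End:   syntax.FilePos{Line: 2, Column: 9},
	}
	fmt.Println(a.Union(b)) // the minimal range covering both a and b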

type Lexer

type Lexer struct {
	// contains filtered or unexported fields
}

Lexer extracts a stream of tokens from an input file.

func NewLexer

func NewLexer(reader io.Reader) *Lexer

NewLexer returns a new Lexer for the given input.

func NewLexerWithPrefix

func NewLexerWithPrefix(prefix *bytes.Buffer, reader io.Reader) *Lexer

NewLexerWithPrefix returns a new Lexer that reads the contents of prefix before the given input.

func NewStringLexer

func NewStringLexer(input string) *Lexer

NewStringLexer returns a new Lexer for the given input.

func (*Lexer) Data

func (l *Lexer) Data() interface{}

Data returns the current data.

func (*Lexer) Error

func (l *Lexer) Error() error

Error returns the most recently recorded error, if any.

func (*Lexer) Fail

func (l *Lexer) Fail(err error) Token

Fail sets an error and returns the ERROR token.

func (*Lexer) Failf

func (l *Lexer) Failf(fmtStr string, args ...interface{}) Token

Failf produces a formatted error with a line marker.

func (*Lexer) FileRange

func (l *Lexer) FileRange() FileRange

FileRange returns the FileRange for the most recently recognized token.

func (*Lexer) InState

func (l *Lexer) InState(state lexerState, f func())

InState wraps a lambda in PushState()/PopState().

func (*Lexer) Lexeme

func (l *Lexer) Lexeme() []byte

Lexeme returns the lexeme for the most recently recognized token.

func (*Lexer) Offset

func (l *Lexer) Offset() int

Offset returns the current scanning position as an offset from the start of the input.

func (*Lexer) Peek

func (l *Lexer) Peek() Token

Peek peeks at the next token. First scans the next token if Peek() has not been called since the last call to Lex().

func (*Lexer) PopState

func (l *Lexer) PopState()

PopState pops the top state off the stack and makes it the current state.

func (*Lexer) PushState

func (l *Lexer) PushState(state lexerState)

PushState pushes the current state onto the stack, and makes the given state current.

func (*Lexer) Reader

func (l *Lexer) Reader() io.Reader

Reader returns the lexer's underlying input reader.

func (*Lexer) Scan

func (l *Lexer) Scan(expected ...Token) bool

Scan scans the expected tokens, otherwise stays put. If no expected tokens are given, scans any token. Returns true iff a token was scanned.
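
A sketch of a token-scanning loop; that Scan keeps returning true until an EOF or ERROR token is produced is an assumption, not guaranteed by this listing:

	l := syntax.NewStringLexer(`1 + 2`)
	for l.Scan() { // no expected tokens: scan whatever comes next
		tok := l.Token()
		fmt.Printf("%s %q\n", syntax.TokenRepr(tok), l.Lexeme())
		if tok == syntax.EOF || tok == syntax.ERROR {
			break
		}
	}
	if err := l.Error(); err != nil {
		log.Fatal(err)
	}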

func (*Lexer) ScanOperator

func (l *Lexer) ScanOperator(operatorsRe *regexp.Regexp) (Token, interface{})

ScanOperator tries to recognise an operator or returns NULL.

func (*Lexer) ScanOperatorOrSymbol

func (l *Lexer) ScanOperatorOrSymbol(
	operatorsRe *regexp.Regexp, symbols []LexerSymbol,
) (Token, interface{})

ScanOperatorOrSymbol tries to scan an operator or a symbol, or returns NULL.

func (*Lexer) ScanSymbol

func (l *Lexer) ScanSymbol(symbols []LexerSymbol) (Token, interface{})

ScanSymbol tries to scan each given symbol or returns NULL.

func (*Lexer) String

func (l *Lexer) String() string

String produces a formatted string representation of the lexer with a line marker.

func (*Lexer) Tail

func (l *Lexer) Tail() []byte

Tail returns the unconsumed portion of the buffer.

func (*Lexer) Token

func (l *Lexer) Token() Token

Token returns the most recently recognized token.

func (*Lexer) Value

func (l *Lexer) Value() rel.Value

Value returns the Value for the most recently recognized token.

type LexerSymbol

type LexerSymbol struct {
	// contains filtered or unexported fields
}

LexerSymbol is a convenience structure for defining token recognition regexes and handler functions.

type PackageExpr

type PackageExpr struct {
	rel.ExprScanner
	// contains filtered or unexported fields
}

PackageExpr represents a package reference expression.

func (PackageExpr) Arg

func (e PackageExpr) Arg() rel.Expr

Arg returns the PackageExpr's arg.

func (PackageExpr) Eval

func (e PackageExpr) Eval(_ rel.Scope) (rel.Value, error)

Eval returns the subject.

func (PackageExpr) String

func (e PackageExpr) String() string

String returns a string representation of the expression.

type ParseContext

type ParseContext struct {
	SourceDir string
}

func (ParseContext) CompileExpr

func (pc ParseContext) CompileExpr(b ast.Branch) rel.Expr

CompileExpr compiles the parsed ast.Branch b into a rel.Expr.

func (ParseContext) MustParse

func (pc ParseContext) MustParse(s *parser.Scanner) ast.Branch

MustParse parses the input and returns the parsed ast.Branch, or panics.

func (ParseContext) MustParseString

func (pc ParseContext) MustParseString(s string) ast.Branch

MustParseString parses the input string and returns the parsed ast.Branch, or panics.

func (ParseContext) Parse

func (pc ParseContext) Parse(s *parser.Scanner) (ast.Branch, error)

Parse parses the input and returns the parsed ast.Branch or an error.

func (ParseContext) ParseString

func (pc ParseContext) ParseString(s string) (ast.Branch, error)

ParseString parses the input string and returns the parsed ast.Branch or an error.
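
A sketch of the parse-then-compile flow (treating SourceDir as the directory for resolving any relative references, which is an assumption; same imports as above):

	pc := syntax.ParseContext{SourceDir: "."}
	branch, err := pc.ParseString(`1 + 2`)
	if err != nil {
		log.Fatal(err)
	}
	expr := pc.CompileExpr(branch) // lower the ast.Branch to a rel.Expr
	fmt.Println(expr)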

type Token

type Token rune

Token represents a lexical token.

const (
	NULL  Token = 0
	ERROR Token = 255 + iota
	EOF

	NUMBER
	IDENT
	STRING
	XML

	AND
	AS
	ELSE
	EXCEPT
	FOR
	IF
	IN
	MAX
	MEAN
	MEDIAN
	MIN
	NEST
	OR
	ORDER
	UNNEST
	WHERE
	WITH
	WITHOUT
	COUNT
	SUM

	ARROW   // ->
	ARROWST // ->*
	ATARROW // @>
	CSET    // |}
	DARROW  // =>
	GEQ     // >=
	DSLASH  // //
	JOIN    // <&>
	LEQ     // <=
	NEQ     // !=
	OSET    // {|
	PI      // π
	SQRT    // √
	SUBMOD  // -%
)

Non-character tokens
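
Since Token is just a rune, a plain character can serve as its own token alongside these named constants; a tiny illustration:

	fmt.Println(syntax.TokenRepr(syntax.ARROW))      // a named, non-character token
	fmt.Println(syntax.TokenRepr(syntax.Token('+'))) // a plain character used as a token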

func LexerInitState

func LexerInitState(l *Lexer) (Token, interface{})

LexerInitState recognises the next input Token.
