lexer

package
v0.0.0-...-6ec744d Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Feb 20, 2021 License: Apache-2.0 Imports: 1 Imported by: 0

Documentation

Index

Constants

View Source
const (
	// ILLEGAL - Unknown token
	ILLEGAL = "ILLEGAL"

	// EOF - End of file
	EOF = "EOF"

	// IDENT - Identifier (variable/function name)
	IDENT = "IDENT"
	// STRLIT - String literal
	STRLIT = "STRLIT"
	// INTLIT - Integer literal
	INTLIT = "INTLIT"
	// FLTLIT - Float literal
	FLTLIT = "FLTLIT"

	// ASSIGN - Plain assignment
	ASSIGN = "="

	// Arithmetic operators
	ADD = "+"
	SUB = "-"
	MUL = "*"
	DIV = "/"

	// Compound arithmetic-assignment operators
	ADDASSIGN = "+="
	SUBASSIGN = "-="
	MULASSIGN = "*="
	DIVASSIGN = "/="

	// Bit-shift operators (left / right)
	BSL = "<<"
	BSR = ">>"

	// Bitwise operators; NOT is spelled "^" (Go-style), not "~"
	BWOR  = "|"
	BWAND = "&"
	BWNOT = "^"

	// Compound bitwise-assignment operators.
	// BWNASSIGN was "~=", which had no matching base operator:
	// bitwise NOT is "^" (BWNOT), so its compound form is "^=".
	BWOASSIGN = "|="
	BWAASSIGN = "&="
	BWNASSIGN = "^="

	// DOT - Member access
	DOT = "."

	// Comparison operators
	LT = "<"
	GT = ">"
	LE = "<="
	GE = ">="
	EQ = "=="
	NE = "!="

	// Logical OR / AND
	LOR  = "||"
	LAND = "&&"

	// BANG - Logical negation
	BANG = "!"

	// Separators
	COMMA   = ","
	SEMICOL = ";"
	COLON   = ":"

	// Grouping and delimiter pairs
	LPAREN = "("
	RPAREN = ")"
	LBRACE = "{"
	RBRACE = "}"
	LSBRKT = "["
	RSBRKT = "]"

	// Keywords and built-in type names;
	// RETSIG is the function return-type arrow.
	FUNCTION = "fn"
	LET      = "let"
	RETURN   = "return"
	RETSIG   = "->"
	IF       = "if"
	ELSE     = "else"
	WHILE    = "while"
	FOR      = "for"
	BREAK    = "break"
	IN       = "in"
	LOOP     = "loop"
	STRING   = "str"
	INT      = "int"
	FLOAT    = "flt"
	CLASS    = "class"
	BOOL     = "bool"
	TRUE     = "true"
	FALSE    = "false"
)

Types of tokens

Variables

This section is empty.

Functions

This section is empty.

Types

type Context

// Context records where a particular token appeared in the input text.
type Context struct {
	Line int    // line number of the token (0- vs 1-based not shown here — confirm in lexer)
	Col  int    // column within the line (same indexing caveat)
	Ctxt string // surrounding source text — presumably the line containing the token; verify against lexer
}

Context is the position of a particular token within the input text

type Err

// Err is an error the lexer can return, pairing a message with the
// input position at which it occurred. It implements the error interface.
type Err struct {
	Msg string  // human-readable description of the problem
	Con Context // position in the input where the error was detected
}

Err represents an error that the lexer can return

func (Err) Error

func (err Err) Error() string

type Lexer

// Lexer is the finite-state machine that tokenizes interpreter input.
// All of its fields are unexported (hidden by the documentation renderer);
// construct one with New and drive it via NextToken or Tokenize.
type Lexer struct {
	// contains filtered or unexported fields
}

Lexer represents the FSM that tokenizes the input to the interpreter

func New

func New(input string) *Lexer

New returns a new uninitialized lexer

func (*Lexer) NextToken

func (l *Lexer) NextToken() (Token, error)

NextToken advances the lexer and produces a token

func (*Lexer) Tokenize

func (l *Lexer) Tokenize() (tokens []Token, err error)

Tokenize fully advances the lexer and returns a slice of tokens

type Token

// Token is a single lexical unit ("word") produced by the Lexer.
type Token struct {
	Type    string  // token class — one of the exported constants (IDENT, INTLIT, ADD, ...)
	Literal string  // the token's text as it appeared in the input — presumably verbatim; confirm for string literals
	Pos     Context // where in the input this token was found
}

Token represents a single word in Monkey

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL