tokens

package
v0.0.0-...-67b6ef9 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jul 30, 2022 License: MIT Imports: 1 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type Pos

type Pos struct {
	File    string
	Line    int
	Char    int
	EndLine int
	EndChar int
}

func (*Pos) Contains

func (p *Pos) Contains(b *Pos) bool

func (*Pos) Dup

func (p *Pos) Dup() *Pos

func (*Pos) Error

func (p *Pos) Error(format string, args ...any) error

func (*Pos) Extend

func (a *Pos) Extend(b *Pos) *Pos

func (*Pos) String

func (p *Pos) String() string

type PosError

type PosError struct {
	Msg string
	Pos *Pos
}

func (*PosError) Error

func (p *PosError) Error() string

type Stream

type Stream struct {
	// contains filtered or unexported fields
}

func NewStream

func NewStream(file, code string) *Stream

func (*Stream) CanPeek

func (s *Stream) CanPeek(off int) bool

func (*Stream) Char

func (s *Stream) Char() rune

func (*Stream) Eat

func (s *Stream) Eat()

func (*Stream) HasNext

func (s *Stream) HasNext() bool

func (*Stream) Peek

func (s *Stream) Peek(off int) rune

func (*Stream) Pos

func (s *Stream) Pos() *Pos

type Token

type Token struct {
	Typ   TokenType
	Value string
	Pos   *Pos
}

func (Token) String

func (t Token) String() string

type TokenType

type TokenType int
const (
	TokenTypeIdent TokenType = iota
	TokenTypeNumber
	TokenTypeString
	TokenTypeByte
	TokenTypeLBrack
	TokenTypeRBrack
)

func (TokenType) String

func (t TokenType) String() string

type Tokenizer

type Tokenizer struct {
	Tokens []Token
	// contains filtered or unexported fields
}

func NewTokenizer

func NewTokenizer(s *Stream) *Tokenizer

func (*Tokenizer) Eat

func (t *Tokenizer) Eat()

func (*Tokenizer) Filename

func (t *Tokenizer) Filename() string

func (*Tokenizer) HasNext

func (t *Tokenizer) HasNext() bool

func (*Tokenizer) Last

func (t *Tokenizer) Last() *Pos

func (*Tokenizer) Tok

func (t *Tokenizer) Tok() Token

func (*Tokenizer) Tokenize

func (t *Tokenizer) Tokenize() error

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL