Documentation
¶
Index ¶
- Constants
- func DumpTokens(tokens <-chan Token, output io.Writer) error
- func DumpTokensFromFile(filename string, output io.Writer) error
- func FileToRuneChannel(filename string, output chan<- RuneLoc) error
- func IsValidInIdentifier(r rune) bool
- func ProcessIndentation(input <-chan Token, output chan<- Token) error
- func Tokenize(input <-chan RuneLoc, output chan<- Token) error
- type RuneLoc
- type SourceLocation
- type Token
- type TokenType
Constants ¶
View Source
const BeginTokens = "[beginning of token stream]"
View Source
const EndTokens = "[end of token stream]"
View Source
const NoChar rune = 0
Variables ¶
This section is empty.
Functions ¶
func FileToRuneChannel ¶
func IsValidInIdentifier ¶
func ProcessIndentation ¶
Types ¶
type RuneLoc ¶
type RuneLoc struct { Rune rune Loc SourceLocation }
type SourceLocation ¶
func (SourceLocation) String ¶
func (s SourceLocation) String() string
type Token ¶
type Token struct { TokenType Int int64 Str string Loc SourceLocation // contains filtered or unexported fields }
type TokenType ¶
type TokenType uint8
const ( // we don't actually need TokNone; it's just useful to make sure that none of our tokens are equal to 0 TokNone TokenType = iota // symbols TokSlash TokMinus TokSetEqual TokParenOpen TokParenClose TokComma TokDot TokDotDot TokColon TokSemicolon TokEquals TokNotEquals TokLessThan TokGreaterThan TokLessThanOrEquals TokGreaterThanOrEquals TokLeftShift TokRightShift TokNot // keywords TokKeywordIf TokKeywordReturn TokKeywordSet TokKeywordIn TokKeywordNew TokKeywordDel TokKeywordFor TokKeywordAs TokKeywordVar TokKeywordProc TokKeywordVerb TokPreprocessorDefine TokPreprocessorInclude // literals TokInteger TokSymbol TokResource TokStringStart TokStringEnd TokStringInsertStart TokStringInsertEnd TokStringLiteral // spacing TokNewline TokSpaces TokTabs TokIndent TokUnindent )
Click to show internal directories.
Click to hide internal directories.