lexer

package
v0.8.0
Published: May 25, 2022 License: MIT Imports: 0 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

var (
	// The default token map that i18next uses.
	DefaultI18NextLexerMap = map[string]TokenKind{
		"{{":  TokenPrefix,
		"}}":  TokenSuffix,
		",":   TokenFormatSeperator,
		"$t(": TokenNestingPrefix,
		")":   TokenNestingSuffix,
	}
)
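
A minimal sketch of passing a custom token map to NewLexer in place of DefaultI18NextLexerMap, mirroring the structure of the default map above. The single-brace delimiters and the example.com import path are illustrative placeholders, not part of this package.

package main

import (
	"fmt"

	"example.com/i18n/lexer" // placeholder import path; substitute the package's real module path
)

func main() {
	// Same token kinds as DefaultI18NextLexerMap, but with single-brace delimiters.
	customMap := map[string]lexer.TokenKind{
		"{":   lexer.TokenPrefix,
		"}":   lexer.TokenSuffix,
		",":   lexer.TokenFormatSeperator,
		"$t(": lexer.TokenNestingPrefix,
		")":   lexer.TokenNestingSuffix,
	}

	l := lexer.NewLexer("Hello {name}", customMap)
	fmt.Println(len(l.FindAllTokens()))
}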

Functions

This section is empty.

Types

type Lexer

type Lexer struct {
	Input string

	Tokens []Token
	// contains filtered or unexported fields
}

func NewLexer

func NewLexer(input string, tokenMap map[string]TokenKind) *Lexer

func (*Lexer) FindAllTokens

func (l *Lexer) FindAllTokens() []Token
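
A minimal sketch of constructing a lexer with the default i18next token map and listing the tokens it finds. The example.com import path is a placeholder, and the exact token sequence depends on the lexer implementation.

package main

import (
	"fmt"

	"example.com/i18n/lexer" // placeholder import path; substitute the package's real module path
)

func main() {
	l := lexer.NewLexer("Hello {{name}}, you have {{count, number}} messages", lexer.DefaultI18NextLexerMap)

	// Print each token's kind, literal and position.
	for _, tok := range l.FindAllTokens() {
		fmt.Printf("%-30s %q (%d-%d)\n", tok.Kind, tok.Literal, tok.Start, tok.End)
	}
}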

func (*Lexer) NewInput

func (l *Lexer) NewInput(input string)
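
A minimal sketch of reusing a single Lexer for several inputs via NewInput, assuming NewInput resets the lexer's state so FindAllTokens can be called again. The example.com import path is a placeholder.

package main

import (
	"fmt"

	"example.com/i18n/lexer" // placeholder import path; substitute the package's real module path
)

func main() {
	l := lexer.NewLexer("{{greeting}}", lexer.DefaultI18NextLexerMap)
	fmt.Println(len(l.FindAllTokens()))

	// Swap in a new input instead of allocating a second lexer.
	l.NewInput("$t(nested.key)")
	fmt.Println(len(l.FindAllTokens()))
}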

func (*Lexer) TokenMapLookup

func (l *Lexer) TokenMapLookup(token TokenKind) string

TokenMapLookup returns the literal currently assigned to a token kind. It is mostly used to produce clearer error messages.
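
A minimal sketch of using TokenMapLookup to include the configured literal in an error message, here reporting the closing delimiter for an (assumed) unterminated interpolation. The example.com import path is a placeholder.

package main

import (
	"fmt"

	"example.com/i18n/lexer" // placeholder import path; substitute the package's real module path
)

func main() {
	l := lexer.NewLexer("Hello {{name", lexer.DefaultI18NextLexerMap)

	// Look up the literal mapped to TokenSuffix ("}}" with the default map).
	suffix := l.TokenMapLookup(lexer.TokenSuffix)
	fmt.Printf("interpolation may be unterminated: expected closing %q\n", suffix)
}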

type Token

type Token struct {
	Kind    TokenKind
	Literal string
	Start   int
	End     int
}

type TokenKind

type TokenKind string

const (
	TokenEOF      TokenKind = "EOF"
	TokenPrefix   TokenKind = "TokenPrefixInterpolation"
	TokenSuffix   TokenKind = "TokenSuffixInterpolation"
	TokenLiteral  TokenKind = "TokenLiteral"
	TokenArgument TokenKind = "TokenArgument" // Used by the parser
	// Can follow TokenPrefix or TokenNestingPrefix.
	TokenFormatSeperator  TokenKind = "(Format)/Nesting seperator" // These two separator kinds often, but not always, map to the same literal.
	TokenNestingSeperator TokenKind = "Format/(Nesting) seperator"
	TokenNestingPrefix    TokenKind = "NestingPrefix"
	TokenNestingSuffix    TokenKind = "NestingSuffix"
)
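
A minimal sketch of dispatching on TokenKind when consuming the lexer's output, as a small parser loop might. The example.com import path is a placeholder, and the cases shown cover only a subset of the kinds above.

package main

import (
	"fmt"

	"example.com/i18n/lexer" // placeholder import path; substitute the package's real module path
)

func main() {
	l := lexer.NewLexer("Hello {{name}} from $t(app.name)", lexer.DefaultI18NextLexerMap)

	for _, tok := range l.FindAllTokens() {
		switch tok.Kind {
		case lexer.TokenLiteral:
			fmt.Printf("literal text %q\n", tok.Literal)
		case lexer.TokenPrefix:
			fmt.Println("interpolation opens at", tok.Start)
		case lexer.TokenSuffix:
			fmt.Println("interpolation closes at", tok.End)
		case lexer.TokenNestingPrefix:
			fmt.Println("nested translation opens at", tok.Start)
		case lexer.TokenEOF:
			fmt.Println("end of input")
		}
	}
}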
