Documentation ¶
Index ¶
- Constants
- Variables
- func GetBencodedInfo(tokens []Token) []byte
- type Container
- type ContainerType
- type LexFn
- func LexBegin(lex *Lexer) LexFn
- func LexDictEnd(lex *Lexer) LexFn
- func LexDictStart(lex *Lexer) LexFn
- func LexDictValue(lex *Lexer) LexFn
- func LexIntegerEnd(lex *Lexer) LexFn
- func LexIntegerStart(lex *Lexer) LexFn
- func LexIntegerValue(lex *Lexer) LexFn
- func LexListEnd(lex *Lexer) LexFn
- func LexListStart(lex *Lexer) LexFn
- func LexListValue(lex *Lexer) LexFn
- func LexStringStart(lex *Lexer) LexFn
- func LexStringValue(lex *Lexer) LexFn
- type Lexer
- func (lex *Lexer) Backup()
- func (lex *Lexer) CurrentInput() []byte
- func (lex *Lexer) Emit(tokenType TokenType)
- func (lex *Lexer) Errorf(format string, args ...interface{}) LexFn
- func (lex *Lexer) InputToEnd() []byte
- func (lex *Lexer) IsEOF() bool
- func (lex *Lexer) Next() byte
- func (lex *Lexer) NextToken() Token
- func (lex *Lexer) Peek() byte
- func (lex *Lexer) Shutdown()
- func (lex *Lexer) String() string
- type ParseFn
- type Parser
- type Token
- type TokenType
Constants ¶
Variables ¶
var ( LexErrInvalidStringLength string = "Invalid String Length" LexErrInvalidCharacter string = "Invalid Character" LexErrUnclosedDelimeter = "Unclosed Delimeter" LexErrUnexpectedEOF = "Unexpected EOF" )
var Collect = collect
var TokenNames = map[TokenType]string{ TOKEN_ERROR: "ERROR", TOKEN_EOF: "EOF", TOKEN_COLON: "COLON", TOKEN_STRING_LENGTH: "STRING_LENGTH", TOKEN_STRING_VALUE: "STRING_VALUE", TOKEN_INTEGER_START: "INTEGER START", TOKEN_INTEGER_VALUE: "INTEGER_VALUE", TOKEN_INTEGER_END: "INTEGER_END", TOKEN_LIST_START: "LIST_START", TOKEN_LIST_VALUE: "LIST_VALUE", TOKEN_LIST_END: "LIST_END", TOKEN_DICT_START: "DICT_START", TOKEN_DICT_VALUE: "DICT_VALUE", TOKEN_DICT_END: "DICT_END", }
Functions ¶
func GetBencodedInfo ¶
Get the raw bencoded info dictionary value. This is a bit of a hack, as it requires two passes through the tokens to get the original data. A better approach may have been to have the Lexer record the corresponding position and values of every token, and to have the parser build up the original representation of a parsed structure.
Types ¶
type Container ¶
type ContainerType ¶
type ContainerType int
const ( ContainerBString ContainerType = iota ContainerInteger ContainerList ContainerDict )
type LexFn ¶
func LexDictEnd ¶
func LexDictStart ¶
func LexDictValue ¶
func LexIntegerEnd ¶
func LexIntegerStart ¶
func LexIntegerValue ¶
func LexListEnd ¶
func LexListStart ¶
func LexListValue ¶
func LexStringStart ¶
func LexStringValue ¶
type Lexer ¶
type Lexer struct { Name string Input []byte Tokens chan Token State LexFn Start int Pos int Width int StringLength int NestedStack *lane.Stack }
func BeginLexing ¶
func (*Lexer) CurrentInput ¶
Returns a slice of the current input from the current lexer start position to the current position.
func (*Lexer) Emit ¶
Puts a token on the token channel. The value of this token is read from the input based on the current lexer position.
func (*Lexer) InputToEnd ¶
Returns a slice of the input from the current lexer position to the end of the input.
type Parser ¶
type Parser struct { Tokens []Token State ParseFn Output interface{} Stack *lane.Stack Pos int NextKey string }
Parser keeps track of parsing state, corresponding tokens, output data structure, etc.