bencoding

package v0.0.0-...-9fe5d5b
Published: Jan 22, 2016 License: MIT Imports: 6 Imported by: 0

Documentation

Constants

const (
	COLON         string = ":"
	INTEGER_START string = "i"
	INTEGER_END   string = "e"
	LIST_START    string = "l"
	LIST_END      string = "e"
	DICT_START    string = "d"
	DICT_END      string = "e"
)
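
These delimiters spell out the whole bencode grammar: a string is its decimal length, a colon, then that many bytes; an integer is i...e; and lists and dictionaries wrap their bencoded contents in l...e and d...e. A few illustrative literals (sample data, not package API):

const (
	sampleString = "4:spam"         // the string "spam"
	sampleInt    = "i42e"           // the integer 42
	sampleList   = "l4:spami42ee"   // the list ["spam", 42]
	sampleDict   = "d3:key5:valuee" // the dictionary {"key": "value"}
)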

Variables

var (
	LexErrInvalidStringLength string = "Invalid String Length"
	LexErrInvalidCharacter    string = "Invalid Character"
	LexErrUnclosedDelimeter          = "Unclosed Delimeter"
	LexErrUnexpectedEOF              = "Unexpected EOF"
)

var Collect = collect

var TokenNames = map[TokenType]string{
	TOKEN_ERROR: "ERROR",
	TOKEN_EOF:   "EOF",

	TOKEN_COLON: "COLON",

	TOKEN_STRING_LENGTH: "STRING_LENGTH",
	TOKEN_STRING_VALUE:  "STRING_VALUE",

	TOKEN_INTEGER_START: "INTEGER START",
	TOKEN_INTEGER_VALUE: "INTEGER_VALUE",
	TOKEN_INTEGER_END:   "INTEGER_END",

	TOKEN_LIST_START: "LIST_START",
	TOKEN_LIST_VALUE: "LIST_VALUE",
	TOKEN_LIST_END:   "LIST_END",

	TOKEN_DICT_START: "DICT_START",
	TOKEN_DICT_VALUE: "DICT_VALUE",
	TOKEN_DICT_END:   "DICT_END",
}

Functions

func GetBencodedInfo

func GetBencodedInfo(tokens []Token) []byte

Get the raw bencoded info dictionary value. This is a bit of a hack, as it requires two passes through the tokens to recover the original data. A better approach might have been to have the Lexer record the position and value of every token, and to have the parser build up the original representation of a parsed structure from those.
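
A hedged sketch of the usual reason to want those raw bytes: the BitTorrent infohash is the SHA-1 of the bencoded info value exactly as it appeared in the .torrent file, so it must be hashed verbatim rather than re-encoded. Assuming the tokens were drained from a Lexer via NextToken (see the Lexer example below):

import "crypto/sha1"

// infoHash is a hypothetical helper, not part of this package.
func infoHash(tokens []Token) [20]byte {
	// GetBencodedInfo returns the original bencoded bytes of the
	// info dictionary; they must be hashed exactly as-is.
	return sha1.Sum(GetBencodedInfo(tokens))
}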

Types

type Container

type Container struct {
	Type    ContainerType
	BString []byte
	Integer int
	List    *[]Container
	Dict    map[string]Container
}

func (*Container) Append

func (c *Container) Append(val Container)

func (*Container) Collapse

func (c *Container) Collapse() interface{}

func (*Container) SetKey

func (c *Container) SetKey(key string, val Container)

func (*Container) String

func (c *Container) String() string
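
A sketch of building a Container by hand and collapsing it to plain Go values. The exact shape Collapse returns is undocumented; []interface{} for lists and map[string]interface{} for dictionaries is an assumption:

// buildList is a hypothetical illustration of the Container API.
func buildList() interface{} {
	items := []Container{}
	list := Container{Type: ContainerList, List: &items}
	list.Append(Container{Type: ContainerBString, BString: []byte("spam")})
	list.Append(Container{Type: ContainerInteger, Integer: 42})
	// Collapse presumably flattens to plain Go values, e.g.
	// []interface{}{[]byte("spam"), 42} (an assumption).
	return list.Collapse()
}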

type ContainerType

type ContainerType int
const (
	ContainerBString ContainerType = iota
	ContainerInteger
	ContainerList
	ContainerDict
)

type LexFn

type LexFn func(*Lexer) LexFn
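
LexFn is the classic state-function lexer pattern: each state consumes some input, emits tokens, and returns the next state. A sketch of the pattern, not a confirmed view of this package's internals:

// run steps the lexer from state to state until a state function
// returns nil. A hypothetical driver: BeginLexing presumably runs an
// equivalent loop in a goroutine so NextToken can block on the channel.
func run(lex *Lexer, start LexFn) {
	for state := start; state != nil; {
		state = state(lex)
	}
}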

func LexBegin

func LexBegin(lex *Lexer) LexFn

func LexDictEnd

func LexDictEnd(lex *Lexer) LexFn

func LexDictStart

func LexDictStart(lex *Lexer) LexFn

func LexDictValue

func LexDictValue(lex *Lexer) LexFn

func LexIntegerEnd

func LexIntegerEnd(lex *Lexer) LexFn

func LexIntegerStart

func LexIntegerStart(lex *Lexer) LexFn

func LexIntegerValue

func LexIntegerValue(lex *Lexer) LexFn

func LexListEnd

func LexListEnd(lex *Lexer) LexFn

func LexListStart

func LexListStart(lex *Lexer) LexFn

func LexListValue

func LexListValue(lex *Lexer) LexFn

func LexStringStart

func LexStringStart(lex *Lexer) LexFn

func LexStringValue

func LexStringValue(lex *Lexer) LexFn

type Lexer

type Lexer struct {
	Name   string
	Input  []byte
	Tokens chan Token
	State  LexFn

	Start        int
	Pos          int
	Width        int
	StringLength int
	NestedStack  *lane.Stack
}

func BeginLexing

func BeginLexing(name, input string, state LexFn) *Lexer
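
A sketch of driving the lexer by hand, using TokenNames for readable output. It assumes the stream ends with a TOKEN_EOF token (or TOKEN_ERROR on bad input) and that NextToken blocks until a token is available:

import "fmt"

// printTokens is a hypothetical illustration, not part of the package.
func printTokens() {
	lex := BeginLexing("example", "d3:foo3:bare", LexBegin)
	for {
		tok := lex.NextToken()
		fmt.Printf("%-14s %q\n", TokenNames[tok.Type], tok.Value)
		if tok.Type == TOKEN_EOF || tok.Type == TOKEN_ERROR {
			return
		}
	}
}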

func (*Lexer) Backup

func (lex *Lexer) Backup()

Backup to the beginning of the last read token.

func (*Lexer) CurrentInput

func (lex *Lexer) CurrentInput() []byte

Returns a slice of the input from the lexer's start position to its current position.

func (*Lexer) Emit

func (lex *Lexer) Emit(tokenType TokenType)

Puts a token on the token channel. The value of this token is read from the input based on the current lexer position.

func (*Lexer) Errorf

func (lex *Lexer) Errorf(format string, args ...interface{}) LexFn

Puts a token with error information on the token channel and returns the next lex state.

func (*Lexer) InputToEnd

func (lex *Lexer) InputToEnd() []byte

Returns a slice of the input from the current lexer position to the end of the input.

func (*Lexer) IsEOF

func (lex *Lexer) IsEOF() bool

Returns true if the lexer is at the end of the input stream.

func (*Lexer) Next

func (lex *Lexer) Next() byte

Reads the next byte from the input and then advances the lexer position.

func (*Lexer) NextToken

func (lex *Lexer) NextToken() Token

Returns the next token from the token channel.

func (*Lexer) Peek

func (lex *Lexer) Peek() byte

Returns the next byte in the stream, then puts the lexer position back; in effect, reads the next byte without consuming it.

func (*Lexer) Shutdown

func (lex *Lexer) Shutdown()

Shuts down the token stream.

func (*Lexer) String

func (lex *Lexer) String() string

type ParseFn

type ParseFn func(*Parser) ParseFn

type Parser

type Parser struct {
	Tokens []Token
	State  ParseFn
	Output interface{}
	Stack  *lane.Stack

	Pos     int
	NextKey string
}

Parser keeps track of parsing state, corresponding tokens, output data structure, etc.

func Parse

func Parse(tokens []Token) *Parser

Parse takes a list of Tokens from the lexer and creates the final data structure.
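
Putting the pieces together, a sketch of the full pipeline: lex, drain the token channel into a slice, then parse. Collect presumably performs the draining step, but its signature is not documented, so it is done by hand here; the shape of Output is likewise an assumption:

// decode is a hypothetical end-to-end helper.
func decode(input string) interface{} {
	lex := BeginLexing("decode", input, LexBegin)
	var tokens []Token
	for {
		tok := lex.NextToken()
		tokens = append(tokens, tok)
		if tok.Type == TOKEN_EOF || tok.Type == TOKEN_ERROR {
			break
		}
	}
	// Output presumably holds the collapsed top-level value, e.g.
	// []interface{} for "l4:spami42ee" (an assumption).
	return Parse(tokens).Output
}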

func (*Parser) CurrentType

func (parser *Parser) CurrentType() TokenType

func (*Parser) CurrentValue

func (parser *Parser) CurrentValue() []byte

type Token

type Token struct {
	Type  TokenType
	Value []byte
}

func NewToken

func NewToken(tokenType TokenType, val string) Token

func (Token) String

func (t Token) String() string

type TokenType

type TokenType int
const (
	TOKEN_ERROR TokenType = iota
	TOKEN_EOF

	TOKEN_COLON

	TOKEN_STRING_LENGTH
	TOKEN_STRING_VALUE

	TOKEN_INTEGER_START
	TOKEN_INTEGER_VALUE
	TOKEN_INTEGER_END

	TOKEN_LIST_START
	TOKEN_LIST_VALUE
	TOKEN_LIST_END

	TOKEN_DICT_START
	TOKEN_DICT_VALUE
	TOKEN_DICT_END
)
