Documentation ¶
Overview ¶
Package parser parses dosini-style files.
Index ¶
Constants ¶
View Source
const (
	RuneComment      = ';' // RuneComment is the standard dosini comment character
	RuneCommentExtra = '#' // RuneCommentExtra is UNIX shell's comment character
	RuneSectionStart = '[' // RuneSectionStart indicates the start of a section declaration
	RuneSectionEnd   = ']' // RuneSectionEnd indicates the end of a section declaration
	RuneFieldEqual   = '=' // RuneFieldEqual separates field keys from their values
)
Variables ¶
View Source
var (
	// RunesComment is a string containing all runes acceptable to start comments.
	RunesComment = string([]rune{
		RuneComment,
		RuneCommentExtra,
	})

	// RunesSpecial is a string containing all the runes with special meaning
	// to the parser: comment starters, section brackets, and the key/value
	// separator.
	RunesSpecial = string([]rune{
		RuneComment,
		RuneCommentExtra,
		RuneSectionStart,
		RuneSectionEnd,
		RuneFieldEqual,
	})
)
View Source
var (
	// IsNewLine tells if the rune indicates a line break or the start of one.
	IsNewLine = lexer.NewIsIn("\r\n")
	// IsNotNewLine tells if the rune is not a line break nor the start of one.
	IsNotNewLine = lexer.NewIsNot(IsNewLine)
	// IsSpace tells if the rune is considered whitespace by Unicode.
	IsSpace = lexer.IsSpace
	// IsNotSpace tells if the rune is not considered whitespace by Unicode.
	IsNotSpace = lexer.NewIsNot(IsSpace)
	// IsCommentStart tells if the rune is one of those acceptable to start
	// a comment (see [RunesComment]).
	IsCommentStart = lexer.NewIsIn(RunesComment)
)
Functions ¶
func IsName ¶
IsName indicates a rune is acceptable for section or field names
func IsSectionEnd ¶
IsSectionEnd indicates the rune ends the section declaration
func IsSectionStart ¶
IsSectionStart indicates the rune starts the section declaration
Types ¶
type Parser ¶
// Parser parses a dosini-style document.
type Parser struct {
	// OnToken is called for each identified token. If it returns an error,
	// parsing is interrupted.
	OnToken func(pos lexer.Position, typ TokenType, value string) error

	// OnError is called in case of a parsing error, and it's allowed
	// to replace the error returned by [Parser.Run].
	// OnError is called for io.EOF, but [Parser.Run] will consider it
	// normal termination.
	OnError func(pos lexer.Position, content string, err error) error

	// contains filtered or unexported fields
}
Parser parses a dosini-style document
type TokenType ¶
// A TokenType is a type of Token.
type TokenType uint
A TokenType is a type of Token
const (
	// TokenUnknown represents a Token that hasn't been identified.
	TokenUnknown TokenType = iota
	// TokenSectionStart indicates the opening marker of a section declaration.
	// The left squared bracket.
	TokenSectionStart
	// TokenSectionEnd indicates the closing marker of a section declaration.
	// The right squared bracket.
	TokenSectionEnd
	// TokenSectionName represents the section name between the squared brackets.
	TokenSectionName
	// TokenSectionSubname represents a secondary name in the section,
	// represented between quotes after the section name.
	// e.g.
	//	[section_name "section_subname"]
	TokenSectionSubname
	// TokenComment represents a comment, including the initial ';' or '#' until
	// the end of the line.
	TokenComment
	// TokenFieldKey represents a field name in a `key = value` entry.
	TokenFieldKey
	// TokenFieldValue represents a field value in a `key = value` entry.
	TokenFieldValue
)
Click to show internal directories.
Click to hide internal directories.