Documentation ¶
Overview ¶
Package shlex provides simple lexical analysis similar to a Unix shell.
Index ¶
Examples ¶
Constants ¶
This section is empty.
Variables ¶
View Source
var ( ErrNoClosing = errors.New("no closing quotation") ErrNoEscaped = errors.New("no escaped character") )
Functions ¶
func Split ¶
Split splits a string according to POSIX or non-POSIX rules.
Example ¶
package main import ( "fmt" "log" shlex "github.com/desertbit/go-shlex" ) func main() { cmd := `cp -Rdp "file name" 'file name2' dir\ name` // Split of cmd with POSIX mode. words1, err := shlex.Split(cmd, true) if err != nil { log.Fatal(err) } // Split of cmd with Non-POSIX mode. words2, err := shlex.Split(cmd, false) if err != nil { log.Fatal(err) } fmt.Println("Source command:") fmt.Println(`cp -Rdp "file name" 'file name2' dir\ name`) fmt.Println() fmt.Println("POSIX mode:") for _, word := range words1 { fmt.Println(word) } fmt.Println() fmt.Println("Non-POSIX mode:") for _, word := range words2 { fmt.Println(word) } }
Output: Source command: cp -Rdp "file name" 'file name2' dir\ name POSIX mode: cp -Rdp file name file name2 dir name Non-POSIX mode: cp -Rdp "file name" 'file name2' dir\ name
Example (CompareFlynn) ¶
package main import ( "fmt" shlex "github.com/desertbit/go-shlex" fshlex "github.com/flynn/go-shlex" ) func main() { cmd := `English and 日本語` // Split for github.com/flynn/go-shlex imported as fshlex wordsFlynn, err1 := fshlex.Split(cmd) // Split for github.com/anmitsu/go-shlex wordsAnmitsu, err2 := shlex.Split(cmd, true) fmt.Println("Source string:") fmt.Println(cmd) fmt.Println() fmt.Println("Result of github.com/flynn/go-shlex:") for _, word := range wordsFlynn { fmt.Println(word) } fmt.Println(err1.Error()) fmt.Println() fmt.Println("Result of github.com/anmitsu/go-shlex:") for _, word := range wordsAnmitsu { fmt.Println(word) } if err2 != nil { fmt.Println(err2.Error()) } }
Output: Source string: English and 日本語 Result of github.com/flynn/go-shlex: English and Unknown rune: 26085 Result of github.com/anmitsu/go-shlex: English and 日本語
Types ¶
type DefaultTokenizer ¶
type DefaultTokenizer struct{}
DefaultTokenizer implements a simple tokenizer like Unix shell.
func (*DefaultTokenizer) IsEscape ¶
func (t *DefaultTokenizer) IsEscape(r rune) bool
func (*DefaultTokenizer) IsEscapedQuote ¶
func (t *DefaultTokenizer) IsEscapedQuote(r rune) bool
func (*DefaultTokenizer) IsQuote ¶
func (t *DefaultTokenizer) IsQuote(r rune) bool
func (*DefaultTokenizer) IsWhitespace ¶
func (t *DefaultTokenizer) IsWhitespace(r rune) bool
func (*DefaultTokenizer) IsWord ¶
func (t *DefaultTokenizer) IsWord(r rune) bool
type Lexer ¶
type Lexer struct {
// contains filtered or unexported fields
}
Lexer represents a lexical analyzer.
func NewLexer ¶
NewLexer creates a new Lexer reading from an io.Reader. This Lexer has a DefaultTokenizer according to posix and whitespaceSplit rules.
func NewLexerString ¶
NewLexerString creates a new Lexer reading from a string. This Lexer has a DefaultTokenizer according to posix and whitespaceSplit rules.
func (*Lexer) SetTokenizer ¶
SetTokenizer sets a Tokenizer.
Click to show internal directories.
Click to hide internal directories.