shlex

package module
v0.1.1
Published: Nov 10, 2020 · License: MIT · Imports: 5 · Imported by: 14

README

go-shlex

go-shlex is a Go library that provides Unix-shell-like lexical analysis: it splits a command line into words the way a shell would.

Install

go get -u "github.com/desertbit/go-shlex"

Usage

package main

import (
    "fmt"
    "log"

    "github.com/desertbit/go-shlex"
)

func main() {
    cmd := `cp -Rdp "file name" 'file name2' dir\ name`

    // Split the command into words using POSIX rules.
    words, err := shlex.Split(cmd, true)
    if err != nil {
        log.Fatal(err)
    }

    // Print each word on its own line.
    for _, w := range words {
        fmt.Println(w)
    }
}
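
In POSIX mode this prints one word per line: cp, -Rdp, file name, file name2, dir name. The quotes are removed and the escaped space in `dir\ name` is kept as part of the word.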

Documentation

http://godoc.org/github.com/desertbit/go-shlex

Documentation

Overview

Package shlex provides simple, Unix-shell-like lexical analysis.

Constants

This section is empty.

Variables

var (
	ErrNoClosing = errors.New("no closing quotation")
	ErrNoEscaped = errors.New("no escaped character")
)
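
Both errors are returned by Split (and Lexer.Split) when the input ends prematurely. A minimal sketch of handling them; which input triggers which error is an assumption based on the error names:

package main

import (
	"fmt"

	shlex "github.com/desertbit/go-shlex"
)

func main() {
	// Assumed: an unterminated quotation makes Split return ErrNoClosing.
	_, err := shlex.Split(`echo "unterminated`, true)
	fmt.Println(err == shlex.ErrNoClosing) // true

	// Assumed: a trailing backslash with nothing left to escape
	// makes Split return ErrNoEscaped.
	_, err = shlex.Split(`echo trailing\`, true)
	fmt.Println(err == shlex.ErrNoEscaped) // true
}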

Functions

func Split

func Split(s string, posix bool) ([]string, error)

Split splits a string into words according to POSIX or non-POSIX rules.

Example
package main

import (
	"fmt"
	"log"

	shlex "github.com/desertbit/go-shlex"
)

func main() {
	cmd := `cp -Rdp "file name" 'file name2' dir\ name`

	// Split cmd using POSIX rules.
	words1, err := shlex.Split(cmd, true)
	if err != nil {
		log.Fatal(err)
	}
	// Split cmd using non-POSIX rules.
	words2, err := shlex.Split(cmd, false)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println("Source command:")
	fmt.Println(`cp -Rdp "file name" 'file name2' dir\ name`)
	fmt.Println()

	fmt.Println("POSIX mode:")
	for _, word := range words1 {
		fmt.Println(word)
	}
	fmt.Println()
	fmt.Println("Non-POSIX mode:")
	for _, word := range words2 {
		fmt.Println(word)
	}

}
Output:

Source command:
cp -Rdp "file name" 'file name2' dir\ name

POSIX mode:
cp
-Rdp
file name
file name2
dir name

Non-POSIX mode:
cp
-Rdp
"file name"
'file name2'
dir\
name
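
As the outputs show, POSIX mode interprets quotes and backslash escapes and strips them from the resulting words, while non-POSIX mode keeps the quote characters in the words and does not honor backslash escapes.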
Example (CompareFlynn)
package main

import (
	"fmt"

	shlex "github.com/desertbit/go-shlex"
	fshlex "github.com/flynn/go-shlex"
)

func main() {
	cmd := `English and 日本語`

	// Split using github.com/flynn/go-shlex, imported as fshlex.
	wordsFlynn, err1 := fshlex.Split(cmd)

	// Split using this package (github.com/desertbit/go-shlex).
	wordsAnmitsu, err2 := shlex.Split(cmd, true)

	fmt.Println("Source string:")
	fmt.Println(cmd)
	fmt.Println()

	fmt.Println("Result of github.com/flynn/go-shlex:")
	for _, word := range wordsFlynn {
		fmt.Println(word)
	}
	if err1 != nil {
		fmt.Println(err1.Error())
	}

	fmt.Println()
	fmt.Println("Result of github.com/desertbit/go-shlex:")
	for _, word := range wordsAnmitsu {
		fmt.Println(word)
	}
	if err2 != nil {
		fmt.Println(err2.Error())
	}

}
Output:

Source string:
English and 日本語

Result of github.com/flynn/go-shlex:
English
and
Unknown rune: 26085

Result of github.com/desertbit/go-shlex:
English
and
日本語
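
The comparison shows the practical difference: flynn/go-shlex stops with an "Unknown rune" error on non-ASCII input, while this package splits multibyte runes such as 日本語 without error.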

Types

type DefaultTokenizer

type DefaultTokenizer struct{}

DefaultTokenizer implements a simple, Unix-shell-like tokenizer.

func (*DefaultTokenizer) IsEscape

func (t *DefaultTokenizer) IsEscape(r rune) bool

func (*DefaultTokenizer) IsEscapedQuote

func (t *DefaultTokenizer) IsEscapedQuote(r rune) bool

func (*DefaultTokenizer) IsQuote

func (t *DefaultTokenizer) IsQuote(r rune) bool

func (*DefaultTokenizer) IsWhitespace

func (t *DefaultTokenizer) IsWhitespace(r rune) bool

func (*DefaultTokenizer) IsWord

func (t *DefaultTokenizer) IsWord(r rune) bool
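
These predicates define DefaultTokenizer's character classes. A small probe; the exact classes are inferred from the Split examples above, not from documented behavior:

package main

import (
	"fmt"

	shlex "github.com/desertbit/go-shlex"
)

func main() {
	t := &shlex.DefaultTokenizer{}

	// Inferred from the examples: " and ' are quote characters,
	// backslash is the escape character, and space is whitespace.
	fmt.Println(t.IsQuote('"'), t.IsQuote('\'')) // true true
	fmt.Println(t.IsEscape('\\'))                // true
	fmt.Println(t.IsWhitespace(' '))             // true
	fmt.Println(t.IsWord('a'))                   // true
}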

type Lexer

type Lexer struct {
	// contains filtered or unexported fields
}

Lexer represents a lexical analyzer.

func NewLexer

func NewLexer(r io.Reader, posix, whitespaceSplit bool) *Lexer

NewLexer creates a new Lexer that reads from an io.Reader. The Lexer uses a DefaultTokenizer and splits according to the posix and whitespaceSplit flags.

func NewLexerString

func NewLexerString(s string, posix, whitespaceSplit bool) *Lexer

NewLexerString creates a new Lexer that reads from a string. The Lexer uses a DefaultTokenizer and splits according to the posix and whitespaceSplit flags.

func (*Lexer) SetTokenizer

func (l *Lexer) SetTokenizer(t Tokenizer)

SetTokenizer sets a Tokenizer.

func (*Lexer) Split

func (l *Lexer) Split() ([]string, error)

Split reads the Lexer's input and splits it into words.
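
A minimal sketch of driving a Lexer directly over an io.Reader; the expected output is an assumption based on the POSIX-mode Split example above:

package main

import (
	"fmt"
	"log"
	"strings"

	shlex "github.com/desertbit/go-shlex"
)

func main() {
	// Any io.Reader works; here a strings.Reader, with posix and
	// whitespaceSplit both enabled.
	l := shlex.NewLexer(strings.NewReader(`cp -Rdp "file name" dir`), true, true)

	words, err := l.Split()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%q\n", words) // ["cp" "-Rdp" "file name" "dir"]
}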

type Tokenizer

type Tokenizer interface {
	IsWord(rune) bool
	IsWhitespace(rune) bool
	IsQuote(rune) bool
	IsEscape(rune) bool
	IsEscapedQuote(rune) bool
}

Tokenizer is the interface that classifies each rune as a word character, whitespace, quotation, escape, or escaped quotation.
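
A custom Tokenizer can be installed on a Lexer via SetTokenizer. A sketch, assuming a hypothetical csvTokenizer that embeds DefaultTokenizer and additionally treats ',' as whitespace; the expected output assumes quoted sections protect the comma, as in the POSIX examples above:

package main

import (
	"fmt"
	"log"

	shlex "github.com/desertbit/go-shlex"
)

// csvTokenizer is hypothetical: it reuses DefaultTokenizer's classification
// but additionally treats ',' as whitespace, so commas also separate words.
type csvTokenizer struct {
	shlex.DefaultTokenizer
}

func (t *csvTokenizer) IsWhitespace(r rune) bool {
	return r == ',' || t.DefaultTokenizer.IsWhitespace(r)
}

func main() {
	l := shlex.NewLexerString(`name,value,"hello, world"`, true, true)
	l.SetTokenizer(&csvTokenizer{})

	words, err := l.Split()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%q\n", words) // assumed: ["name" "value" "hello, world"]
}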
