txt

package module
v0.0.4 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Mar 10, 2024 License: MIT Imports: 8 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

View Source
var (
	FieldsFuncErr = errors.New("strings.FieldsFunc returned an empty slice or the string was empty")
	EmptyStrErr   = errors.New("empty string")
	NoMatchErr    = errors.New(`no matches found`)
)

Functions

func DefaultStopWords

func DefaultStopWords() []string

func SortByAlphaFunc added in v0.0.2

func SortByAlphaFunc(a *Token, b *Token) int

func Split added in v0.0.2

func Split(str string, s sep.Func) []string

func Stem

func Stem(token string) string

func StripPunct

func StripPunct(token string) string

func ToLower

func ToLower(ana *Analyzer)

func WithDefaultStopWords

func WithDefaultStopWords(ana *Analyzer)

func WithStemmer

func WithStemmer(ana *Analyzer)

func WithoutPunct

func WithoutPunct(ana *Analyzer)

Types

type Analyzer

type Analyzer struct {
	// contains filtered or unexported fields
}

func Keywords

func Keywords() *Analyzer

func New

func New(opts ...Option) *Analyzer

func NewNormalizer added in v0.0.2

func NewNormalizer(opts ...Option) *Analyzer

func (*Analyzer) AddNormalizer

func (ana *Analyzer) AddNormalizer(normies ...Normalizer) *Analyzer

func (*Analyzer) IsStopWord

func (ana *Analyzer) IsStopWord(token string) bool

func (*Analyzer) Keywords

func (ana *Analyzer) Keywords() *Analyzer

func (*Analyzer) SetStopWords

func (ana *Analyzer) SetStopWords(words []string) *Analyzer

func (*Analyzer) Stopwords added in v0.0.2

func (ana *Analyzer) Stopwords() Tokens

func (*Analyzer) Tokenize

func (ana *Analyzer) Tokenize(text string) (Tokens, error)

func (*Analyzer) WithNormalizer

func (ana *Analyzer) WithNormalizer(normies ...Normalizer) *Analyzer

func (*Analyzer) WithSep

func (ana *Analyzer) WithSep(sep sep.Func) *Analyzer

func (*Analyzer) WithoutStopWords added in v0.0.4

func (ana *Analyzer) WithoutStopWords() bool

type Normalizer

type Normalizer func(string) string

type Option

type Option func(*Analyzer)

func WithNormalizers

func WithNormalizers(n ...Normalizer) Option

func WithStopWords

func WithStopWords(words []string) Option

type Token

type Token struct {
	Value string `json:"value"`
	Label string `json:"label"`
	fuzzy.Match
}

func NewToken

func NewToken(label, val string) *Token

type Tokens

type Tokens []*Token

func Normalize

func Normalize(ls []string, normies []Normalizer) Tokens

func (Tokens) Find

func (toks Tokens) Find(q string) (Tokens, error)

func (Tokens) FindByIndex added in v0.0.2

func (toks Tokens) FindByIndex(ti []int) (Tokens, error)

func (Tokens) FindByLabel added in v0.0.2

func (toks Tokens) FindByLabel(label string) (*Token, error)

func (Tokens) FindByValue added in v0.0.2

func (toks Tokens) FindByValue(val string) (*Token, error)

func (Tokens) Labels added in v0.0.2

func (toks Tokens) Labels() []string

func (Tokens) Len

func (toks Tokens) Len() int

func (Tokens) Search added in v0.0.2

func (toks Tokens) Search(q string) (Tokens, error)

func (Tokens) Sort added in v0.0.2

func (toks Tokens) Sort(cmp func(a, b *Token) int, order string) Tokens

func (Tokens) SortAlphaAsc added in v0.0.3

func (toks Tokens) SortAlphaAsc() Tokens

func (Tokens) SortAlphaDesc added in v0.0.3

func (toks Tokens) SortAlphaDesc() Tokens

func (Tokens) SortStable added in v0.0.2

func (toks Tokens) SortStable(cmp func(a, b *Token) int, order string) Tokens

func (Tokens) String

func (toks Tokens) String(i int) string

func (Tokens) Values added in v0.0.2

func (toks Tokens) Values() []string

func (Tokens) Without added in v0.0.2

func (toks Tokens) Without(sw Tokens) Tokens

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL