Documentation ¶
Index ¶
- Variables
- func DefaultStopWords() []string
- func SortByAlphaFunc(a *Token, b *Token) int
- func Split(str string, s sep.Func) []string
- func Stem(token string) string
- func StripPunct(token string) string
- func ToLower(ana *Analyzer)
- func WithDefaultStopWords(ana *Analyzer)
- func WithStemmer(ana *Analyzer)
- func WithoutPunct(ana *Analyzer)
- type Analyzer
- func (ana *Analyzer) AddNormalizer(normies ...Normalizer) *Analyzer
- func (ana *Analyzer) IsStopWord(token string) bool
- func (ana *Analyzer) Keywords() *Analyzer
- func (ana *Analyzer) SetStopWords(words []string) *Analyzer
- func (ana *Analyzer) Stopwords() Tokens
- func (ana *Analyzer) Tokenize(text string) (Tokens, error)
- func (ana *Analyzer) WithNormalizer(normies ...Normalizer) *Analyzer
- func (ana *Analyzer) WithSep(sep sep.Func) *Analyzer
- func (ana *Analyzer) WithoutStopWords() bool
- type Normalizer
- type Option
- type Token
- type Tokens
- func (toks Tokens) Find(q string) (Tokens, error)
- func (toks Tokens) FindByIndex(ti []int) (Tokens, error)
- func (toks Tokens) FindByLabel(label string) (*Token, error)
- func (toks Tokens) FindByValue(val string) (*Token, error)
- func (toks Tokens) Labels() []string
- func (toks Tokens) Len() int
- func (toks Tokens) Search(q string) (Tokens, error)
- func (toks Tokens) Sort(cmp func(a, b *Token) int, order string) Tokens
- func (toks Tokens) SortAlphaAsc() Tokens
- func (toks Tokens) SortAlphaDesc() Tokens
- func (toks Tokens) SortStable(cmp func(a, b *Token) int, order string) Tokens
- func (toks Tokens) String(i int) string
- func (toks Tokens) Values() []string
- func (toks Tokens) Without(sw Tokens) Tokens
Constants ¶
This section is empty.
Variables ¶
Functions ¶
func DefaultStopWords ¶
func DefaultStopWords() []string
func SortByAlphaFunc ¶ added in v0.0.2
func SortByAlphaFunc(a *Token, b *Token) int
func StripPunct ¶
func StripPunct(token string) string
func WithDefaultStopWords ¶
func WithDefaultStopWords(ana *Analyzer)
func WithStemmer ¶
func WithStemmer(ana *Analyzer)
func WithoutPunct ¶
func WithoutPunct(ana *Analyzer)
Types ¶
type Analyzer ¶
type Analyzer struct {
// contains filtered or unexported fields
}
func NewNormalizer ¶ added in v0.0.2
func (*Analyzer) AddNormalizer ¶
func (ana *Analyzer) AddNormalizer(normies ...Normalizer) *Analyzer
func (*Analyzer) IsStopWord ¶
func (ana *Analyzer) IsStopWord(token string) bool
func (*Analyzer) SetStopWords ¶
func (ana *Analyzer) SetStopWords(words []string) *Analyzer
func (*Analyzer) WithNormalizer ¶
func (ana *Analyzer) WithNormalizer(normies ...Normalizer) *Analyzer
func (*Analyzer) WithoutStopWords ¶ added in v0.0.4
func (ana *Analyzer) WithoutStopWords() bool
type Normalizer ¶
type Option ¶
type Option func(*Analyzer)
func WithNormalizers ¶
func WithNormalizers(n ...Normalizer) Option
func WithStopWords ¶
type Tokens ¶
type Tokens []*Token
func Normalize ¶
func Normalize(ls []string, normies []Normalizer) Tokens
func (Tokens) FindByIndex ¶ added in v0.0.2
func (toks Tokens) FindByIndex(ti []int) (Tokens, error)
func (Tokens) FindByLabel ¶ added in v0.0.2
func (toks Tokens) FindByLabel(label string) (*Token, error)
func (Tokens) FindByValue ¶ added in v0.0.2
func (toks Tokens) FindByValue(val string) (*Token, error)
func (Tokens) SortAlphaAsc ¶ added in v0.0.3
func (toks Tokens) SortAlphaAsc() Tokens
func (Tokens) SortAlphaDesc ¶ added in v0.0.3
func (toks Tokens) SortAlphaDesc() Tokens
func (Tokens) SortStable ¶ added in v0.0.2
func (toks Tokens) SortStable(cmp func(a, b *Token) int, order string) Tokens
Click to show/hide internal directories.