bluge

package module
v0.2.1
Published: May 26, 2022 License: Apache-2.0 Imports: 20 Imported by: 78

README

Bluge

modern text indexing in go - blugelabs.com

Features

  • Supported field types:
    • Text, Numeric, Date, Geo Point
  • Supported query types:
    • Term, Phrase, Match, Match Phrase, Prefix
    • Conjunction, Disjunction, Boolean
    • Numeric Range, Date Range
  • BM25 Similarity/Scoring with pluggable interfaces
  • Search result match highlighting
  • Extendable Aggregations:
    • Bucketing
      • Terms
      • Numeric Range
      • Date Range
    • Metrics
      • Min/Max/Count/Sum
      • Avg/Weighted Avg
      • Cardinality Estimation (HyperLogLog++)
      • Quantile Approximation (T-Digest)

Indexing

    config := bluge.DefaultConfig(path)
    writer, err := bluge.OpenWriter(config)
    if err != nil {
        log.Fatalf("error opening writer: %v", err)
    }
    defer writer.Close()

    doc := bluge.NewDocument("example").
        AddField(bluge.NewTextField("name", "bluge"))

    err = writer.Update(doc.ID(), doc)
    if err != nil {
        log.Fatalf("error updating document: %v", err)
    }

Querying

    reader, err := writer.Reader()
    if err != nil {
        log.Fatalf("error getting index reader: %v", err)
    }
    defer reader.Close()

    query := bluge.NewMatchQuery("bluge").SetField("name")
    request := bluge.NewTopNSearch(10, query).
        WithStandardAggregations()
    documentMatchIterator, err := reader.Search(context.Background(), request)
    if err != nil {
        log.Fatalf("error executing search: %v", err)
    }
    match, err := documentMatchIterator.Next()
    for err == nil && match != nil {
        err = match.VisitStoredFields(func(field string, value []byte) bool {
            if field == "_id" {
                fmt.Printf("match: %s\n", string(value))
            }
            return true
        })
        if err != nil {
            log.Fatalf("error loading stored fields: %v", err)
        }
        match, err = documentMatchIterator.Next()
    }
    if err != nil {
        log.Fatalf("error iterator document matches: %v", err)
    }
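
The request above enables the standard aggregations; once iteration completes they can be read from the iterator. A minimal sketch, assuming the iterator exposes an Aggregations() bucket with Count, Duration and Metric accessors:

    aggs := documentMatchIterator.Aggregations()
    fmt.Printf("total matches: %d\n", aggs.Count())
    fmt.Printf("max score: %f\n", aggs.Metric("max_score"))
    fmt.Printf("search took: %v\n", aggs.Duration())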


License

Apache License Version 2.0

Documentation

Overview

Package bluge is a library for indexing and searching text.

Example Opening New Index, Indexing Data

config := bluge.DefaultConfig(path)
writer, err := bluge.OpenWriter(config)
if err != nil {
	log.Fatalf("error opening writer: %v", err)
}
defer writer.Close()

doc := bluge.NewDocument("example").
	AddField(bluge.NewTextField("name", "bluge"))

err = writer.Update(doc.ID(), doc)
if err != nil {
	log.Fatalf("error updating document: %v", err)
}

Example Getting Index Reader, Searching Data

	reader, err := writer.Reader()
	if err != nil {
		log.Fatalf("error getting index reader: %v", err)
	}
	defer reader.Close()

	query := bluge.NewMatchQuery("bluge").SetField("name")
	request := bluge.NewTopNSearch(10, query).
		WithStandardAggregations()
	documentMatchIterator, err := reader.Search(context.Background(), request)
	if err != nil {
		log.Fatalf("error executing search: %v", err)
	}
	match, err := documentMatchIterator.Next()
	for err == nil && match != nil {

		// load the identifier for this match
		err = match.VisitStoredFields(func(field string, value []byte) bool {
			if field == "_id" {
				fmt.Printf("match: %s\n", string(value))
			}
			return true
		})
		if err != nil {
			log.Fatalf("error loading stored fields: %v", err)
		}
		match, err = documentMatchIterator.Next()
	}
	if err != nil {
		log.Fatalf("error iterator document matches: %v", err)
	}

Index

Constants

const (
	// Document must satisfy AT LEAST ONE of term searches.
	MatchQueryOperatorOr = 0
	// Document must satisfy ALL of term searches.
	MatchQueryOperatorAnd = 1
)
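
For example, a MatchQuery can require every analyzed term to match, rather than any one of them, by selecting the AND operator. A minimal sketch; the "title" field name is illustrative:

    // require all analyzed terms to appear in the field
    query := bluge.NewMatchQuery("modern text indexing").
        SetField("title").
        SetOperator(bluge.MatchQueryOperatorAnd)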

Variables

var MaxNumeric = math.Inf(1)
var MinNumeric = math.Inf(-1)

Functions

func DecodeDateTime

func DecodeDateTime(value []byte) (time.Time, error)

func DecodeGeoLonLat

func DecodeGeoLonLat(value []byte) (lon, lat float64, err error)

func DecodeNumericFloat64

func DecodeNumericFloat64(value []byte) (float64, error)

func MultiSearch

func MultiSearch(ctx context.Context, req SearchRequest, readers ...*Reader) (search.DocumentMatchIterator, error)

func NewBatch

func NewBatch() *index.Batch

NewBatch creates a new empty batch.
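
Batches let many mutations be applied through a single call to Writer.Batch. A minimal sketch, assuming a writer opened as in the examples above, that the batch's Update method mirrors Writer.Update, and that docs is a hypothetical slice of *bluge.Document:

    batch := bluge.NewBatch()
    for _, doc := range docs {
        batch.Update(doc.ID(), doc)
    }
    if err := writer.Batch(batch); err != nil {
        log.Fatalf("error executing batch: %v", err)
    }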

Types

type AllMatches

type AllMatches struct {
	BaseSearch
}

func NewAllMatches

func NewAllMatches(q Query) *AllMatches

func (*AllMatches) AddAggregation

func (s *AllMatches) AddAggregation(name string, aggregation search.Aggregation)

func (*AllMatches) Collector

func (s *AllMatches) Collector() search.Collector

func (*AllMatches) ExplainScores

func (s *AllMatches) ExplainScores() *AllMatches

func (*AllMatches) IncludeLocations

func (s *AllMatches) IncludeLocations() *AllMatches

func (*AllMatches) WithStandardAggregations

func (s *AllMatches) WithStandardAggregations() *AllMatches

type Analyzer

type Analyzer interface {
	Analyze(input []byte) analysis.TokenStream
}

type BaseSearch

type BaseSearch struct {
	// contains filtered or unexported fields
}

func (BaseSearch) Aggregations

func (b BaseSearch) Aggregations() search.Aggregations

func (BaseSearch) Options

func (b BaseSearch) Options() SearchOptions

func (BaseSearch) Query

func (b BaseSearch) Query() Query

func (BaseSearch) Searcher

func (b BaseSearch) Searcher(i search.Reader, config Config) (search.Searcher, error)

type BooleanQuery

type BooleanQuery struct {
	// contains filtered or unexported fields
}

func NewBooleanQuery

func NewBooleanQuery() *BooleanQuery

NewBooleanQuery creates a compound Query composed of several other Query objects. These other query objects are added using the AddMust(), AddShould() and AddMustNot() methods. Result documents must satisfy ALL of the must Queries. Result documents must satisfy NONE of the must not Queries. Result documents that ALSO satisfy any of the should Queries will score higher.
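
A minimal sketch of composing a BooleanQuery from the query types described on this page; the field names are illustrative:

    // "name" must contain bluge, documents mentioning search score higher,
    // and anything tagged deprecated is excluded
    query := bluge.NewBooleanQuery().
        AddMust(bluge.NewMatchQuery("bluge").SetField("name")).
        AddShould(bluge.NewMatchQuery("search").SetField("description")).
        AddMustNot(bluge.NewTermQuery("deprecated").SetField("tags"))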

func (*BooleanQuery) AddMust

func (q *BooleanQuery) AddMust(m ...Query) *BooleanQuery

func (*BooleanQuery) AddMustNot

func (q *BooleanQuery) AddMustNot(m ...Query) *BooleanQuery

func (*BooleanQuery) AddShould

func (q *BooleanQuery) AddShould(m ...Query) *BooleanQuery

func (*BooleanQuery) Boost

func (q *BooleanQuery) Boost() float64

func (*BooleanQuery) MinShould added in v0.1.5

func (q *BooleanQuery) MinShould() int

MinShould returns the minimum number of should queries that need to match

func (*BooleanQuery) MustNots added in v0.1.5

func (q *BooleanQuery) MustNots() []Query

MustNots returns queries that the documents must not match

func (*BooleanQuery) Musts added in v0.1.5

func (q *BooleanQuery) Musts() []Query

Musts returns the queries that the documents must match

func (*BooleanQuery) Searcher

func (q *BooleanQuery) Searcher(i search.Reader, options search.SearcherOptions) (rv search.Searcher, err error)

func (*BooleanQuery) SetBoost

func (q *BooleanQuery) SetBoost(b float64) *BooleanQuery

func (*BooleanQuery) SetMinShould

func (q *BooleanQuery) SetMinShould(minShould int) *BooleanQuery

SetMinShould requires that at least minShould of the should Queries must be satisfied.

func (*BooleanQuery) Shoulds added in v0.1.5

func (q *BooleanQuery) Shoulds() []Query

Shoulds returns queries that the documents may match

func (*BooleanQuery) Validate

func (q *BooleanQuery) Validate() error

type CompositeField

type CompositeField struct {
	*TermField
	// contains filtered or unexported fields
}

func NewCompositeField

func NewCompositeField(name string, defaultInclude bool, include, exclude []string) *CompositeField

func NewCompositeFieldExcluding

func NewCompositeFieldExcluding(name string, excluding []string) *CompositeField

func NewCompositeFieldIncluding

func NewCompositeFieldIncluding(name string, including []string) *CompositeField

func (*CompositeField) Analyze

func (c *CompositeField) Analyze(int) int

func (*CompositeField) Consume

func (c *CompositeField) Consume(field Field)

func (*CompositeField) EachTerm

func (c *CompositeField) EachTerm(vt segment.VisitTerm)

func (*CompositeField) Length

func (c *CompositeField) Length() int

func (*CompositeField) PositionIncrementGap

func (c *CompositeField) PositionIncrementGap() int

func (*CompositeField) Size

func (c *CompositeField) Size() int

type Config

type Config struct {
	Logger *log.Logger

	DefaultSearchField    string
	DefaultSearchAnalyzer *analysis.Analyzer
	DefaultSimilarity     search.Similarity
	PerFieldSimilarity    map[string]search.Similarity

	SearchStartFunc func(size uint64) error
	SearchEndFunc   func(size uint64)
	// contains filtered or unexported fields
}

func DefaultConfig

func DefaultConfig(path string) Config

func DefaultConfigWithDirectory added in v0.1.3

func DefaultConfigWithDirectory(df func() index.Directory) Config

func InMemoryOnlyConfig

func InMemoryOnlyConfig() Config

func (Config) DisableOptimizeConjunction

func (config Config) DisableOptimizeConjunction() Config

func (Config) DisableOptimizeConjunctionUnadorned

func (config Config) DisableOptimizeConjunctionUnadorned() Config

func (Config) DisableOptimizeDisjunctionUnadorned

func (config Config) DisableOptimizeDisjunctionUnadorned() Config

func (Config) WithSearchStartFunc

func (config Config) WithSearchStartFunc(f func(size uint64) error) Config

func (Config) WithSegmentType

func (config Config) WithSegmentType(typ string) Config

func (Config) WithSegmentVersion

func (config Config) WithSegmentVersion(ver uint32) Config

func (Config) WithVirtualField

func (config Config) WithVirtualField(field Field) Config

WithVirtualField allows you to describe a field such that the index behaves as if every document in this index had been indexed with that field and its terms, even though nothing about them is physically persisted in the index.
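
A minimal sketch of configuring a virtual field; the "type"/"product" field name and value are illustrative:

    // every document in this index behaves as if it also carried this
    // keyword field, although nothing extra is persisted
    config := bluge.DefaultConfig(path).
        WithVirtualField(bluge.NewKeywordField("type", "product"))
    writer, err := bluge.OpenWriter(config)
    if err != nil {
        log.Fatalf("error opening writer: %v", err)
    }
    defer writer.Close()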

type DateRangeQuery

type DateRangeQuery struct {
	// contains filtered or unexported fields
}

func NewDateRangeInclusiveQuery

func NewDateRangeInclusiveQuery(start, end time.Time, startInclusive, endInclusive bool) *DateRangeQuery

NewDateRangeInclusiveQuery creates a new Query for ranges of date values. Date strings are parsed using the DateTimeParser configured in the top-level config.QueryDateTimeParser. Either endpoint, but not both, can be nil. startInclusive and endInclusive control inclusion of the endpoints.

func NewDateRangeQuery

func NewDateRangeQuery(start, end time.Time) *DateRangeQuery

NewDateRangeQuery creates a new Query for ranges of date values. Date strings are parsed using the DateTimeParser configured in the top-level config.QueryDateTimeParser. Either endpoint, but not both, can be nil.
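
A minimal sketch of a date range query; the "updated" field is illustrative and assumed to have been indexed with NewDateTimeField:

    start := time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC)
    end := time.Now()
    // matches documents whose "updated" value falls between start and end
    query := bluge.NewDateRangeQuery(start, end).SetField("updated")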

func (*DateRangeQuery) Boost

func (q *DateRangeQuery) Boost() float64

func (*DateRangeQuery) End added in v0.1.5

func (q *DateRangeQuery) End() (time.Time, bool)

End returns the date range end and if the end is included in the query

func (*DateRangeQuery) Field

func (q *DateRangeQuery) Field() string

func (*DateRangeQuery) Searcher

func (*DateRangeQuery) SetBoost

func (q *DateRangeQuery) SetBoost(b float64) *DateRangeQuery

func (*DateRangeQuery) SetField

func (q *DateRangeQuery) SetField(f string) *DateRangeQuery

func (*DateRangeQuery) Start added in v0.1.5

func (q *DateRangeQuery) Start() (time.Time, bool)

Start returns the date range start and if the start is included in the query

func (*DateRangeQuery) Validate

func (q *DateRangeQuery) Validate() error

type Document

type Document []Field

func NewDocument

func NewDocument(id string) *Document

func NewDocumentWithIdentifier

func NewDocumentWithIdentifier(id Identifier) *Document

func (*Document) AddField

func (d *Document) AddField(f Field) *Document

func (Document) Analyze

func (d Document) Analyze()

func (Document) EachField

func (d Document) EachField(vf segment.VisitField)

func (Document) ID

func (d Document) ID() segment.Term

ID is an experimental helper method to simplify common use cases

func (Document) Size

func (d Document) Size() int

type Field

type Field interface {
	segment.Field

	Analyze(int) int
	AnalyzedTokenFrequencies() analysis.TokenFrequencies

	PositionIncrementGap() int

	Size() int
}

type FieldConsumer

type FieldConsumer interface {
	Consume(Field)
}

FieldConsumer is anything which can consume a field. Fields can implement this interface to consume the content of another field.

type FieldOptions

type FieldOptions int
const (
	Index FieldOptions = 1 << iota
	Store
	SearchTermPositions
	HighlightMatches
	Sortable
	Aggregatable
)

func (FieldOptions) IncludeLocations

func (o FieldOptions) IncludeLocations() bool

func (FieldOptions) Index

func (o FieldOptions) Index() bool

func (FieldOptions) IndexDocValues

func (o FieldOptions) IndexDocValues() bool

func (FieldOptions) Store

func (o FieldOptions) Store() bool

type FuzzyQuery

type FuzzyQuery struct {
	// contains filtered or unexported fields
}

func NewFuzzyQuery

func NewFuzzyQuery(term string) *FuzzyQuery

NewFuzzyQuery creates a new Query which finds documents containing terms within a specific fuzziness of the specified term. The default fuzziness is 1.

The current implementation uses Levenshtein edit distance as the fuzziness metric.
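
A minimal sketch of a fuzzy query; the "name" field is illustrative:

    // accept terms within edit distance 2 of "blug", requiring the first
    // two characters to match exactly
    query := bluge.NewFuzzyQuery("blug").
        SetField("name").
        SetFuzziness(2).
        SetPrefix(2)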

func (*FuzzyQuery) Boost

func (q *FuzzyQuery) Boost() float64

func (*FuzzyQuery) Field

func (q *FuzzyQuery) Field() string

func (*FuzzyQuery) Fuzziness added in v0.1.5

func (q *FuzzyQuery) Fuzziness() int

Fuzziness returns the fuzziness of the query

func (*FuzzyQuery) Prefix added in v0.1.5

func (q *FuzzyQuery) Prefix() int

Prefix returns the prefix match value

func (*FuzzyQuery) Searcher

func (q *FuzzyQuery) Searcher(i search.Reader, options search.SearcherOptions) (search.Searcher, error)

func (*FuzzyQuery) SetBoost

func (q *FuzzyQuery) SetBoost(b float64) *FuzzyQuery

func (*FuzzyQuery) SetField

func (q *FuzzyQuery) SetField(f string) *FuzzyQuery

func (*FuzzyQuery) SetFuzziness

func (q *FuzzyQuery) SetFuzziness(f int) *FuzzyQuery

func (*FuzzyQuery) SetPrefix

func (q *FuzzyQuery) SetPrefix(p int) *FuzzyQuery

func (*FuzzyQuery) Term added in v0.1.5

func (q *FuzzyQuery) Term() string

Term returns the term being queried

type GeoBoundingBoxQuery

type GeoBoundingBoxQuery struct {
	// contains filtered or unexported fields
}

func NewGeoBoundingBoxQuery

func NewGeoBoundingBoxQuery(topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64) *GeoBoundingBoxQuery

NewGeoBoundingBoxQuery creates a new Query for performing geo bounding box searches. The arguments describe the position of the box and documents which have an indexed geo point inside the box will be returned.
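
A minimal sketch of a bounding box query; the coordinates roughly cover San Francisco and the "location" field is illustrative, assumed to have been indexed with NewGeoPointField:

    // arguments are top-left lon/lat followed by bottom-right lon/lat
    query := bluge.NewGeoBoundingBoxQuery(-122.52, 37.82, -122.35, 37.70).
        SetField("location")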

func (*GeoBoundingBoxQuery) Boost

func (q *GeoBoundingBoxQuery) Boost() float64

func (*GeoBoundingBoxQuery) BottomRight added in v0.1.5

func (q *GeoBoundingBoxQuery) BottomRight() []float64

BottomRight returns the bottom-right corner of the bounding box

func (*GeoBoundingBoxQuery) Field

func (q *GeoBoundingBoxQuery) Field() string

func (*GeoBoundingBoxQuery) Searcher

func (*GeoBoundingBoxQuery) SetBoost

func (*GeoBoundingBoxQuery) SetField

func (*GeoBoundingBoxQuery) TopLeft added in v0.1.5

func (q *GeoBoundingBoxQuery) TopLeft() []float64

TopLeft returns the top-left corner of the bounding box

func (*GeoBoundingBoxQuery) Validate

func (q *GeoBoundingBoxQuery) Validate() error

type GeoBoundingPolygonQuery

type GeoBoundingPolygonQuery struct {
	// contains filtered or unexported fields
}

func NewGeoBoundingPolygonQuery

func NewGeoBoundingPolygonQuery(points []geo.Point) *GeoBoundingPolygonQuery

NewGeoBoundingPolygonQuery creates a new Query for performing geo polygon searches. The arguments describe the points of the polygon and documents which have an indexed geo point inside the polygon will be returned.

func (*GeoBoundingPolygonQuery) Boost

func (q *GeoBoundingPolygonQuery) Boost() float64

func (*GeoBoundingPolygonQuery) Field

func (q *GeoBoundingPolygonQuery) Field() string

func (*GeoBoundingPolygonQuery) Points added in v0.1.5

func (q *GeoBoundingPolygonQuery) Points() []geo.Point

Points returns all the points of the polygon being queried

func (*GeoBoundingPolygonQuery) Searcher

func (*GeoBoundingPolygonQuery) SetBoost

func (*GeoBoundingPolygonQuery) SetField

func (*GeoBoundingPolygonQuery) Validate

func (q *GeoBoundingPolygonQuery) Validate() error

type GeoDistanceQuery

type GeoDistanceQuery struct {
	// contains filtered or unexported fields
}

func NewGeoDistanceQuery

func NewGeoDistanceQuery(lon, lat float64, distance string) *GeoDistanceQuery

NewGeoDistanceQuery creates a new Query for performing geo distance searches. The arguments describe a position and a distance. Documents which have an indexed geo point which is less than or equal to the provided distance from the given position will be returned.
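
A minimal sketch of a distance query; the "location" field is illustrative and the "5km" distance string is an assumed supported format:

    // documents with a geo point within 5km of the given lon/lat
    query := bluge.NewGeoDistanceQuery(-122.41, 37.77, "5km").
        SetField("location")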

func (*GeoDistanceQuery) Boost

func (q *GeoDistanceQuery) Boost() float64

func (*GeoDistanceQuery) Distance added in v0.1.5

func (q *GeoDistanceQuery) Distance() string

Distance returns the distance being queried

func (*GeoDistanceQuery) Field

func (q *GeoDistanceQuery) Field() string

func (*GeoDistanceQuery) Location added in v0.1.5

func (q *GeoDistanceQuery) Location() []float64

Location returns the location being queried

func (*GeoDistanceQuery) Searcher

func (*GeoDistanceQuery) SetBoost

func (q *GeoDistanceQuery) SetBoost(b float64) *GeoDistanceQuery

func (*GeoDistanceQuery) SetField

func (q *GeoDistanceQuery) SetField(f string) *GeoDistanceQuery

func (*GeoDistanceQuery) Validate

func (q *GeoDistanceQuery) Validate() error

type Identifier

type Identifier string

func (Identifier) Field

func (i Identifier) Field() string

func (Identifier) Term

func (i Identifier) Term() []byte

type MatchAllQuery

type MatchAllQuery struct {
	// contains filtered or unexported fields
}

func NewMatchAllQuery

func NewMatchAllQuery() *MatchAllQuery

NewMatchAllQuery creates a Query which will match all documents in the index.

func (*MatchAllQuery) Boost

func (q *MatchAllQuery) Boost() float64

func (*MatchAllQuery) Searcher

func (*MatchAllQuery) SetBoost

func (q *MatchAllQuery) SetBoost(b float64) *MatchAllQuery

type MatchNoneQuery

type MatchNoneQuery struct {
	// contains filtered or unexported fields
}

func NewMatchNoneQuery

func NewMatchNoneQuery() *MatchNoneQuery

NewMatchNoneQuery creates a Query which will not match any documents in the index.

func (*MatchNoneQuery) Boost

func (q *MatchNoneQuery) Boost() float64

func (*MatchNoneQuery) Searcher

func (*MatchNoneQuery) SetBoost

func (q *MatchNoneQuery) SetBoost(b float64) *MatchNoneQuery

type MatchPhraseQuery

type MatchPhraseQuery struct {
	// contains filtered or unexported fields
}

func NewMatchPhraseQuery

func NewMatchPhraseQuery(matchPhrase string) *MatchPhraseQuery

NewMatchPhraseQuery creates a new Query object for matching phrases in the index. An Analyzer is chosen based on the field. Input text is analyzed using this analyzer. Token terms resulting from this analysis are used to build a search phrase. Result documents must match this phrase. Queried field must have been indexed with IncludeTermVectors set to true.
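
A minimal sketch of the indexing and querying sides together; the "description" field is illustrative, and SearchTermPositions is assumed to be the field option that records the term positions phrase matching needs:

    // indexing: record term positions so phrase adjacency can be verified
    doc := bluge.NewDocument("example").
        AddField(bluge.NewTextField("description", "modern text indexing in go").
            SearchTermPositions())

    // querying: the analyzed phrase must appear, in order, in the field
    query := bluge.NewMatchPhraseQuery("text indexing").SetField("description")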

func (*MatchPhraseQuery) Analyzer

func (q *MatchPhraseQuery) Analyzer() *analysis.Analyzer

func (*MatchPhraseQuery) Boost

func (q *MatchPhraseQuery) Boost() float64

func (*MatchPhraseQuery) Field

func (q *MatchPhraseQuery) Field() string

func (*MatchPhraseQuery) Phrase added in v0.1.5

func (q *MatchPhraseQuery) Phrase() string

Phrase returns the phrase being queried

func (*MatchPhraseQuery) Searcher

func (*MatchPhraseQuery) SetAnalyzer

func (q *MatchPhraseQuery) SetAnalyzer(a *analysis.Analyzer) *MatchPhraseQuery

func (*MatchPhraseQuery) SetBoost

func (q *MatchPhraseQuery) SetBoost(b float64) *MatchPhraseQuery

func (*MatchPhraseQuery) SetField

func (q *MatchPhraseQuery) SetField(f string) *MatchPhraseQuery

func (*MatchPhraseQuery) SetSlop added in v0.1.5

func (q *MatchPhraseQuery) SetSlop(dist int) *MatchPhraseQuery

SetSlop updates the slop of the query; the phrase terms can be up to "dist" terms away from each other

func (*MatchPhraseQuery) Slop added in v0.1.5

func (q *MatchPhraseQuery) Slop() int

Slop returns the acceptable distance between tokens

type MatchQuery

type MatchQuery struct {
	// contains filtered or unexported fields
}

func NewMatchQuery

func NewMatchQuery(match string) *MatchQuery

NewMatchQuery creates a Query for matching text. An Analyzer is chosen based on the field. Input text is analyzed using this analyzer. Token terms resulting from this analysis are used to perform term searches. Result documents must satisfy at least one of these term searches.

func (*MatchQuery) Analyzer

func (q *MatchQuery) Analyzer() *analysis.Analyzer

func (*MatchQuery) Boost

func (q *MatchQuery) Boost() float64

func (*MatchQuery) Field

func (q *MatchQuery) Field() string

func (*MatchQuery) Fuzziness

func (q *MatchQuery) Fuzziness() int

func (*MatchQuery) Match added in v0.1.5

func (q *MatchQuery) Match() string

Match returns the term being queried

func (*MatchQuery) Operator

func (q *MatchQuery) Operator() MatchQueryOperator

func (*MatchQuery) Prefix

func (q *MatchQuery) Prefix() int

func (*MatchQuery) Searcher

func (q *MatchQuery) Searcher(i search.Reader, options search.SearcherOptions) (search.Searcher, error)

func (*MatchQuery) SetAnalyzer

func (q *MatchQuery) SetAnalyzer(a *analysis.Analyzer) *MatchQuery

func (*MatchQuery) SetBoost

func (q *MatchQuery) SetBoost(b float64) *MatchQuery

func (*MatchQuery) SetField

func (q *MatchQuery) SetField(f string) *MatchQuery

func (*MatchQuery) SetFuzziness

func (q *MatchQuery) SetFuzziness(f int) *MatchQuery

func (*MatchQuery) SetOperator

func (q *MatchQuery) SetOperator(operator MatchQueryOperator) *MatchQuery

func (*MatchQuery) SetPrefix

func (q *MatchQuery) SetPrefix(p int) *MatchQuery

type MatchQueryOperator

type MatchQueryOperator int

type MultiPhraseQuery

type MultiPhraseQuery struct {
	// contains filtered or unexported fields
}

func NewMultiPhraseQuery

func NewMultiPhraseQuery(terms [][]string) *MultiPhraseQuery

NewMultiPhraseQuery creates a new Query for finding term phrases in the index. It is like PhraseQuery, but each position in the phrase may be satisfied by a list of terms as opposed to just one. At least one of the terms must exist in the correct order, at the correct index offsets, in the specified field. Queried field must have been indexed with IncludeTermVectors set to true.
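
A minimal sketch of a multi-phrase query; the terms and "description" field are illustrative, and each inner slice holds the already-analyzed alternatives for one position:

    // matches "quick fox" or "fast fox"
    query := bluge.NewMultiPhraseQuery([][]string{{"quick", "fast"}, {"fox"}}).
        SetField("description")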

func (*MultiPhraseQuery) Boost

func (q *MultiPhraseQuery) Boost() float64

func (*MultiPhraseQuery) Field

func (q *MultiPhraseQuery) Field() string

func (*MultiPhraseQuery) Searcher

func (*MultiPhraseQuery) SetBoost

func (q *MultiPhraseQuery) SetBoost(b float64) *MultiPhraseQuery

func (*MultiPhraseQuery) SetField

func (q *MultiPhraseQuery) SetField(f string) *MultiPhraseQuery

func (*MultiPhraseQuery) SetSlop added in v0.1.5

func (q *MultiPhraseQuery) SetSlop(dist int) *MultiPhraseQuery

SetSlop updates the slop of the query; the phrase terms can be up to "dist" terms away from each other

func (*MultiPhraseQuery) Slop added in v0.1.5

func (q *MultiPhraseQuery) Slop() int

Slop returns the acceptable distance between terms

func (*MultiPhraseQuery) Terms added in v0.1.5

func (q *MultiPhraseQuery) Terms() [][]string

Terms returns the term phrases being queried

func (*MultiPhraseQuery) Validate

func (q *MultiPhraseQuery) Validate() error

type MultiSearcherList

type MultiSearcherList struct {
	// contains filtered or unexported fields
}

func NewMultiSearcherList

func NewMultiSearcherList(searchers []search.Searcher) *MultiSearcherList

func (*MultiSearcherList) Close

func (m *MultiSearcherList) Close() (err error)

func (*MultiSearcherList) DocumentMatchPoolSize

func (m *MultiSearcherList) DocumentMatchPoolSize() int

func (*MultiSearcherList) Next

type NumericRangeQuery

type NumericRangeQuery struct {
	// contains filtered or unexported fields
}

func NewNumericRangeInclusiveQuery

func NewNumericRangeInclusiveQuery(min, max float64, minInclusive, maxInclusive bool) *NumericRangeQuery

NewNumericRangeInclusiveQuery creates a new Query for ranges of numeric values. Either endpoint, but not both, can be nil. Control endpoint inclusion with minInclusive, maxInclusive.

func NewNumericRangeQuery

func NewNumericRangeQuery(min, max float64) *NumericRangeQuery

NewNumericRangeQuery creates a new Query for ranges of numeric values. Either endpoint, but not both, can be nil. The minimum value is inclusive. The maximum value is exclusive.
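
A minimal sketch of numeric range queries; the "price" field is illustrative, and the MinNumeric/MaxNumeric sentinels are assumed to be the way to leave an endpoint open:

    // prices from 10 (inclusive) up to, but not including, 100
    query := bluge.NewNumericRangeQuery(10, 100).SetField("price")

    // an open-ended range: 10 or more
    atLeastTen := bluge.NewNumericRangeQuery(10, bluge.MaxNumeric).SetField("price")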

func (*NumericRangeQuery) Boost

func (q *NumericRangeQuery) Boost() float64

func (*NumericRangeQuery) Field

func (q *NumericRangeQuery) Field() string

func (*NumericRangeQuery) Max added in v0.1.5

func (q *NumericRangeQuery) Max() (float64, bool)

Max returns the numeric range upper bound and whether the upper bound is included

func (*NumericRangeQuery) Min added in v0.1.5

func (q *NumericRangeQuery) Min() (float64, bool)

Min returns the numeric range lower bound and whether the lower bound is included

func (*NumericRangeQuery) Searcher

func (*NumericRangeQuery) SetBoost

func (*NumericRangeQuery) SetField

func (q *NumericRangeQuery) SetField(f string) *NumericRangeQuery

func (*NumericRangeQuery) Validate

func (q *NumericRangeQuery) Validate() error

type OfflineWriter

type OfflineWriter struct {
	// contains filtered or unexported fields
}

func OpenOfflineWriter

func OpenOfflineWriter(config Config, batchSize, maxSegmentsToMerge int) (*OfflineWriter, error)

func (*OfflineWriter) Close

func (w *OfflineWriter) Close() error

func (*OfflineWriter) Insert

func (w *OfflineWriter) Insert(doc segment.Document) error

type PrefixQuery

type PrefixQuery struct {
	// contains filtered or unexported fields
}

func NewPrefixQuery

func NewPrefixQuery(prefix string) *PrefixQuery

NewPrefixQuery creates a new Query which finds documents containing terms that start with the specified prefix.

func (*PrefixQuery) Boost

func (q *PrefixQuery) Boost() float64

func (*PrefixQuery) Field

func (q *PrefixQuery) Field() string

func (*PrefixQuery) Prefix added in v0.1.5

func (q *PrefixQuery) Prefix() string

Prefix returns the prefix being queried

func (*PrefixQuery) Searcher

func (q *PrefixQuery) Searcher(i search.Reader, options search.SearcherOptions) (search.Searcher, error)

func (*PrefixQuery) SetBoost

func (q *PrefixQuery) SetBoost(b float64) *PrefixQuery

func (*PrefixQuery) SetField

func (q *PrefixQuery) SetField(f string) *PrefixQuery

type Query

type Query interface {
	Searcher(i search.Reader,
		options search.SearcherOptions) (search.Searcher, error)
}

A Query represents a description of the type and parameters for a query into the index.

type Reader

type Reader struct {
	// contains filtered or unexported fields
}

func OpenReader

func OpenReader(config Config) (*Reader, error)

func (*Reader) Backup

func (r *Reader) Backup(path string, cancel chan struct{}) error

func (*Reader) Close

func (r *Reader) Close() error

func (*Reader) Count

func (r *Reader) Count() (count uint64, err error)

func (*Reader) DictionaryIterator

func (r *Reader) DictionaryIterator(field string, automaton segment.Automaton, start, end []byte) (segment.DictionaryIterator, error)

func (*Reader) Fields

func (r *Reader) Fields() (fields []string, err error)

func (*Reader) Search

func (*Reader) VisitStoredFields

func (r *Reader) VisitStoredFields(number uint64, visitor StoredFieldVisitor) error

type RegexpQuery

type RegexpQuery struct {
	// contains filtered or unexported fields
}

func NewRegexpQuery

func NewRegexpQuery(regexp string) *RegexpQuery

NewRegexpQuery creates a new Query which finds documents containing terms that match the specified regular expression.

func (*RegexpQuery) Boost

func (q *RegexpQuery) Boost() float64

func (*RegexpQuery) Field

func (q *RegexpQuery) Field() string

func (*RegexpQuery) Regexp added in v0.1.5

func (q *RegexpQuery) Regexp() string

Regexp returns the regular expression being queried

func (*RegexpQuery) Searcher

func (q *RegexpQuery) Searcher(i search.Reader, options search.SearcherOptions) (search.Searcher, error)

func (*RegexpQuery) SetBoost

func (q *RegexpQuery) SetBoost(b float64) *RegexpQuery

func (*RegexpQuery) SetField

func (q *RegexpQuery) SetField(f string) *RegexpQuery

func (*RegexpQuery) Validate

func (q *RegexpQuery) Validate() error

type SearchOptions

type SearchOptions struct {
	ExplainScores    bool
	IncludeLocations bool
	Score            string // FIXME go away
}

type SearchRequest

type SearchRequest interface {
	Collector() search.Collector
	Searcher(i search.Reader, config Config) (search.Searcher, error)
	AddAggregation(name string, aggregation search.Aggregation)
	Aggregations() search.Aggregations
}

type StoredFieldVisitor

type StoredFieldVisitor func(field string, value []byte) bool

type TermField

type TermField struct {
	FieldOptions
	// contains filtered or unexported fields
}

func NewDateTimeField

func NewDateTimeField(name string, dt time.Time) *TermField

func NewGeoPointField

func NewGeoPointField(name string, lon, lat float64) *TermField

func NewKeywordField

func NewKeywordField(name, value string) *TermField

func NewKeywordFieldBytes

func NewKeywordFieldBytes(name string, value []byte) *TermField

func NewNumericField

func NewNumericField(name string, number float64) *TermField

func NewStoredOnlyField

func NewStoredOnlyField(name string, value []byte) *TermField

func NewTextField

func NewTextField(name, value string) *TermField

func NewTextFieldBytes

func NewTextFieldBytes(name string, value []byte) *TermField

func (*TermField) Aggregatable

func (b *TermField) Aggregatable() *TermField

func (*TermField) Analyze

func (b *TermField) Analyze(startOffset int) (lastPos int)

func (*TermField) AnalyzedLength

func (b *TermField) AnalyzedLength() int

func (*TermField) AnalyzedTokenFrequencies

func (b *TermField) AnalyzedTokenFrequencies() analysis.TokenFrequencies

func (*TermField) EachTerm

func (b *TermField) EachTerm(vt segment.VisitTerm)

func (*TermField) HighlightMatches

func (b *TermField) HighlightMatches() *TermField

func (*TermField) Length

func (b *TermField) Length() int

func (*TermField) Name

func (b *TermField) Name() string

func (*TermField) NumPlainTextBytes

func (b *TermField) NumPlainTextBytes() int

func (*TermField) PositionIncrementGap

func (b *TermField) PositionIncrementGap() int

func (*TermField) SearchTermPositions

func (b *TermField) SearchTermPositions() *TermField

func (*TermField) SetPositionIncrementGap

func (b *TermField) SetPositionIncrementGap(positionIncrementGap int) *TermField

func (*TermField) Size

func (b *TermField) Size() int

func (*TermField) Sortable

func (b *TermField) Sortable() *TermField

func (*TermField) StoreValue

func (b *TermField) StoreValue() *TermField

func (*TermField) Value

func (b *TermField) Value() []byte

func (*TermField) WithAnalyzer

func (b *TermField) WithAnalyzer(fieldAnalyzer Analyzer) *TermField

type TermQuery

type TermQuery struct {
	// contains filtered or unexported fields
}

func NewTermQuery

func NewTermQuery(term string) *TermQuery

NewTermQuery creates a new Query for finding an exact term match in the index.
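
A minimal sketch pairing a TermQuery with a keyword field, which indexes the whole value as a single term; the "status"/"active" field and value are illustrative:

    doc := bluge.NewDocument("product-42").
        AddField(bluge.NewKeywordField("status", "active"))

    // finds documents whose "status" term is exactly "active"
    query := bluge.NewTermQuery("active").SetField("status")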

func (*TermQuery) Boost

func (q *TermQuery) Boost() float64

func (*TermQuery) Field

func (q *TermQuery) Field() string

func (*TermQuery) Searcher

func (q *TermQuery) Searcher(i search.Reader, options search.SearcherOptions) (search.Searcher, error)

func (*TermQuery) SetBoost

func (q *TermQuery) SetBoost(b float64) *TermQuery

func (*TermQuery) SetField

func (q *TermQuery) SetField(f string) *TermQuery

func (*TermQuery) Term added in v0.1.5

func (q *TermQuery) Term() string

Term returns the exact term being queried

type TermRangeQuery

type TermRangeQuery struct {
	// contains filtered or unexported fields
}

func NewTermRangeInclusiveQuery

func NewTermRangeInclusiveQuery(min, max string, minInclusive, maxInclusive bool) *TermRangeQuery

NewTermRangeInclusiveQuery creates a new Query for ranges of text terms. Either endpoint, but not both, can be "". Control endpoint inclusion with minInclusive, maxInclusive.

func NewTermRangeQuery

func NewTermRangeQuery(min, max string) *TermRangeQuery

NewTermRangeQuery creates a new Query for ranges of text terms. Either endpoint, but not both, can be "". The minimum value is inclusive. The maximum value is exclusive.

func (*TermRangeQuery) Boost

func (q *TermRangeQuery) Boost() float64

func (*TermRangeQuery) Field

func (q *TermRangeQuery) Field() string

func (*TermRangeQuery) Max added in v0.1.5

func (q *TermRangeQuery) Max() (string, bool)

Max returns the query upper bound and whether the upper bound is included in the query

func (*TermRangeQuery) Min added in v0.1.5

func (q *TermRangeQuery) Min() (string, bool)

Min returns the query lower bound and whether the lower bound is included in the query

func (*TermRangeQuery) Searcher

func (*TermRangeQuery) SetBoost

func (q *TermRangeQuery) SetBoost(b float64) *TermRangeQuery

func (*TermRangeQuery) SetField

func (q *TermRangeQuery) SetField(f string) *TermRangeQuery

func (*TermRangeQuery) Validate

func (q *TermRangeQuery) Validate() error

type TopNSearch

type TopNSearch struct {
	BaseSearch
	// contains filtered or unexported fields
}

TopNSearch is used to search for a fixed number of matches which can be sorted by a custom sort order. It also allows for skipping a specified number of matches which can be used to enable pagination.

func NewTopNSearch

func NewTopNSearch(n int, q Query) *TopNSearch

NewTopNSearch creates a search which will find the matches and return the first N when ordered by the specified sort order (default: score descending)
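
A minimal sketch of pagination and sorting; query is any Query from this page and the "price" field is illustrative:

    // second page of ten results, ordered by "price" ascending and
    // then by score descending
    request := bluge.NewTopNSearch(10, query).
        SetFrom(10).
        SortBy([]string{"price", "-_score"})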

func (*TopNSearch) AddAggregation

func (s *TopNSearch) AddAggregation(name string, aggregation search.Aggregation)

func (*TopNSearch) After

func (s *TopNSearch) After(after [][]byte) *TopNSearch

After can be used to specify a sort key; any match with a sort key less than this will be skipped

func (*TopNSearch) AllMatches

func (s *TopNSearch) AllMatches(i search.Reader, config Config) (search.Searcher, error)

func (*TopNSearch) Before

func (s *TopNSearch) Before(before [][]byte) *TopNSearch

Before can be used to specify a sort key; any match with a sort key greater than this will be skipped

func (*TopNSearch) Collector

func (s *TopNSearch) Collector() search.Collector

func (*TopNSearch) ExplainScores

func (s *TopNSearch) ExplainScores() *TopNSearch

ExplainScores enables the addition of scoring explanation to each match

func (*TopNSearch) From

func (s *TopNSearch) From() int

From returns the number of matches that will be skipped

func (*TopNSearch) IncludeLocations

func (s *TopNSearch) IncludeLocations() *TopNSearch

IncludeLocations enables the addition of match location in the original field

func (*TopNSearch) SetFrom

func (s *TopNSearch) SetFrom(from int) *TopNSearch

SetFrom sets the number of results to skip

func (*TopNSearch) SetScore

func (s *TopNSearch) SetScore(mode string) *TopNSearch

func (*TopNSearch) Size

func (s *TopNSearch) Size() int

Size returns the number of matches this search request will return

func (*TopNSearch) SortBy

func (s *TopNSearch) SortBy(order []string) *TopNSearch

SortBy is a convenience method to specify search result sort order using a simple string slice. Strings in the slice are interpreted as the name of a field to sort ascending. The following special cases are handled.

  • the prefix '-' will sort in descending order
  • the special field '_score' can be used to sort by score

func (*TopNSearch) SortByCustom

func (s *TopNSearch) SortByCustom(order search.SortOrder) *TopNSearch

SortByCustom sets a custom sort order used to sort the matches of the search

func (*TopNSearch) SortOrder

func (s *TopNSearch) SortOrder() search.SortOrder

SortOrder returns the sort order of the current search

func (*TopNSearch) WithStandardAggregations

func (s *TopNSearch) WithStandardAggregations() *TopNSearch

WithStandardAggregations adds the standard aggregations to the search request. The standard aggregations are:

  • count (total number of documents that matched the query)
  • max_score (the highest score of all the matched documents)
  • duration (time taken performing the search)

type WildcardQuery

type WildcardQuery struct {
	// contains filtered or unexported fields
}

func NewWildcardQuery

func NewWildcardQuery(wildcard string) *WildcardQuery

NewWildcardQuery creates a new Query which finds documents containing terms that match the specified wildcard. In the wildcard pattern '*' will match any sequence of 0 or more characters, and '?' will match any single character.
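
A minimal sketch of a wildcard query; the "name" field is illustrative:

    // matches terms such as "blue", "bluge" or "bludge"
    query := bluge.NewWildcardQuery("blu*e").SetField("name")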

func (*WildcardQuery) Boost

func (q *WildcardQuery) Boost() float64

func (*WildcardQuery) Field

func (q *WildcardQuery) Field() string

func (*WildcardQuery) Searcher

func (*WildcardQuery) SetBoost

func (q *WildcardQuery) SetBoost(b float64) *WildcardQuery

func (*WildcardQuery) SetField

func (q *WildcardQuery) SetField(f string) *WildcardQuery

func (*WildcardQuery) Validate

func (q *WildcardQuery) Validate() error

func (*WildcardQuery) Wildcard added in v0.1.5

func (q *WildcardQuery) Wildcard() string

Wildcard returns the wildcard being queried

type Writer

type Writer struct {
	// contains filtered or unexported fields
}

func OpenWriter

func OpenWriter(config Config) (*Writer, error)

func (*Writer) Batch

func (w *Writer) Batch(batch *index.Batch) error

func (*Writer) Close

func (w *Writer) Close() error

func (*Writer) Delete

func (w *Writer) Delete(id segment.Term) error

func (*Writer) Insert

func (w *Writer) Insert(doc segment.Document) error

func (*Writer) Reader

func (w *Writer) Reader() (*Reader, error)

func (*Writer) Update

func (w *Writer) Update(id segment.Term, doc segment.Document) error

Directories

Path Synopsis
lang/en
Package en implements an analyzer with reasonable defaults for processing English text.
token
Package lowercase implements a TokenFilter which converts tokens to lower case according to unicode rules.
cmd
mergeplan
Package mergeplan provides a segment merge planning approach that's inspired by Lucene's TieredMergePolicy.java and descriptions like http://blog.mikemccandless.com/2011/02/visualizing-lucenes-segment-merges.html
geo
