Documentation ¶
Index ¶
- Constants
- Variables
- type Allocation
- func (allocation *Allocation) SetEnable(enable AllocationVal) *Allocation
- func (allocation *Allocation) SetExclude(exclude AllocationAttributes) *Allocation
- func (allocation *Allocation) SetInclude(include AllocationAttributes) *Allocation
- func (allocation *Allocation) SetRequire(require AllocationAttributes) *Allocation
- func (allocation *Allocation) SetTotalShardsPerNode(totalShardsPerNode uint) *Allocation
- func (allocation *Allocation) Source() (interface{}, error)
- type AllocationAttribute
- type AllocationAttributes
- func (attributes AllocationAttributes) HostIps(values ...interface{}) AllocationAttributes
- func (attributes AllocationAttributes) Hosts(values ...interface{}) AllocationAttributes
- func (attributes AllocationAttributes) Ids(values ...interface{}) AllocationAttributes
- func (attributes AllocationAttributes) Ips(values ...interface{}) AllocationAttributes
- func (attributes AllocationAttributes) Names(values ...interface{}) AllocationAttributes
- func (attributes AllocationAttributes) PublishIps(values ...interface{}) AllocationAttributes
- func (attributes AllocationAttributes) Set(name string, values ...interface{}) AllocationAttributes
- type AllocationVal
- type Analysis
- func (analysis *Analysis) AddAnalyzer(analyzer ...Analyzer) *Analysis
- func (analysis *Analysis) AddCharFilter(charFilter ...CharFilter) *Analysis
- func (analysis *Analysis) AddFilter(filter ...Filter) *Analysis
- func (analysis *Analysis) AddNormalizer(normalizer ...Normalizer) *Analysis
- func (analysis *Analysis) AddTokenizer(tokenizer ...Tokenizer) *Analysis
- func (analysis *Analysis) Source() (interface{}, error)
- type Analyze
- type Analyzer
- type AnalyzerFingerprint
- func (analyzer *AnalyzerFingerprint) AddStopWords(stopWords ...string) *AnalyzerFingerprint
- func (analyzer *AnalyzerFingerprint) Name() AnalyzerName
- func (analyzer *AnalyzerFingerprint) SetMaxOutputSize(maxOutputSize uint8) *AnalyzerFingerprint
- func (analyzer *AnalyzerFingerprint) SetSeparator(separator string) *AnalyzerFingerprint
- func (analyzer *AnalyzerFingerprint) SetStopWordsPath(stopWordsPath string) *AnalyzerFingerprint
- func (analyzer *AnalyzerFingerprint) Source() (interface{}, error)
- func (analyzer *AnalyzerFingerprint) Type() AnalyzerType
- type AnalyzerName
- type AnalyzerPattern
- func (analyzer *AnalyzerPattern) AddFlags(flags ...JavaRegularFlag) *AnalyzerPattern
- func (analyzer *AnalyzerPattern) AddStopWords(stopWords ...string) *AnalyzerPattern
- func (analyzer *AnalyzerPattern) Name() AnalyzerName
- func (analyzer *AnalyzerPattern) SetLowercase(lowercase bool) *AnalyzerPattern
- func (analyzer *AnalyzerPattern) SetPattern(pattern string) *AnalyzerPattern
- func (analyzer *AnalyzerPattern) SetStopWordsPath(stopWordsPath string) *AnalyzerPattern
- func (analyzer *AnalyzerPattern) Source() (interface{}, error)
- func (analyzer *AnalyzerPattern) Type() AnalyzerType
- type AnalyzerStandard
- func (analyzer *AnalyzerStandard) AddStopWords(stopWords ...string) *AnalyzerStandard
- func (analyzer *AnalyzerStandard) Name() AnalyzerName
- func (analyzer *AnalyzerStandard) SetMaxTokenLength(maxTokenLength uint8) *AnalyzerStandard
- func (analyzer *AnalyzerStandard) SetStopWordsPath(stopWordsPath string) *AnalyzerStandard
- func (analyzer *AnalyzerStandard) Source() (interface{}, error)
- func (analyzer *AnalyzerStandard) Type() AnalyzerType
- type AnalyzerStop
- func (analyzer *AnalyzerStop) AddStopWords(stopWords ...string) *AnalyzerStop
- func (analyzer *AnalyzerStop) Name() AnalyzerName
- func (analyzer *AnalyzerStop) SetStopWordsPath(stopWordsPath string) *AnalyzerStop
- func (analyzer *AnalyzerStop) Source() (interface{}, error)
- func (analyzer *AnalyzerStop) Type() AnalyzerType
- type AnalyzerType
- type Blocks
- func (blocks *Blocks) SetMetadata(metadata bool) *Blocks
- func (blocks *Blocks) SetRead(read bool) *Blocks
- func (blocks *Blocks) SetReadOnly(readOnly bool) *Blocks
- func (blocks *Blocks) SetReadOnlyAllowDelete(readOnlyAllowDelete bool) *Blocks
- func (blocks *Blocks) SetWrite(write bool) *Blocks
- func (blocks *Blocks) Source() (interface{}, error)
- type CharFilter
- type CharFilterHtmlStrip
- type CharFilterMapping
- func (filter *CharFilterMapping) AddMappings(mappings ...*CharMapping) *CharFilterMapping
- func (filter *CharFilterMapping) Name() CharFilterName
- func (filter *CharFilterMapping) SetMappingsPath(mappingsPath string) *CharFilterMapping
- func (filter *CharFilterMapping) Source() (interface{}, error)
- func (filter *CharFilterMapping) Type() CharFilterType
- type CharFilterName
- type CharFilterPatternReplace
- func (filter *CharFilterPatternReplace) AddFlags(flags ...JavaRegularFlag) *CharFilterPatternReplace
- func (filter *CharFilterPatternReplace) Name() CharFilterName
- func (filter *CharFilterPatternReplace) SetPattern(pattern string) *CharFilterPatternReplace
- func (filter *CharFilterPatternReplace) SetReplacement(replacement string) *CharFilterPatternReplace
- func (filter *CharFilterPatternReplace) Source() (interface{}, error)
- func (filter *CharFilterPatternReplace) Type() CharFilterType
- type CharFilterType
- type CharMapping
- type CheckOnStartup
- type EdgeNgramTokenChars
- type Filter
- type FilterAsciiFolding
- type FilterBigramIgnoredScripts
- type FilterCjkBigRam
- func (filter *FilterCjkBigRam) AddIgnoredScripts(ignoredScripts ...FilterBigramIgnoredScripts) *FilterCjkBigRam
- func (filter *FilterCjkBigRam) Name() FilterName
- func (filter *FilterCjkBigRam) SetOutputUnigrams(outputUnigrams bool) *FilterCjkBigRam
- func (filter *FilterCjkBigRam) Source() (interface{}, error)
- func (filter *FilterCjkBigRam) Type() FilterType
- type FilterCommonGrams
- func (filter *FilterCommonGrams) AddCommonWords(commonWords ...string) *FilterCommonGrams
- func (filter *FilterCommonGrams) Name() FilterName
- func (filter *FilterCommonGrams) SetCommonWordsPath(commonWordsPath string) *FilterCommonGrams
- func (filter *FilterCommonGrams) SetIgnoreCase(ignoreCase bool) *FilterCommonGrams
- func (filter *FilterCommonGrams) SetQueryMode(queryMode bool) *FilterCommonGrams
- func (filter *FilterCommonGrams) Source() (interface{}, error)
- func (filter *FilterCommonGrams) Type() FilterType
- type FilterCondition
- func (filter *FilterCondition) AddFilter(filters ...FilterName) *FilterCondition
- func (filter *FilterCondition) Name() FilterName
- func (filter *FilterCondition) SetScript(script *elastic.Script) *FilterCondition
- func (filter *FilterCondition) Source() (interface{}, error)
- func (filter *FilterCondition) Type() FilterType
- type FilterDelimitedPayload
- func (filter *FilterDelimitedPayload) Name() FilterName
- func (filter *FilterDelimitedPayload) SetDelimiter(delimiter string) *FilterDelimitedPayload
- func (filter *FilterDelimitedPayload) SetEncoding(encoding FilterDelimitedPayloadEncoding) *FilterDelimitedPayload
- func (filter *FilterDelimitedPayload) Source() (interface{}, error)
- func (filter *FilterDelimitedPayload) Type() FilterType
- type FilterDelimitedPayloadEncoding
- type FilterDictionaryDecompounder
- func (filter *FilterDictionaryDecompounder) AddWordList(wordList ...string) *FilterDictionaryDecompounder
- func (filter *FilterDictionaryDecompounder) Name() FilterName
- func (filter *FilterDictionaryDecompounder) SetMaxSubwordSize(maxSubwordSize uint32) *FilterDictionaryDecompounder
- func (filter *FilterDictionaryDecompounder) SetMinSubwordSize(minSubwordSize uint32) *FilterDictionaryDecompounder
- func (filter *FilterDictionaryDecompounder) SetMinWordSize(minWordSize uint32) *FilterDictionaryDecompounder
- func (filter *FilterDictionaryDecompounder) SetOnlyLongestMatch(onlyLongestMatch bool) *FilterDictionaryDecompounder
- func (filter *FilterDictionaryDecompounder) SetWordListPath(wordListPath string) *FilterDictionaryDecompounder
- func (filter *FilterDictionaryDecompounder) Source() (interface{}, error)
- func (filter *FilterDictionaryDecompounder) Type() FilterType
- type FilterEdgeNgram
- func (filter *FilterEdgeNgram) Name() FilterName
- func (filter *FilterEdgeNgram) SetMaxGram(maxGram uint32) *FilterEdgeNgram
- func (filter *FilterEdgeNgram) SetMinGram(minGram uint32) *FilterEdgeNgram
- func (filter *FilterEdgeNgram) SetPreserveOriginal(preserveOriginal bool) *FilterEdgeNgram
- func (filter *FilterEdgeNgram) SetSide(side FilterEdgeNgramSide) *FilterEdgeNgram
- func (filter *FilterEdgeNgram) Source() (interface{}, error)
- func (filter *FilterEdgeNgram) Type() FilterType
- type FilterEdgeNgramSide
- type FilterElision
- func (filter *FilterElision) AddArticles(articles ...string) *FilterElision
- func (filter *FilterElision) Name() FilterName
- func (filter *FilterElision) SetArticlesCase(articlesCase bool) *FilterElision
- func (filter *FilterElision) SetArticlesPath(articlesPath string) *FilterElision
- func (filter *FilterElision) Source() (interface{}, error)
- func (filter *FilterElision) Type() FilterType
- type FilterFingerprint
- func (filter *FilterFingerprint) Name() FilterName
- func (filter *FilterFingerprint) SetMaxOutputSize(maxOutputSize uint32) *FilterFingerprint
- func (filter *FilterFingerprint) SetSeparator(separator string) *FilterFingerprint
- func (filter *FilterFingerprint) Source() (interface{}, error)
- func (filter *FilterFingerprint) Type() FilterType
- type FilterHyphenationDecompounder
- func (filter *FilterHyphenationDecompounder) AddWordList(wordList ...string) *FilterHyphenationDecompounder
- func (filter *FilterHyphenationDecompounder) Name() FilterName
- func (filter *FilterHyphenationDecompounder) SetHyphenationPatternsPath(hyphenationPatternsPath string) *FilterHyphenationDecompounder
- func (filter *FilterHyphenationDecompounder) SetMaxSubwordSize(maxSubwordSize uint32) *FilterHyphenationDecompounder
- func (filter *FilterHyphenationDecompounder) SetMinSubwordSize(minSubwordSize uint32) *FilterHyphenationDecompounder
- func (filter *FilterHyphenationDecompounder) SetMinWordSize(minWordSize uint32) *FilterHyphenationDecompounder
- func (filter *FilterHyphenationDecompounder) SetOnlyLongestMatch(onlyLongestMatch bool) *FilterHyphenationDecompounder
- func (filter *FilterHyphenationDecompounder) SetWordListPath(wordListPath string) *FilterHyphenationDecompounder
- func (filter *FilterHyphenationDecompounder) Source() (interface{}, error)
- func (filter *FilterHyphenationDecompounder) Type() FilterType
- type FilterKeep
- func (filter *FilterKeep) AddKeepWords(keepWords ...string) *FilterKeep
- func (filter *FilterKeep) Name() FilterName
- func (filter *FilterKeep) SetKeepWordsCase(keepWordsCase bool) *FilterKeep
- func (filter *FilterKeep) SetKeepWordsPath(keepWordsPath string) *FilterKeep
- func (filter *FilterKeep) Source() (interface{}, error)
- func (filter *FilterKeep) Type() FilterType
- type FilterKeepTypes
- func (filter *FilterKeepTypes) AddTypes(types ...string) *FilterKeepTypes
- func (filter *FilterKeepTypes) Name() FilterName
- func (filter *FilterKeepTypes) SetMode(mode FilterKeepTypesMode) *FilterKeepTypes
- func (filter *FilterKeepTypes) Source() (interface{}, error)
- func (filter *FilterKeepTypes) Type() FilterType
- type FilterKeepTypesMode
- type FilterKeywordMarker
- func (filter *FilterKeywordMarker) AddKeywords(keywords ...string) *FilterKeywordMarker
- func (filter *FilterKeywordMarker) Name() FilterName
- func (filter *FilterKeywordMarker) SetIgnoreCase(ignoreCase bool) *FilterKeywordMarker
- func (filter *FilterKeywordMarker) SetKeywordsPath(keywordsPath string) *FilterKeywordMarker
- func (filter *FilterKeywordMarker) SetKeywordsPattern(keywordsPattern string) *FilterKeywordMarker
- func (filter *FilterKeywordMarker) Source() (interface{}, error)
- func (filter *FilterKeywordMarker) Type() FilterType
- type FilterLength
- type FilterLimit
- func (filter *FilterLimit) Name() FilterName
- func (filter *FilterLimit) SetConsumeAllTokens(consumeAllTokens bool) *FilterLimit
- func (filter *FilterLimit) SetMaxTokenCount(maxTokenCount uint32) *FilterLimit
- func (filter *FilterLimit) Source() (interface{}, error)
- func (filter *FilterLimit) Type() FilterType
- type FilterLowercase
- type FilterLowercaseLanguageType
- type FilterMinHash
- func (filter *FilterMinHash) Name() FilterName
- func (filter *FilterMinHash) SetBucketCount(bucketCount uint32) *FilterMinHash
- func (filter *FilterMinHash) SetHashCount(hashCount uint32) *FilterMinHash
- func (filter *FilterMinHash) SetHashSetSize(hashSetSize uint32) *FilterMinHash
- func (filter *FilterMinHash) SetWithRotation(withRotation bool) *FilterMinHash
- func (filter *FilterMinHash) Source() (interface{}, error)
- func (filter *FilterMinHash) Type() FilterType
- type FilterMultiplexer
- type FilterName
- type FilterNgram
- func (filter *FilterNgram) Name() FilterName
- func (filter *FilterNgram) SetMaxGram(maxGram uint8) *FilterNgram
- func (filter *FilterNgram) SetMinGram(minGram uint8) *FilterNgram
- func (filter *FilterNgram) SetPreserveOriginal(preserveOriginal bool) *FilterNgram
- func (filter *FilterNgram) Source() (interface{}, error)
- func (filter *FilterNgram) Type() FilterType
- type FilterPatternCapture
- func (filter *FilterPatternCapture) AddPatterns(patterns ...string) *FilterPatternCapture
- func (filter *FilterPatternCapture) Name() FilterName
- func (filter *FilterPatternCapture) SetPreserveOriginal(preserveOriginal bool) *FilterPatternCapture
- func (filter *FilterPatternCapture) Source() (interface{}, error)
- func (filter *FilterPatternCapture) Type() FilterType
- type FilterPatternReplace
- func (filter *FilterPatternReplace) Name() FilterName
- func (filter *FilterPatternReplace) SetAll(all bool) *FilterPatternReplace
- func (filter *FilterPatternReplace) SetPattern(pattern string) *FilterPatternReplace
- func (filter *FilterPatternReplace) SetReplacement(replacement string) *FilterPatternReplace
- func (filter *FilterPatternReplace) Source() (interface{}, error)
- func (filter *FilterPatternReplace) Type() FilterType
- type FilterPhonetic
- func (filter *FilterPhonetic) Name() FilterName
- func (filter *FilterPhonetic) SetEncoder(encoder FilterPhoneticEncoder) *FilterPhonetic
- func (filter *FilterPhonetic) SetLanguageSet(languageSet FilterPhoneticLanguageSet) *FilterPhonetic
- func (filter *FilterPhonetic) SetMaxCodeLen(maxCodeLen uint32) *FilterPhonetic
- func (filter *FilterPhonetic) SetNameType(nameType FilterPhoneticNameType) *FilterPhonetic
- func (filter *FilterPhonetic) SetReplace(replace bool) *FilterPhonetic
- func (filter *FilterPhonetic) SetRuleType(ruleType FilterPhoneticRuleType) *FilterPhonetic
- func (filter *FilterPhonetic) Source() (interface{}, error)
- func (filter *FilterPhonetic) Type() FilterType
- type FilterPhoneticEncoder
- type FilterPhoneticLanguageSet
- type FilterPhoneticNameType
- type FilterPhoneticRuleType
- type FilterPredicateTokenFilter
- type FilterShingle
- func (filter *FilterShingle) Name() FilterName
- func (filter *FilterShingle) SetFillerToken(fillerToken string) *FilterShingle
- func (filter *FilterShingle) SetMaxShingleSize(maxShingleSize uint32) *FilterShingle
- func (filter *FilterShingle) SetMinShingleSize(minShingleSize uint32) *FilterShingle
- func (filter *FilterShingle) SetOutputUnigrams(outputUnigrams bool) *FilterShingle
- func (filter *FilterShingle) SetOutputUnigramsIfNoShingles(outputUnigramsIfNoShingles bool) *FilterShingle
- func (filter *FilterShingle) SetTokenSeparator(tokenSeparator string) *FilterShingle
- func (filter *FilterShingle) Source() (interface{}, error)
- func (filter *FilterShingle) Type() FilterType
- type FilterSnowball
- type FilterSnowballLanguage
- type FilterStemmer
- type FilterStemmerLanguage
- type FilterStemmerOverride
- func (filter *FilterStemmerOverride) AddRules(rules ...*StemmerOverrideRule) *FilterStemmerOverride
- func (filter *FilterStemmerOverride) Name() FilterName
- func (filter *FilterStemmerOverride) SetRulesPath(rulesPath string) *FilterStemmerOverride
- func (filter *FilterStemmerOverride) Source() (interface{}, error)
- func (filter *FilterStemmerOverride) Type() FilterType
- type FilterStop
- func (filter *FilterStop) AddStopWords(stopWords ...string) *FilterStop
- func (filter *FilterStop) Name() FilterName
- func (filter *FilterStop) SetIgnoreCase(ignoreCase bool) *FilterStop
- func (filter *FilterStop) SetRemoveTrailing(removeTrailing bool) *FilterStop
- func (filter *FilterStop) SetStopWordsPath(stopWordsPath string) *FilterStop
- func (filter *FilterStop) Source() (interface{}, error)
- func (filter *FilterStop) Type() FilterType
- type FilterSynonym
- func (filter *FilterSynonym) AddSynonyms(synonyms ...Synonym) *FilterSynonym
- func (filter *FilterSynonym) Name() FilterName
- func (filter *FilterSynonym) SetExpand(expand bool) *FilterSynonym
- func (filter *FilterSynonym) SetFormat(format FilterSynonymFormat) *FilterSynonym
- func (filter *FilterSynonym) SetLenient(lenient bool) *FilterSynonym
- func (filter *FilterSynonym) SetSynonymsPath(synonymsPath string) *FilterSynonym
- func (filter *FilterSynonym) Source() (interface{}, error)
- func (filter *FilterSynonym) Type() FilterType
- type FilterSynonymFormat
- type FilterTruncate
- type FilterType
- type FilterUnique
- type FilterWordDelimiter
- func (filter *FilterWordDelimiter) AddProtectedWords(protectedWords ...string) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) AddTypeTable(typeTable ...*WordDelimiterType) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) Name() FilterName
- func (filter *FilterWordDelimiter) SetCatenateAll(catenateAll bool) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetCatenateNumbers(catenateNumbers bool) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetCatenateWords(catenateWords bool) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetGenerateNumberParts(generateNumberParts bool) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetGenerateWordParts(generateWordParts bool) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetPreserveOriginal(preserveOriginal bool) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetProtectedWordsPath(protectedWordsPath string) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetSplitOnCaseChange(splitOnCaseChange bool) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetSplitOnNumerics(splitOnNumerics bool) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetStemEnglishPossessive(stemEnglishPossessive bool) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) SetTypeTablePath(typeTablePath string) *FilterWordDelimiter
- func (filter *FilterWordDelimiter) Source() (interface{}, error)
- func (filter *FilterWordDelimiter) Type() FilterType
- type FilterWordDelimiterGraph
- func (filter *FilterWordDelimiterGraph) SetAdjustOffsets(adjustOffsets bool) *FilterWordDelimiterGraph
- func (filter *FilterWordDelimiterGraph) SetIgnoreKeywords(ignoreKeywords bool) *FilterWordDelimiterGraph
- func (filter *FilterWordDelimiterGraph) Source() (interface{}, error)
- func (filter *FilterWordDelimiterGraph) Type() FilterType
- type Highlight
- type Idle
- type Index
- func (index *Index) AddSimilarity(similarity ...Similarity) *Index
- func (index *Index) SetAnalysis(analysis *Analysis) *Index
- func (index *Index) SetAnalyze(analyze *Analyze) *Index
- func (index *Index) SetAutoExpandReplicas(autoExpandReplicas string) *Index
- func (index *Index) SetBlocks(blocks *Blocks) *Index
- func (index *Index) SetCodec(codec IndexCode) *Index
- func (index *Index) SetDefaultPipeline(defaultPipeline PipelineName) *Index
- func (index *Index) SetFinalPipeline(finalPipeline PipelineName) *Index
- func (index *Index) SetFlushAfterMerge(flushAfterMerge *Size) *Index
- func (index *Index) SetGcDeletes(gcDeletes *Interval) *Index
- func (index *Index) SetHidden(hidden bool) *Index
- func (index *Index) SetHighlight(highlight *Highlight) *Index
- func (index *Index) SetIndexing(indexing *Slowlog) *Index
- func (index *Index) SetLoadFixedBitsetFiltersEagerly(loadFixedBitsetFiltersEagerly bool) *Index
- func (index *Index) SetMapping(mapping *Mapping) *Index
- func (index *Index) SetMaxAdjacencyMatrixFilters(maxAdjacencyMatrixFilters uint32) *Index
- func (index *Index) SetMaxDocvalueFieldsSearch(maxDocvalueFieldsSearch uint32) *Index
- func (index *Index) SetMaxInnerResultWindow(maxInnerResultWindow uint32) *Index
- func (index *Index) SetMaxNgramDiff(maxNgramDiff uint32) *Index
- func (index *Index) SetMaxRefreshListeners(maxRefreshListeners uint32) *Index
- func (index *Index) SetMaxRegexLength(maxRegexLength uint32) *Index
- func (index *Index) SetMaxRescoreWindow(maxRescoreWindow uint32) *Index
- func (index *Index) SetMaxResultWindow(maxResultWindow uint32) *Index
- func (index *Index) SetMaxScriptFields(maxScriptFields uint32) *Index
- func (index *Index) SetMaxShingleDiff(maxShingleDiff uint32) *Index
- func (index *Index) SetMaxSlicesPerScroll(maxSlicesPerScroll uint32) *Index
- func (index *Index) SetMaxTermsCount(maxTermsCount uint32) *Index
- func (index *Index) SetMerge(merge *Merge) *Index
- func (index *Index) SetNumberOfReplicas(numberOfReplicas uint32) *Index
- func (index *Index) SetNumberOfRoutingShards(numberOfRoutingShards uint32) *Index
- func (index *Index) SetNumberOfShards(numberOfShards uint32) *Index
- func (index *Index) SetRefreshInterval(refreshInterval *Interval) *Index
- func (index *Index) SetRouting(routing *Routing) *Index
- func (index *Index) SetRoutingPartitionSize(routingPartitionSize uint32) *Index
- func (index *Index) SetSearch(search *Search) *Index
- func (index *Index) SetShard(shard *Shard) *Index
- func (index *Index) SetSoftDeletes(softDeletes *SoftDeletes) *Index
- func (index *Index) SetSourceOnly(sourceOnly bool) *Index
- func (index *Index) SetTranslog(translog *Translog) *Index
- func (index *Index) SetUnassigned(unassigned *Unassigned) *Index
- func (index *Index) SetVerifiedBeforeClose(verifiedBeforeClose bool) *Index
- func (index *Index) SetWrite(write *Write) *Index
- func (index *Index) Source() (interface{}, error)
- type IndexCode
- type Interval
- type JavaRegularFlag
- type Limit
- type LogLevel
- type Mapping
- func (mapping *Mapping) SetCoerce(coerce bool) *Mapping
- func (mapping *Mapping) SetDepth(depth *Limit) *Mapping
- func (mapping *Mapping) SetFieldNameLength(fieldNameLength *Limit) *Mapping
- func (mapping *Mapping) SetIgnoreMalformed(ignoreMalformed bool) *Mapping
- func (mapping *Mapping) SetNestedFields(nestedFields *Limit) *Mapping
- func (mapping *Mapping) SetNestedObjects(nestedObjects *Limit) *Mapping
- func (mapping *Mapping) SetTotalFields(totalFields *Limit) *Mapping
- func (mapping *Mapping) Source() (interface{}, error)
- type Merge
- type NodeLeft
- type Normalizer
- type NormalizerCustom
- func (normalizer *NormalizerCustom) AddCharFilter(charFilter ...CharFilterName) *NormalizerCustom
- func (normalizer *NormalizerCustom) AddFilter(filter ...FilterName) *NormalizerCustom
- func (normalizer *NormalizerCustom) Name() NormalizerName
- func (normalizer *NormalizerCustom) Source() (interface{}, error)
- func (normalizer *NormalizerCustom) Type() NormalizerType
- type NormalizerName
- type NormalizerType
- type PipelineName
- type Policy
- func (policy *Policy) SetDeletesPctAllowed(deletesPctAllowed float32) *Policy
- func (policy *Policy) SetExpungeDeletesAllowed(expungeDeletesAllowed float32) *Policy
- func (policy *Policy) SetFloorSegment(floorSegment *Size) *Policy
- func (policy *Policy) SetMaxMergeAtOnce(maxMergeAtOnce uint) *Policy
- func (policy *Policy) SetMaxMergeAtOnceExplicit(maxMergeAtOnceExplicit uint) *Policy
- func (policy *Policy) SetMaxMergedSegment(maxMergedSegment *Size) *Policy
- func (policy *Policy) SetReclaimDeletesWeight(reclaimDeletesWeight float32) *Policy
- func (policy *Policy) SetSegmentsPerTier(segmentsPerTier float32) *Policy
- func (policy *Policy) Source() (interface{}, error)
- type Rebalance
- type RebalanceVal
- type Retention
- type RetentionLease
- type Routing
- type Scheduler
- type Search
- type Settings
- type Shard
- type Similarity
- type SimilarityBM25
- func (similarity *SimilarityBM25) Name() SimilarityName
- func (similarity *SimilarityBM25) SetB(b float32) *SimilarityBM25
- func (similarity *SimilarityBM25) SetDiscountOverlaps(discountOverlaps bool) *SimilarityBM25
- func (similarity *SimilarityBM25) SetK1(k1 float32) *SimilarityBM25
- func (similarity *SimilarityBM25) Source() (interface{}, error)
- func (similarity *SimilarityBM25) Type() SimilarityType
- type SimilarityDFI
- func (similarity *SimilarityDFI) Name() SimilarityName
- func (similarity *SimilarityDFI) SetIndependenceMeasure(independenceMeasure SimilarityDFIIndependenceMeasureType) *SimilarityDFI
- func (similarity *SimilarityDFI) Source() (interface{}, error)
- func (similarity *SimilarityDFI) Type() SimilarityType
- type SimilarityDFIIndependenceMeasureType
- type SimilarityDFR
- func (similarity *SimilarityDFR) Name() SimilarityName
- func (similarity *SimilarityDFR) SetAfterEffect(afterEffect SimilarityDFRAfterEffectType) *SimilarityDFR
- func (similarity *SimilarityDFR) SetBasicModel(basicModel SimilarityDFRBasicModelType) *SimilarityDFR
- func (similarity *SimilarityDFR) SetNormalization(normalization SimilarityDFRNormalizationType, value float32) *SimilarityDFR
- func (similarity *SimilarityDFR) Source() (interface{}, error)
- func (similarity *SimilarityDFR) Type() SimilarityType
- type SimilarityDFRAfterEffectType
- type SimilarityDFRBasicModelType
- type SimilarityDFRNormalizationType
- type SimilarityDefault
- type SimilarityIB
- func (similarity *SimilarityIB) Name() SimilarityName
- func (similarity *SimilarityIB) SetDistribution(distribution SimilarityIBDistributionType) *SimilarityIB
- func (similarity *SimilarityIB) SetLambda(lambda SimilarityIBLambdaType) *SimilarityIB
- func (similarity *SimilarityIB) SetNormalization(normalization SimilarityDFRNormalizationType, value float32) *SimilarityIB
- func (similarity *SimilarityIB) Source() (interface{}, error)
- func (similarity *SimilarityIB) Type() SimilarityType
- type SimilarityIBDistributionType
- type SimilarityIBLambdaType
- type SimilarityLMDirichlet
- type SimilarityLMJelinekMercer
- func (similarity *SimilarityLMJelinekMercer) Name() SimilarityName
- func (similarity *SimilarityLMJelinekMercer) SetLambda(lambda float32) *SimilarityLMJelinekMercer
- func (similarity *SimilarityLMJelinekMercer) Source() (interface{}, error)
- func (similarity *SimilarityLMJelinekMercer) Type() SimilarityType
- type SimilarityName
- type SimilarityScript
- type SimilarityScripted
- func (similarity *SimilarityScripted) Name() SimilarityName
- func (similarity *SimilarityScripted) SetScript(script *SimilarityScript) *SimilarityScripted
- func (similarity *SimilarityScripted) SetWeightScript(script *SimilarityScript) *SimilarityScripted
- func (similarity *SimilarityScripted) Source() (interface{}, error)
- func (similarity *SimilarityScripted) Type() SimilarityType
- type SimilarityType
- type Size
- type SlowLogLevel
- type Slowlog
- func (slowlog *Slowlog) SetLevel(level SlowLogLevel) *Slowlog
- func (slowlog *Slowlog) SetReformat(reformat bool) *Slowlog
- func (slowlog *Slowlog) SetSource(source uint32) *Slowlog
- func (slowlog *Slowlog) SetThreshold(threshold *Threshold) *Slowlog
- func (slowlog *Slowlog) Source() (interface{}, error)
- type SoftDeletes
- func (softDeletes *SoftDeletes) SetEnabled(enabled bool) *SoftDeletes
- func (softDeletes *SoftDeletes) SetRetention(retention *Retention) *SoftDeletes
- func (softDeletes *SoftDeletes) SetRetentionLease(retentionLease *RetentionLease) *SoftDeletes
- func (softDeletes *SoftDeletes) Source() (interface{}, error)
- type StemmerOverrideRule
- type Synonym
- type SynonymSolr
- type SynonymWordnet
- func (synonym *SynonymWordnet) SetSenseNumber(senseNumber uint32) *SynonymWordnet
- func (synonym *SynonymWordnet) SetSynsetId(synsetId uint32) *SynonymWordnet
- func (synonym *SynonymWordnet) SetSynsetType(synsetType string) *SynonymWordnet
- func (synonym *SynonymWordnet) SetTagCount(tagCount uint32) *SynonymWordnet
- func (synonym *SynonymWordnet) SetWord(word string) *SynonymWordnet
- func (synonym *SynonymWordnet) SetWordNumber(wordNumber uint32) *SynonymWordnet
- func (synonym *SynonymWordnet) Synonym() string
- type Threshold
- type Tokenizer
- type TokenizerCharGroup
- func (tokenizer *TokenizerCharGroup) AddTokenizeOnChars(tokenizeOnChars ...string) *TokenizerCharGroup
- func (tokenizer *TokenizerCharGroup) Name() TokenizerName
- func (tokenizer *TokenizerCharGroup) SetMaxTokenLength(maxTokenLength uint32) *TokenizerCharGroup
- func (tokenizer *TokenizerCharGroup) Source() (interface{}, error)
- func (tokenizer *TokenizerCharGroup) Type() TokenizerType
- type TokenizerClassic
- type TokenizerKeyword
- type TokenizerName
- type TokenizerNgram
- func (tokenizer *TokenizerNgram) AddCustomTokenChars(customTokenChars ...string) *TokenizerNgram
- func (tokenizer *TokenizerNgram) AddTokenChars(tokenChars ...EdgeNgramTokenChars) *TokenizerNgram
- func (tokenizer *TokenizerNgram) Name() TokenizerName
- func (tokenizer *TokenizerNgram) SetMaxGram(maxGram uint32) *TokenizerNgram
- func (tokenizer *TokenizerNgram) SetMinGram(minGram uint32) *TokenizerNgram
- func (tokenizer *TokenizerNgram) Source() (interface{}, error)
- func (tokenizer *TokenizerNgram) Type() TokenizerType
- type TokenizerPathHierarchy
- func (tokenizer *TokenizerPathHierarchy) Name() TokenizerName
- func (tokenizer *TokenizerPathHierarchy) SetBufferSize(bufferSize uint32) *TokenizerPathHierarchy
- func (tokenizer *TokenizerPathHierarchy) SetDelimiter(delimiter string) *TokenizerPathHierarchy
- func (tokenizer *TokenizerPathHierarchy) SetReplacement(replacement string) *TokenizerPathHierarchy
- func (tokenizer *TokenizerPathHierarchy) SetReverse(reverse bool) *TokenizerPathHierarchy
- func (tokenizer *TokenizerPathHierarchy) SetSkip(skip uint32) *TokenizerPathHierarchy
- func (tokenizer *TokenizerPathHierarchy) Source() (interface{}, error)
- func (tokenizer *TokenizerPathHierarchy) Type() TokenizerType
- type TokenizerPattern
- func (tokenizer *TokenizerPattern) AddFlags(flags ...JavaRegularFlag) *TokenizerPattern
- func (tokenizer *TokenizerPattern) Name() TokenizerName
- func (tokenizer *TokenizerPattern) SetGroup(group uint8) *TokenizerPattern
- func (tokenizer *TokenizerPattern) SetPattern(pattern string) *TokenizerPattern
- func (tokenizer *TokenizerPattern) Source() (interface{}, error)
- func (tokenizer *TokenizerPattern) Type() TokenizerType
- type TokenizerSimplePattern
- type TokenizerStandard
- type TokenizerType
- type Translog
- type TranslogDurability
- type Unassigned
- type WordDelimiter
- type WordDelimiterType
- type Write
Constants ¶
const (
	AllocationAttributeName      AllocationAttribute = "_name"
	AllocationAttributeHostIp    AllocationAttribute = "_host_ip"
	AllocationAttributePublishIp AllocationAttribute = "_publish_ip"
	AllocationAttributeIp        AllocationAttribute = "_ip"
	AllocationAttributeHost      AllocationAttribute = "_host"
	AllocationAttributeId        AllocationAttribute = "_id"

	AllocationAll          AllocationVal = "all"
	AllocationPrimaries    AllocationVal = "primaries"
	AllocationNewPrimaries AllocationVal = "new_primaries"
	AllocationNone         AllocationVal = "none"
)
const (
	AnalyzerNameStandard   AnalyzerName = "standard"
	AnalyzerNameSimple     AnalyzerName = "simple"
	AnalyzerNameWhitespace AnalyzerName = "whitespace"
	AnalyzerNameStop       AnalyzerName = "stop"
	AnalyzerNameKeyword    AnalyzerName = "keyword"

	// see https://www.elastic.co/guide/en/elasticsearch/reference/7.x/analysis-lang-analyzer.html
	AnalyzerNameArabic     AnalyzerName = "arabic"
	AnalyzerNameArmenian   AnalyzerName = "armenian"
	AnalyzerNameBasque     AnalyzerName = "basque"
	AnalyzerNameBengali    AnalyzerName = "bengali"
	AnalyzerNameBulgarian  AnalyzerName = "bulgarian"
	AnalyzerNameCatalan    AnalyzerName = "catalan"
	AnalyzerNameCzech      AnalyzerName = "czech"
	AnalyzerNameDutch      AnalyzerName = "dutch"
	AnalyzerNameEnglish    AnalyzerName = "english"
	AnalyzerNameFinnish    AnalyzerName = "finnish"
	AnalyzerNameFrench     AnalyzerName = "french"
	AnalyzerNameGalician   AnalyzerName = "galician"
	AnalyzerNameGerman     AnalyzerName = "german"
	AnalyzerNameHindi      AnalyzerName = "hindi"
	AnalyzerNameHungarian  AnalyzerName = "hungarian"
	AnalyzerNameIndonesian AnalyzerName = "indonesian"
	AnalyzerNameIrish      AnalyzerName = "irish"
	AnalyzerNameItalian    AnalyzerName = "italian"
	AnalyzerNameLatvian    AnalyzerName = "latvian"
	AnalyzerNameLithuanian AnalyzerName = "lithuanian"
	AnalyzerNameNorwegian  AnalyzerName = "norwegian"
	AnalyzerNamePortuguese AnalyzerName = "portuguese"
	AnalyzerNameRomanian   AnalyzerName = "romanian"
	AnalyzerNameRussian    AnalyzerName = "russian"
	AnalyzerNameSorani     AnalyzerName = "sorani"
	AnalyzerNameSpanish    AnalyzerName = "spanish"
	AnalyzerNameSwedish    AnalyzerName = "swedish"
	AnalyzerNameTurkish    AnalyzerName = "turkish"

	AnalyzerTypeStandard    AnalyzerType = "standard"
	AnalyzerTypeStop        AnalyzerType = "stop"
	AnalyzerTypePattern     AnalyzerType = "pattern"
	AnalyzerTypeFingerprint AnalyzerType = "fingerprint"
)
const ( CharFilterNameHtmlStrip CharFilterName = "html_strip" CharFilterTypeHtmlStrip CharFilterType = "html_strip" CharFilterTypeMapping CharFilterType = "mapping" CharFilterTypePatternReplace CharFilterType = "pattern_replace" )
const ( FilterNameArabic FilterName = "arabic_normalization" FilterNameAsciifolding FilterName = "asciifolding" FilterNameBengali FilterName = "bengali_normalization" FilterNameCjkWidth FilterName = "cjk_width" FilterNameDecimalDigit FilterName = "decimal_digit" FilterNameDelimitedPayload FilterName = "delimited_payload" FilterNameElision FilterName = "elision" FilterNameGerman FilterName = "german_normalization" FilterNameHindi FilterName = "hindi_normalization" FilterNameIndic FilterName = "indic_normalization" FilterNameLowercase FilterName = "lowercase" FilterNamePersian FilterName = "persian_normalization" FilterNameSerbian FilterName = "serbian_normalization" FilterNameSorani FilterName = "sorani_normalization" FilterNameUppercase FilterName = "uppercase" FilterNameApostrophe FilterName = "apostrophe" FilterNameAsciiFolding FilterName = "asciifolding" FilterNameCjkBigram FilterName = "cjk_bigram" FilterNameClassic FilterName = "classic" FilterNameEdgeNgram FilterName = "edge_ngram" FilterNameFingerprint FilterName = "fingerprint" FilterNameFlattenGraph FilterName = "flatten_graph" FilterNameKeywordRepeat FilterName = "keyword_repeat" FilterNameKstem FilterName = "kstem" FilterNameLength FilterName = "length" FilterNameLimit FilterName = "limit" FilterNameNgram FilterName = "ngram" FilterNamePorterStem FilterName = "porter_stem" FilterNameRemoveDuplicates FilterName = "remove_duplicates" FilterNameReverse FilterName = "reverse" FilterNameShingle FilterName = "shingle" FilterNameStemmer FilterName = "stemmer" FilterNameStop FilterName = "stop" FilterNameTrim FilterName = "trim" FilterNameTruncate FilterName = "truncate" FilterNameUnique FilterName = "unique" FilterNameWordDelimiter FilterName = "word_delimiter" FilterNameWordDelimiterGraph FilterName = "word_delimiter_graph" FilterTypeAsciiFolding FilterType = "asciifolding" FilterTypeCjkBigram FilterType = "cjk_bigram" FilterTypeCommonGrams FilterType = "common_grams" FilterTypeCondition FilterType = "condition"
FilterTypeDelimitedPayload FilterType = "delimited_payload" FilterTypeDictionaryDecompounder FilterType = "dictionary_decompounder" FilterTypeEdgeNgram FilterType = "edge_ngram" FilterTypeElision FilterType = "elision" FilterTypeFingerprint FilterType = "fingerprint" FilterTypeHyphenationDecompounder FilterType = "hyphenation_decompounder" FilterTypeKeepTypes FilterType = "keep_types" FilterTypeKeep FilterType = "keep" FilterTypeKeywordMarker FilterType = "keyword_marker" FilterTypeLength FilterType = "length" FilterTypeLimit FilterType = "limit" FilterTypeLowercase FilterType = "lowercase" FilterTypeMinHash FilterType = "min_hash" FilterTypeMultiplexer FilterType = "multiplexer" FilterTypeNgram FilterType = "ngram" FilterTypePatternCapture FilterType = "pattern_capture" FilterTypePatternReplace FilterType = "pattern_replace" FilterTypePhonetic FilterType = "phonetic" FilterTypePredicateTokenFilter FilterType = "predicate_token_filter" FilterTypeShingle FilterType = "shingle" FilterTypeSnowball FilterType = "snowball" FilterTypeStemmer FilterType = "stemmer" FilterTypeStemmerOverride FilterType = "stemmer_override" FilterTypeStop FilterType = "stop" FilterTypeSynonym FilterType = "synonyms" FilterTypeSynonymGraph FilterType = "synonym_graph" FilterTypeTruncate FilterType = "truncate" FilterTypeUnique FilterType = "unique" FilterTypeWordDelimiter FilterType = "word_delimiter" FilterTypeWordDelimiterGraph FilterType = "word_delimiter_graph" BigramIgnoredScriptsHan FilterBigramIgnoredScripts = "han" BigramIgnoredScriptsHangul FilterBigramIgnoredScripts = "hangul" BigramIgnoredScriptsHiragana FilterBigramIgnoredScripts = "hiragana" BigramIgnoredScriptsKatakana FilterBigramIgnoredScripts = "katakana" DelimitedPayloadEncodingFloat FilterDelimitedPayloadEncoding = "float" DelimitedPayloadEncodingInteger FilterDelimitedPayloadEncoding = "int" DelimitedPayloadEncodingIdentity FilterDelimitedPayloadEncoding = "identity" EdgeNgramSideFront FilterEdgeNgramSide = "front"
EdgeNgramSideBack FilterEdgeNgramSide = "back" KeepTypesModeInclude FilterKeepTypesMode = "include" KeepTypesModeExclude FilterKeepTypesMode = "exclude" LowercaseLanguageTypeGreek FilterLowercaseLanguageType = "greek" LowercaseLanguageTypeIrish FilterLowercaseLanguageType = "irish" LowercaseLanguageTypeTurkish FilterLowercaseLanguageType = "turkish" PhoneticEncoderMetaphone FilterPhoneticEncoder = "metaphone" PhoneticEncoderDoubleMetaphone FilterPhoneticEncoder = "double_metaphone" PhoneticEncoderSoundex FilterPhoneticEncoder = "soundex" PhoneticEncoderRefinedSoundex FilterPhoneticEncoder = "refined_soundex" PhoneticEncoderCaverphone1 FilterPhoneticEncoder = "caverphone1" PhoneticEncoderCaverphone2 FilterPhoneticEncoder = "caverphone2" PhoneticEncoderCologne FilterPhoneticEncoder = "cologne" PhoneticEncoderNysiis FilterPhoneticEncoder = "nysiis" PhoneticEncoderKoelnerphonetik FilterPhoneticEncoder = "koelnerphonetik" PhoneticEncoderHaasephonetik FilterPhoneticEncoder = "haasephonetik" PhoneticEncoderBeiderMorse FilterPhoneticEncoder = "beider_morse" PhoneticEncoderDaitchMokotoff FilterPhoneticEncoder = "daitch_mokotoff" PhoneticRuleTypeExact FilterPhoneticRuleType = "exact" PhoneticRuleTypeApprox FilterPhoneticRuleType = "approx" PhoneticNameTypeAshkenazi FilterPhoneticNameType = "ashkenazi" PhoneticNameTypeSephardic FilterPhoneticNameType = "sephardic" PhoneticNameTypeGeneric FilterPhoneticNameType = "generic" PhoneticLanguageSetAny FilterPhoneticLanguageSet = "any" PhoneticLanguageSetCommon FilterPhoneticLanguageSet = "common" PhoneticLanguageSetCyrillic FilterPhoneticLanguageSet = "cyrillic" PhoneticLanguageSetEnglish FilterPhoneticLanguageSet = "english" PhoneticLanguageSetFrench FilterPhoneticLanguageSet = "french" PhoneticLanguageSetGerman FilterPhoneticLanguageSet = "german" PhoneticLanguageSetHebrew FilterPhoneticLanguageSet = "hebrew" PhoneticLanguageSetHungarian FilterPhoneticLanguageSet = "hungarian" PhoneticLanguageSetPolish FilterPhoneticLanguageSet = "polish"
PhoneticLanguageSetRomanian FilterPhoneticLanguageSet = "romanian" PhoneticLanguageSetRussian FilterPhoneticLanguageSet = "russian" PhoneticLanguageSetSpanish FilterPhoneticLanguageSet = "spanish" SnowballLanguageArabic FilterSnowballLanguage = "Arabic" SnowballLanguageArmenian FilterSnowballLanguage = "Armenian" SnowballLanguageBasque FilterSnowballLanguage = "Basque" SnowballLanguageCatalan FilterSnowballLanguage = "Catalan" SnowballLanguageDanish FilterSnowballLanguage = "Danish" SnowballLanguageDutch FilterSnowballLanguage = "Dutch" SnowballLanguageEnglish FilterSnowballLanguage = "English" SnowballLanguageEstonian FilterSnowballLanguage = "Estonian" SnowballLanguageFinnish FilterSnowballLanguage = "Finnish" SnowballLanguageFrench FilterSnowballLanguage = "French" SnowballLanguageGerman FilterSnowballLanguage = "German" SnowballLanguageGerman2 FilterSnowballLanguage = "German2" SnowballLanguageHungarian FilterSnowballLanguage = "Hungarian" SnowballLanguageItalian FilterSnowballLanguage = "Italian" SnowballLanguageIrish FilterSnowballLanguage = "Irish" SnowballLanguageKp FilterSnowballLanguage = "Kp" SnowballLanguageLithuanian FilterSnowballLanguage = "Lithuanian" SnowballLanguageLovins FilterSnowballLanguage = "Lovins" SnowballLanguageNorwegian FilterSnowballLanguage = "Norwegian" SnowballLanguagePorter FilterSnowballLanguage = "Porter" SnowballLanguagePortuguese FilterSnowballLanguage = "Portuguese" SnowballLanguageRomanian FilterSnowballLanguage = "Romanian" SnowballLanguageRussian FilterSnowballLanguage = "Russian" SnowballLanguageSpanish FilterSnowballLanguage = "Spanish" SnowballLanguageSwedish FilterSnowballLanguage = "Swedish" SnowballLanguageTurkish FilterSnowballLanguage = "Turkish" StemmerLanguageArabic FilterStemmerLanguage = "arabic" StemmerLanguageArmenian FilterStemmerLanguage = "armenian" StemmerLanguageBasque FilterStemmerLanguage = "basque" StemmerLanguageBengali FilterStemmerLanguage = "bengali"
StemmerLanguageBrazilian FilterStemmerLanguage = "brazilian" StemmerLanguageBulgarian FilterStemmerLanguage = "bulgarian" StemmerLanguageCatalan FilterStemmerLanguage = "catalan" StemmerLanguageCzech FilterStemmerLanguage = "czech" StemmerLanguageDanish FilterStemmerLanguage = "danish" StemmerLanguageDutch FilterStemmerLanguage = "dutch" StemmerLanguageDutchKp FilterStemmerLanguage = "dutch_kp" StemmerLanguageEnglish FilterStemmerLanguage = "english" StemmerLanguageLightEnglish FilterStemmerLanguage = "light_english" StemmerLanguageLovins FilterStemmerLanguage = "lovins" StemmerLanguageMinimalEnglish FilterStemmerLanguage = "minimal_english" StemmerLanguagePorter2 FilterStemmerLanguage = "porter2" StemmerLanguagePossessiveEnglish FilterStemmerLanguage = "possessive_english" StemmerLanguageEstonian FilterStemmerLanguage = "estonian" StemmerLanguageFinnish FilterStemmerLanguage = "finnish" StemmerLanguageLightFinnish FilterStemmerLanguage = "light_finnish" StemmerLanguageLightFrench FilterStemmerLanguage = "light_french" StemmerLanguageFrench FilterStemmerLanguage = "french" StemmerLanguageMinimalFrench FilterStemmerLanguage = "minimal_french" StemmerLanguageGalician FilterStemmerLanguage = "galician" StemmerLanguageMinimalGalician FilterStemmerLanguage = "minimal_galician" StemmerLanguageLightGerman FilterStemmerLanguage = "light_german" StemmerLanguageGerman FilterStemmerLanguage = "german" StemmerLanguageGerman2 FilterStemmerLanguage = "german2" StemmerLanguageMinimalGerman FilterStemmerLanguage = "minimal_german" StemmerLanguageGreek FilterStemmerLanguage = "greek" StemmerLanguageHindi FilterStemmerLanguage = "hindi" StemmerLanguageHungarian FilterStemmerLanguage = "hungarian" StemmerLanguageLightHungarian FilterStemmerLanguage = "light_hungarian" StemmerLanguageIndonesian FilterStemmerLanguage = "indonesian" StemmerLanguageIrish FilterStemmerLanguage = "irish" StemmerLanguageLightItalian FilterStemmerLanguage = "light_italian" StemmerLanguageItalian FilterStemmerLanguage = "italian"
StemmerLanguageSorani FilterStemmerLanguage = "sorani" StemmerLanguageLatvian FilterStemmerLanguage = "latvian" StemmerLanguageLithuanian FilterStemmerLanguage = "lithuanian" StemmerLanguageNorwegian FilterStemmerLanguage = "norwegian" StemmerLanguageLightNorwegian FilterStemmerLanguage = "light_norwegian" StemmerLanguageMinimalNorwegian FilterStemmerLanguage = "minimal_norwegian" StemmerLanguageLightNynorsk FilterStemmerLanguage = "light_nynorsk" StemmerLanguageMinimalNynorsk FilterStemmerLanguage = "minimal_nynorsk" StemmerLanguageLightPortuguese FilterStemmerLanguage = "light_portuguese" StemmerLanguageMinimalPortuguese FilterStemmerLanguage = "minimal_portuguese" StemmerLanguagePortuguese FilterStemmerLanguage = "portuguese" StemmerLanguagePortugueseRslp FilterStemmerLanguage = "portuguese_rslp" StemmerLanguageRomanian FilterStemmerLanguage = "romanian" StemmerLanguageRussian FilterStemmerLanguage = "russian" StemmerLanguageLightRussian FilterStemmerLanguage = "light_russian" StemmerLanguageLightSpanish FilterStemmerLanguage = "light_spanish" StemmerLanguageSpanish FilterStemmerLanguage = "spanish" StemmerLanguageSwedish FilterStemmerLanguage = "swedish" StemmerLanguageLightSwedish FilterStemmerLanguage = "light_swedish" StemmerLanguageTurkish FilterStemmerLanguage = "turkish" StopWordsArabic string = "_arabic_" StopWordsArmenian string = "_armenian_" StopWordsBasque string = "_basque_" StopWordsBengali string = "_bengali_" StopWordsBrazilian string = "_brazilian_" StopWordsBulgarian string = "_bulgarian_" StopWordsCatalan string = "_catalan_" StopWordsCjk string = "_cjk_" StopWordsCzech string = "_czech_" StopWordsDanish string = "_danish_" StopWordsDutch string = "_dutch_" StopWordsEnglish string = "_english_" StopWordsEstonian string = "_estonian_" StopWordsFinnish string = "_finnish_" StopWordsFrench string = "_french_" StopWordsGalician string = "_galician_" StopWordsGerman string = "_german_" StopWordsGreek string = "_greek_"
StopWordsHindi string = "_hindi_" StopWordsHungarian string = "_hungarian_" StopWordsIndonesian string = "_indonesian_" StopWordsIrish string = "_irish_" StopWordsItalian string = "_italian_" StopWordsLatvian string = "_latvian_" StopWordsLithuanian string = "_lithuanian_" StopWordsNorwegian string = "_norwegian_" StopWordsPersian string = "_persian_" StopWordsPortuguese string = "_portuguese_" StopWordsRomanian string = "_romanian_" StopWordsRussian string = "_russian_" StopWordsSorani string = "_sorani_" StopWordsSpanish string = "_spanish_" StopWordsSwedish string = "_swedish_" StopWordsThai string = "_thai_" StopWordsTurkish string = "_turkish_" SynonymFormatWordnet FilterSynonymFormat = "wordnet" SynonymFormatSolr FilterSynonymFormat = "solr" WordDelimiterAlpha WordDelimiter = "ALPHA" WordDelimiterAlphanum WordDelimiter = "ALPHANUM" WordDelimiterDigit WordDelimiter = "DIGIT" WordDelimiterLower WordDelimiter = "LOWER" WordDelimiterSubwordDelim WordDelimiter = "SUBWORD_DELIM" WordDelimiterUpper WordDelimiter = "UPPER" )
const ( CodecBestCompression IndexCode = "best_compression" PipelineNameNone PipelineName = "_none" )
const ( NormalizerNameLowercase NormalizerName = "lowercase" NormalizerTypeCustom NormalizerType = "custom" )
const ( RegularFlagCanonEq JavaRegularFlag = "CANON_EQ" RegularFlagCaseInsensitive JavaRegularFlag = "CASE_INSENSITIVE" RegularFlagComments JavaRegularFlag = "COMMENTS" RegularFlagDotAll JavaRegularFlag = "DOTALL" RegularFlagLiteral JavaRegularFlag = "LITERAL" RegularFlagMultiline JavaRegularFlag = "MULTILINE" RegularFlagUnicodeCase JavaRegularFlag = "UNICODE_CASE" RegularFlagUnicodeCharacterClass JavaRegularFlag = "UNICODE_CHARACTER_CLASS" RegularFlagUnixLines JavaRegularFlag = "UNIX_LINES" JavaRegularFlagSeparator string = "|" )
See https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html#field.summary for the meaning of the JavaRegularFlag constants above.
const ( SimilarityNameDefault SimilarityName = "default" SimilarityTypeBM25 SimilarityType = "BM25" SimilarityTypeClassic SimilarityType = "classic" // TF/IDF SimilarityTypeBoolean SimilarityType = "boolean" SimilarityTypeDFR SimilarityType = "DFR" SimilarityTypeDFI SimilarityType = "DFI" SimilarityTypeIB SimilarityType = "IB" SimilarityTypeLMDirichlet SimilarityType = "LMDirichlet" SimilarityTypeLMJelinekMercer SimilarityType = "LMJelinekMercer" SimilarityTypeScripted SimilarityType = "scripted" DFRBasicModelG SimilarityDFRBasicModelType = "g" DFRBasicModelIf SimilarityDFRBasicModelType = "if" DFRBasicModelIn SimilarityDFRBasicModelType = "in" DFRBasicModelIne SimilarityDFRBasicModelType = "ine" DFRAfterEffectB SimilarityDFRAfterEffectType = "b" DFRAfterEffectL SimilarityDFRAfterEffectType = "l" DFRNormalizationNo SimilarityDFRNormalizationType = "no" DFRNormalizationH1 SimilarityDFRNormalizationType = "h1" DFRNormalizationH2 SimilarityDFRNormalizationType = "h2" DFRNormalizationH3 SimilarityDFRNormalizationType = "h3" DFRNormalizationZ SimilarityDFRNormalizationType = "z" DFIIndependenceMeasureStandardized SimilarityDFIIndependenceMeasureType = "standardized" DFIIndependenceMeasureSaturated SimilarityDFIIndependenceMeasureType = "saturated" DFIIndependenceMeasureChisquared SimilarityDFIIndependenceMeasureType = "chisquared" IBDistributionLL SimilarityIBDistributionType = "ll" IBDistributionSPL SimilarityIBDistributionType = "spl" IBLambdaDF SimilarityIBLambdaType = "df" IBLambdaTTF SimilarityIBLambdaType = "ttf" )
const ( TokenizerNameClassic TokenizerName = "classic" TokenizerNameEdgeNgram TokenizerName = "edge_ngram" TokenizerNameNgram TokenizerName = "ngram" TokenizerNameKeyword TokenizerName = "keyword" TokenizerNameLetter TokenizerName = "letter" TokenizerNameLowercase TokenizerName = "lowercase" TokenizerNamePathHierarchy TokenizerName = "path_hierarchy" TokenizerNamePattern TokenizerName = "pattern" TokenizerNameStandard TokenizerName = "standard" TokenizerNameThai TokenizerName = "thai" TokenizerNameUaxUrlEmail TokenizerName = "uax_url_email" TokenizerTypeCharGroup TokenizerType = "char_group" TokenizerTypeClassic TokenizerType = "classic" TokenizerTypeEdgeNgram TokenizerType = "edge_ngram" TokenizerTypeNgram TokenizerType = "ngram" TokenizerTypeKeyword TokenizerType = "keyword" TokenizerTypePathHierarchy TokenizerType = "path_hierarchy" TokenizerTypePattern TokenizerType = "pattern" TokenizerTypeSimplePattern TokenizerType = "simple_pattern" TokenizerTypeSimplePatternSplit TokenizerType = "simple_pattern_split" TokenizerTypeStandard TokenizerType = "standard" TokenizerTypeUaxUrlEmail TokenizerType = "uax_url_email" TokenizerTypeWhitespace TokenizerType = "whitespace" EdgeNgramTokenCharsLetter EdgeNgramTokenChars = "letter" EdgeNgramTokenCharsDigit EdgeNgramTokenChars = "digit" EdgeNgramTokenCharsWhitespace EdgeNgramTokenChars = "whitespace" EdgeNgramTokenCharsPunctuation EdgeNgramTokenChars = "punctuation" EdgeNgramTokenCharsSymbol EdgeNgramTokenChars = "symbol" EdgeNgramTokenCharsCustom EdgeNgramTokenChars = "custom" )
Variables ¶
var FilterFiltersConditionError = errors.New("filters is empty")
var FilterScriptConditionError = errors.New("script is empty")
Functions ¶
This section is empty.
Types ¶
type Allocation ¶
type Allocation struct {
// contains filtered or unexported fields
}
func NewAllocation ¶
func NewAllocation() *Allocation
func (*Allocation) SetEnable ¶
func (allocation *Allocation) SetEnable(enable AllocationVal) *Allocation
func (*Allocation) SetExclude ¶
func (allocation *Allocation) SetExclude(exclude AllocationAttributes) *Allocation
func (*Allocation) SetInclude ¶
func (allocation *Allocation) SetInclude(include AllocationAttributes) *Allocation
func (*Allocation) SetRequire ¶
func (allocation *Allocation) SetRequire(require AllocationAttributes) *Allocation
func (*Allocation) SetTotalShardsPerNode ¶
func (allocation *Allocation) SetTotalShardsPerNode(totalShardsPerNode uint) *Allocation
func (*Allocation) Source ¶
func (allocation *Allocation) Source() (interface{}, error)
type AllocationAttribute ¶
type AllocationAttribute string
type AllocationAttributes ¶
type AllocationAttributes map[AllocationAttribute][]interface{}
func NewAllocationAttributes ¶
func NewAllocationAttributes() AllocationAttributes
func (AllocationAttributes) HostIps ¶
func (attributes AllocationAttributes) HostIps(values ...interface{}) AllocationAttributes
func (AllocationAttributes) Hosts ¶
func (attributes AllocationAttributes) Hosts(values ...interface{}) AllocationAttributes
func (AllocationAttributes) Ids ¶
func (attributes AllocationAttributes) Ids(values ...interface{}) AllocationAttributes
func (AllocationAttributes) Ips ¶
func (attributes AllocationAttributes) Ips(values ...interface{}) AllocationAttributes
func (AllocationAttributes) Names ¶
func (attributes AllocationAttributes) Names(values ...interface{}) AllocationAttributes
func (AllocationAttributes) PublishIps ¶
func (attributes AllocationAttributes) PublishIps(values ...interface{}) AllocationAttributes
func (AllocationAttributes) Set ¶
func (attributes AllocationAttributes) Set(name string, values ...interface{}) AllocationAttributes
type AllocationVal ¶
type AllocationVal string
type Analysis ¶
type Analysis struct {
// contains filtered or unexported fields
}
func NewAnalysis ¶
func NewAnalysis() *Analysis
func (*Analysis) AddAnalyzer ¶
func (*Analysis) AddCharFilter ¶
func (analysis *Analysis) AddCharFilter(charFilter ...CharFilter) *Analysis
func (*Analysis) AddNormalizer ¶
func (analysis *Analysis) AddNormalizer(normalizer ...Normalizer) *Analysis
func (*Analysis) AddTokenizer ¶
type Analyzer ¶
type Analyzer interface { Type() AnalyzerType Name() AnalyzerName Source() (interface{}, error) }
type AnalyzerFingerprint ¶
type AnalyzerFingerprint struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/7.x/analysis-fingerprint-analyzer.html
func NewAnalyzerFingerprint ¶
func NewAnalyzerFingerprint(name string) *AnalyzerFingerprint
func (*AnalyzerFingerprint) AddStopWords ¶
func (analyzer *AnalyzerFingerprint) AddStopWords(stopWords ...string) *AnalyzerFingerprint
func (*AnalyzerFingerprint) Name ¶
func (analyzer *AnalyzerFingerprint) Name() AnalyzerName
func (*AnalyzerFingerprint) SetMaxOutputSize ¶
func (analyzer *AnalyzerFingerprint) SetMaxOutputSize(maxOutputSize uint8) *AnalyzerFingerprint
func (*AnalyzerFingerprint) SetSeparator ¶
func (analyzer *AnalyzerFingerprint) SetSeparator(separator string) *AnalyzerFingerprint
func (*AnalyzerFingerprint) SetStopWordsPath ¶
func (analyzer *AnalyzerFingerprint) SetStopWordsPath(stopWordsPath string) *AnalyzerFingerprint
func (*AnalyzerFingerprint) Source ¶
func (analyzer *AnalyzerFingerprint) Source() (interface{}, error)
func (*AnalyzerFingerprint) Type ¶
func (analyzer *AnalyzerFingerprint) Type() AnalyzerType
type AnalyzerName ¶
type AnalyzerName string
type AnalyzerPattern ¶
type AnalyzerPattern struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/7.x/analysis-pattern-analyzer.html
func NewAnalyzerPattern ¶
func NewAnalyzerPattern(name string) *AnalyzerPattern
func (*AnalyzerPattern) AddFlags ¶
func (analyzer *AnalyzerPattern) AddFlags(flags ...JavaRegularFlag) *AnalyzerPattern
func (*AnalyzerPattern) AddStopWords ¶
func (analyzer *AnalyzerPattern) AddStopWords(stopWords ...string) *AnalyzerPattern
func (*AnalyzerPattern) Name ¶
func (analyzer *AnalyzerPattern) Name() AnalyzerName
func (*AnalyzerPattern) SetLowercase ¶
func (analyzer *AnalyzerPattern) SetLowercase(lowercase bool) *AnalyzerPattern
func (*AnalyzerPattern) SetPattern ¶
func (analyzer *AnalyzerPattern) SetPattern(pattern string) *AnalyzerPattern
func (*AnalyzerPattern) SetStopWordsPath ¶
func (analyzer *AnalyzerPattern) SetStopWordsPath(stopWordsPath string) *AnalyzerPattern
func (*AnalyzerPattern) Source ¶
func (analyzer *AnalyzerPattern) Source() (interface{}, error)
func (*AnalyzerPattern) Type ¶
func (analyzer *AnalyzerPattern) Type() AnalyzerType
type AnalyzerStandard ¶
type AnalyzerStandard struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/7.x/analysis-standard-analyzer.html
func NewAnalyzerStandard ¶
func NewAnalyzerStandard(name string) *AnalyzerStandard
func (*AnalyzerStandard) AddStopWords ¶
func (analyzer *AnalyzerStandard) AddStopWords(stopWords ...string) *AnalyzerStandard
func (*AnalyzerStandard) Name ¶
func (analyzer *AnalyzerStandard) Name() AnalyzerName
func (*AnalyzerStandard) SetMaxTokenLength ¶
func (analyzer *AnalyzerStandard) SetMaxTokenLength(maxTokenLength uint8) *AnalyzerStandard
func (*AnalyzerStandard) SetStopWordsPath ¶
func (analyzer *AnalyzerStandard) SetStopWordsPath(stopWordsPath string) *AnalyzerStandard
func (*AnalyzerStandard) Source ¶
func (analyzer *AnalyzerStandard) Source() (interface{}, error)
func (*AnalyzerStandard) Type ¶
func (analyzer *AnalyzerStandard) Type() AnalyzerType
type AnalyzerStop ¶
type AnalyzerStop struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/7.x/analysis-stop-analyzer.html
func NewAnalyzerStop ¶
func NewAnalyzerStop(name string) *AnalyzerStop
func (*AnalyzerStop) AddStopWords ¶
func (analyzer *AnalyzerStop) AddStopWords(stopWords ...string) *AnalyzerStop
func (*AnalyzerStop) Name ¶
func (analyzer *AnalyzerStop) Name() AnalyzerName
func (*AnalyzerStop) SetStopWordsPath ¶
func (analyzer *AnalyzerStop) SetStopWordsPath(stopWordsPath string) *AnalyzerStop
func (*AnalyzerStop) Source ¶
func (analyzer *AnalyzerStop) Source() (interface{}, error)
func (*AnalyzerStop) Type ¶
func (analyzer *AnalyzerStop) Type() AnalyzerType
type AnalyzerType ¶
type AnalyzerType string
type Blocks ¶
type Blocks struct {
// contains filtered or unexported fields
}
func (*Blocks) SetMetadata ¶
func (*Blocks) SetReadOnly ¶
func (*Blocks) SetReadOnlyAllowDelete ¶
type CharFilter ¶
type CharFilter interface { Type() CharFilterType Name() CharFilterName Source() (interface{}, error) }
type CharFilterHtmlStrip ¶
type CharFilterHtmlStrip struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-htmlstrip-charfilter.html
func NewCharFilterHtmlStrip ¶
func NewCharFilterHtmlStrip(name string) *CharFilterHtmlStrip
func (*CharFilterHtmlStrip) AddEscapedTags ¶
func (filter *CharFilterHtmlStrip) AddEscapedTags(escapedTags ...string) *CharFilterHtmlStrip
func (*CharFilterHtmlStrip) Name ¶
func (filter *CharFilterHtmlStrip) Name() CharFilterName
func (*CharFilterHtmlStrip) Source ¶
func (filter *CharFilterHtmlStrip) Source() (interface{}, error)
func (*CharFilterHtmlStrip) Type ¶
func (filter *CharFilterHtmlStrip) Type() CharFilterType
type CharFilterMapping ¶
type CharFilterMapping struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-mapping-charfilter.html
func NewCharFilterMapping ¶
func NewCharFilterMapping(name string) *CharFilterMapping
func (*CharFilterMapping) AddMappings ¶
func (filter *CharFilterMapping) AddMappings(mappings ...*CharMapping) *CharFilterMapping
func (*CharFilterMapping) Name ¶
func (filter *CharFilterMapping) Name() CharFilterName
func (*CharFilterMapping) SetMappingsPath ¶
func (filter *CharFilterMapping) SetMappingsPath(mappingsPath string) *CharFilterMapping
func (*CharFilterMapping) Source ¶
func (filter *CharFilterMapping) Source() (interface{}, error)
func (*CharFilterMapping) Type ¶
func (filter *CharFilterMapping) Type() CharFilterType
type CharFilterName ¶
type CharFilterName string
type CharFilterPatternReplace ¶
type CharFilterPatternReplace struct {
// contains filtered or unexported fields
}
func NewCharFilterPatternReplace ¶
func NewCharFilterPatternReplace(name string) *CharFilterPatternReplace
func (*CharFilterPatternReplace) AddFlags ¶
func (filter *CharFilterPatternReplace) AddFlags(flags ...JavaRegularFlag) *CharFilterPatternReplace
func (*CharFilterPatternReplace) Name ¶
func (filter *CharFilterPatternReplace) Name() CharFilterName
func (*CharFilterPatternReplace) SetPattern ¶
func (filter *CharFilterPatternReplace) SetPattern(pattern string) *CharFilterPatternReplace
func (*CharFilterPatternReplace) SetReplacement ¶
func (filter *CharFilterPatternReplace) SetReplacement(replacement string) *CharFilterPatternReplace
func (*CharFilterPatternReplace) Source ¶
func (filter *CharFilterPatternReplace) Source() (interface{}, error)
func (*CharFilterPatternReplace) Type ¶
func (filter *CharFilterPatternReplace) Type() CharFilterType
type CharFilterType ¶
type CharFilterType string
type CharMapping ¶
type CharMapping struct {
// contains filtered or unexported fields
}
func NewCharMapping ¶
func NewCharMapping(from string, to string) *CharMapping
func (*CharMapping) String ¶
func (charMapping *CharMapping) String() string
type CheckOnStartup ¶
type CheckOnStartup string
const ( CheckOnStartupFalse CheckOnStartup = "false" CheckOnStartupTrue CheckOnStartup = "true" CheckOnStartupChecksum CheckOnStartup = "checksum" )
type EdgeNgramTokenChars ¶
type EdgeNgramTokenChars string
type Filter ¶
type Filter interface { Type() FilterType Name() FilterName Source() (interface{}, error) }
type FilterAsciiFolding ¶
type FilterAsciiFolding struct {
// contains filtered or unexported fields
}
func NewFilterAsciiFolding ¶
func NewFilterAsciiFolding(name string) *FilterAsciiFolding
func (*FilterAsciiFolding) Name ¶
func (filter *FilterAsciiFolding) Name() FilterName
func (*FilterAsciiFolding) SetPreserveOriginal ¶
func (filter *FilterAsciiFolding) SetPreserveOriginal(preserveOriginal bool) *FilterAsciiFolding
func (*FilterAsciiFolding) Source ¶
func (filter *FilterAsciiFolding) Source() (interface{}, error)
func (*FilterAsciiFolding) Type ¶
func (filter *FilterAsciiFolding) Type() FilterType
type FilterBigramIgnoredScripts ¶
type FilterBigramIgnoredScripts string
type FilterCjkBigRam ¶
type FilterCjkBigRam struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-cjk-bigram-tokenfilter.html
func NewFilterCjkBigRam ¶
func NewFilterCjkBigRam(name string) *FilterCjkBigRam
func (*FilterCjkBigRam) AddIgnoredScripts ¶
func (filter *FilterCjkBigRam) AddIgnoredScripts(ignoredScripts ...FilterBigramIgnoredScripts) *FilterCjkBigRam
func (*FilterCjkBigRam) Name ¶
func (filter *FilterCjkBigRam) Name() FilterName
func (*FilterCjkBigRam) SetOutputUnigrams ¶
func (filter *FilterCjkBigRam) SetOutputUnigrams(outputUnigrams bool) *FilterCjkBigRam
func (*FilterCjkBigRam) Source ¶
func (filter *FilterCjkBigRam) Source() (interface{}, error)
func (*FilterCjkBigRam) Type ¶
func (filter *FilterCjkBigRam) Type() FilterType
type FilterCommonGrams ¶
type FilterCommonGrams struct {
// contains filtered or unexported fields
}
func NewFilterCommonGrams ¶
func NewFilterCommonGrams(name string) *FilterCommonGrams
func (*FilterCommonGrams) AddCommonWords ¶
func (filter *FilterCommonGrams) AddCommonWords(commonWords ...string) *FilterCommonGrams
func (*FilterCommonGrams) Name ¶
func (filter *FilterCommonGrams) Name() FilterName
func (*FilterCommonGrams) SetCommonWordsPath ¶
func (filter *FilterCommonGrams) SetCommonWordsPath(commonWordsPath string) *FilterCommonGrams
func (*FilterCommonGrams) SetIgnoreCase ¶
func (filter *FilterCommonGrams) SetIgnoreCase(ignoreCase bool) *FilterCommonGrams
func (*FilterCommonGrams) SetQueryMode ¶
func (filter *FilterCommonGrams) SetQueryMode(queryMode bool) *FilterCommonGrams
func (*FilterCommonGrams) Source ¶
func (filter *FilterCommonGrams) Source() (interface{}, error)
func (*FilterCommonGrams) Type ¶
func (filter *FilterCommonGrams) Type() FilterType
type FilterCondition ¶
type FilterCondition struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-condition-tokenfilter.html
func NewFilterCondition ¶
func NewFilterCondition(name string) *FilterCondition
func (*FilterCondition) AddFilter ¶
func (filter *FilterCondition) AddFilter(filters ...FilterName) *FilterCondition
func (*FilterCondition) Name ¶
func (filter *FilterCondition) Name() FilterName
func (*FilterCondition) SetScript ¶
func (filter *FilterCondition) SetScript(script *elastic.Script) *FilterCondition
func (*FilterCondition) Source ¶
func (filter *FilterCondition) Source() (interface{}, error)
func (*FilterCondition) Type ¶
func (filter *FilterCondition) Type() FilterType
type FilterDelimitedPayload ¶
type FilterDelimitedPayload struct {
// contains filtered or unexported fields
}
func NewFilterDelimitedPayload ¶
func NewFilterDelimitedPayload(name string) *FilterDelimitedPayload
func (*FilterDelimitedPayload) Name ¶
func (filter *FilterDelimitedPayload) Name() FilterName
func (*FilterDelimitedPayload) SetDelimiter ¶
func (filter *FilterDelimitedPayload) SetDelimiter(delimiter string) *FilterDelimitedPayload
func (*FilterDelimitedPayload) SetEncoding ¶
func (filter *FilterDelimitedPayload) SetEncoding(encoding FilterDelimitedPayloadEncoding) *FilterDelimitedPayload
func (*FilterDelimitedPayload) Source ¶
func (filter *FilterDelimitedPayload) Source() (interface{}, error)
func (*FilterDelimitedPayload) Type ¶
func (filter *FilterDelimitedPayload) Type() FilterType
type FilterDelimitedPayloadEncoding ¶
type FilterDelimitedPayloadEncoding string
type FilterDictionaryDecompounder ¶
type FilterDictionaryDecompounder struct {
// contains filtered or unexported fields
}
func NewFilterDictionaryDecompounder ¶
func NewFilterDictionaryDecompounder(name string) *FilterDictionaryDecompounder
func (*FilterDictionaryDecompounder) AddWordList ¶
func (filter *FilterDictionaryDecompounder) AddWordList(wordList ...string) *FilterDictionaryDecompounder
func (*FilterDictionaryDecompounder) Name ¶
func (filter *FilterDictionaryDecompounder) Name() FilterName
func (*FilterDictionaryDecompounder) SetMaxSubwordSize ¶
func (filter *FilterDictionaryDecompounder) SetMaxSubwordSize(maxSubwordSize uint32) *FilterDictionaryDecompounder
func (*FilterDictionaryDecompounder) SetMinSubwordSize ¶
func (filter *FilterDictionaryDecompounder) SetMinSubwordSize(minSubwordSize uint32) *FilterDictionaryDecompounder
func (*FilterDictionaryDecompounder) SetMinWordSize ¶
func (filter *FilterDictionaryDecompounder) SetMinWordSize(minWordSize uint32) *FilterDictionaryDecompounder
func (*FilterDictionaryDecompounder) SetOnlyLongestMatch ¶
func (filter *FilterDictionaryDecompounder) SetOnlyLongestMatch(onlyLongestMatch bool) *FilterDictionaryDecompounder
func (*FilterDictionaryDecompounder) SetWordListPath ¶
func (filter *FilterDictionaryDecompounder) SetWordListPath(wordListPath string) *FilterDictionaryDecompounder
func (*FilterDictionaryDecompounder) Source ¶
func (filter *FilterDictionaryDecompounder) Source() (interface{}, error)
func (*FilterDictionaryDecompounder) Type ¶
func (filter *FilterDictionaryDecompounder) Type() FilterType
type FilterEdgeNgram ¶
type FilterEdgeNgram struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-edgengram-tokenfilter.html
func NewFilterEdgeNgram ¶
func NewFilterEdgeNgram(name string) *FilterEdgeNgram
func (*FilterEdgeNgram) Name ¶
func (filter *FilterEdgeNgram) Name() FilterName
func (*FilterEdgeNgram) SetMaxGram ¶
func (filter *FilterEdgeNgram) SetMaxGram(maxGram uint32) *FilterEdgeNgram
func (*FilterEdgeNgram) SetMinGram ¶
func (filter *FilterEdgeNgram) SetMinGram(minGram uint32) *FilterEdgeNgram
func (*FilterEdgeNgram) SetPreserveOriginal ¶
func (filter *FilterEdgeNgram) SetPreserveOriginal(preserveOriginal bool) *FilterEdgeNgram
func (*FilterEdgeNgram) SetSide ¶
func (filter *FilterEdgeNgram) SetSide(side FilterEdgeNgramSide) *FilterEdgeNgram
func (*FilterEdgeNgram) Source ¶
func (filter *FilterEdgeNgram) Source() (interface{}, error)
func (*FilterEdgeNgram) Type ¶
func (filter *FilterEdgeNgram) Type() FilterType
type FilterEdgeNgramSide ¶
type FilterEdgeNgramSide string
type FilterElision ¶
type FilterElision struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-elision-tokenfilter.html
func NewFilterElision ¶
func NewFilterElision(name string) *FilterElision
func (*FilterElision) AddArticles ¶
func (filter *FilterElision) AddArticles(articles ...string) *FilterElision
func (*FilterElision) Name ¶
func (filter *FilterElision) Name() FilterName
func (*FilterElision) SetArticlesCase ¶
func (filter *FilterElision) SetArticlesCase(articlesCase bool) *FilterElision
func (*FilterElision) SetArticlesPath ¶
func (filter *FilterElision) SetArticlesPath(articlesPath string) *FilterElision
func (*FilterElision) Source ¶
func (filter *FilterElision) Source() (interface{}, error)
func (*FilterElision) Type ¶
func (filter *FilterElision) Type() FilterType
type FilterFingerprint ¶
type FilterFingerprint struct {
// contains filtered or unexported fields
}
func NewFilterFingerprint ¶
func NewFilterFingerprint(name string) *FilterFingerprint
func (*FilterFingerprint) Name ¶
func (filter *FilterFingerprint) Name() FilterName
func (*FilterFingerprint) SetMaxOutputSize ¶
func (filter *FilterFingerprint) SetMaxOutputSize(maxOutputSize uint32) *FilterFingerprint
func (*FilterFingerprint) SetSeparator ¶
func (filter *FilterFingerprint) SetSeparator(separator string) *FilterFingerprint
func (*FilterFingerprint) Source ¶
func (filter *FilterFingerprint) Source() (interface{}, error)
func (*FilterFingerprint) Type ¶
func (filter *FilterFingerprint) Type() FilterType
type FilterHyphenationDecompounder ¶
type FilterHyphenationDecompounder struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-hyp-decomp-tokenfilter.html
func NewFilterHyphenationDecompounder ¶
func NewFilterHyphenationDecompounder(name string) *FilterHyphenationDecompounder
func (*FilterHyphenationDecompounder) AddWordList ¶
func (filter *FilterHyphenationDecompounder) AddWordList(wordList ...string) *FilterHyphenationDecompounder
func (*FilterHyphenationDecompounder) Name ¶
func (filter *FilterHyphenationDecompounder) Name() FilterName
func (*FilterHyphenationDecompounder) SetHyphenationPatternsPath ¶
func (filter *FilterHyphenationDecompounder) SetHyphenationPatternsPath(hyphenationPatternsPath string) *FilterHyphenationDecompounder
func (*FilterHyphenationDecompounder) SetMaxSubwordSize ¶
func (filter *FilterHyphenationDecompounder) SetMaxSubwordSize(maxSubwordSize uint32) *FilterHyphenationDecompounder
func (*FilterHyphenationDecompounder) SetMinSubwordSize ¶
func (filter *FilterHyphenationDecompounder) SetMinSubwordSize(minSubwordSize uint32) *FilterHyphenationDecompounder
func (*FilterHyphenationDecompounder) SetMinWordSize ¶
func (filter *FilterHyphenationDecompounder) SetMinWordSize(minWordSize uint32) *FilterHyphenationDecompounder
func (*FilterHyphenationDecompounder) SetOnlyLongestMatch ¶
func (filter *FilterHyphenationDecompounder) SetOnlyLongestMatch(onlyLongestMatch bool) *FilterHyphenationDecompounder
func (*FilterHyphenationDecompounder) SetWordListPath ¶
func (filter *FilterHyphenationDecompounder) SetWordListPath(wordListPath string) *FilterHyphenationDecompounder
func (*FilterHyphenationDecompounder) Source ¶
func (filter *FilterHyphenationDecompounder) Source() (interface{}, error)
func (*FilterHyphenationDecompounder) Type ¶
func (filter *FilterHyphenationDecompounder) Type() FilterType
type FilterKeep ¶
type FilterKeep struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-keep-words-tokenfilter.html
func NewFilterKeep ¶
func NewFilterKeep(name string) *FilterKeep
func (*FilterKeep) AddKeepWords ¶
func (filter *FilterKeep) AddKeepWords(keepWords ...string) *FilterKeep
func (*FilterKeep) Name ¶
func (filter *FilterKeep) Name() FilterName
func (*FilterKeep) SetKeepWordsCase ¶
func (filter *FilterKeep) SetKeepWordsCase(keepWordsCase bool) *FilterKeep
func (*FilterKeep) SetKeepWordsPath ¶
func (filter *FilterKeep) SetKeepWordsPath(keepWordsPath string) *FilterKeep
func (*FilterKeep) Source ¶
func (filter *FilterKeep) Source() (interface{}, error)
func (*FilterKeep) Type ¶
func (filter *FilterKeep) Type() FilterType
type FilterKeepTypes ¶
type FilterKeepTypes struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-keep-types-tokenfilter.html
func NewFilterKeepTypes ¶
func NewFilterKeepTypes(name string) *FilterKeepTypes
func (*FilterKeepTypes) AddTypes ¶
func (filter *FilterKeepTypes) AddTypes(types ...string) *FilterKeepTypes
func (*FilterKeepTypes) Name ¶
func (filter *FilterKeepTypes) Name() FilterName
func (*FilterKeepTypes) SetMode ¶
func (filter *FilterKeepTypes) SetMode(mode FilterKeepTypesMode) *FilterKeepTypes
func (*FilterKeepTypes) Source ¶
func (filter *FilterKeepTypes) Source() (interface{}, error)
func (*FilterKeepTypes) Type ¶
func (filter *FilterKeepTypes) Type() FilterType
type FilterKeepTypesMode ¶
type FilterKeepTypesMode string
type FilterKeywordMarker ¶
type FilterKeywordMarker struct {
// contains filtered or unexported fields
}
func NewFilterKeywordMarker ¶
func NewFilterKeywordMarker(name string) *FilterKeywordMarker
func (*FilterKeywordMarker) AddKeywords ¶
func (filter *FilterKeywordMarker) AddKeywords(keywords ...string) *FilterKeywordMarker
func (*FilterKeywordMarker) Name ¶
func (filter *FilterKeywordMarker) Name() FilterName
func (*FilterKeywordMarker) SetIgnoreCase ¶
func (filter *FilterKeywordMarker) SetIgnoreCase(ignoreCase bool) *FilterKeywordMarker
func (*FilterKeywordMarker) SetKeywordsPath ¶
func (filter *FilterKeywordMarker) SetKeywordsPath(keywordsPath string) *FilterKeywordMarker
func (*FilterKeywordMarker) SetKeywordsPattern ¶
func (filter *FilterKeywordMarker) SetKeywordsPattern(keywordsPattern string) *FilterKeywordMarker
func (*FilterKeywordMarker) Source ¶
func (filter *FilterKeywordMarker) Source() (interface{}, error)
func (*FilterKeywordMarker) Type ¶
func (filter *FilterKeywordMarker) Type() FilterType
type FilterLength ¶
type FilterLength struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-length-tokenfilter.html
func NewFilterLength ¶
func NewFilterLength(name string) *FilterLength
func (*FilterLength) Name ¶
func (filter *FilterLength) Name() FilterName
func (*FilterLength) SetMax ¶
func (filter *FilterLength) SetMax(max uint32) *FilterLength
func (*FilterLength) SetMin ¶
func (filter *FilterLength) SetMin(min uint32) *FilterLength
func (*FilterLength) Source ¶
func (filter *FilterLength) Source() (interface{}, error)
func (*FilterLength) Type ¶
func (filter *FilterLength) Type() FilterType
type FilterLimit ¶
type FilterLimit struct {
// contains filtered or unexported fields
}
func NewFilterLimit ¶
func NewFilterLimit(name string) *FilterLimit
func (*FilterLimit) Name ¶
func (filter *FilterLimit) Name() FilterName
func (*FilterLimit) SetConsumeAllTokens ¶
func (filter *FilterLimit) SetConsumeAllTokens(consumeAllTokens bool) *FilterLimit
func (*FilterLimit) SetMaxTokenCount ¶
func (filter *FilterLimit) SetMaxTokenCount(maxTokenCount uint32) *FilterLimit
func (*FilterLimit) Source ¶
func (filter *FilterLimit) Source() (interface{}, error)
func (*FilterLimit) Type ¶
func (filter *FilterLimit) Type() FilterType
type FilterLowercase ¶
type FilterLowercase struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-lowercase-tokenfilter.html
func NewFilterLowercase ¶
func NewFilterLowercase(name string) *FilterLowercase
func (*FilterLowercase) Name ¶
func (filter *FilterLowercase) Name() FilterName
func (*FilterLowercase) SetLanguage ¶
func (filter *FilterLowercase) SetLanguage(language FilterLowercaseLanguageType) *FilterLowercase
func (*FilterLowercase) Source ¶
func (filter *FilterLowercase) Source() (interface{}, error)
func (*FilterLowercase) Type ¶
func (filter *FilterLowercase) Type() FilterType
type FilterLowercaseLanguageType ¶
type FilterLowercaseLanguageType string
type FilterMinHash ¶
type FilterMinHash struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-minhash-tokenfilter.html
func NewFilterMinHash ¶
func NewFilterMinHash(name string) *FilterMinHash
func (*FilterMinHash) Name ¶
func (filter *FilterMinHash) Name() FilterName
func (*FilterMinHash) SetBucketCount ¶
func (filter *FilterMinHash) SetBucketCount(bucketCount uint32) *FilterMinHash
func (*FilterMinHash) SetHashCount ¶
func (filter *FilterMinHash) SetHashCount(hashCount uint32) *FilterMinHash
func (*FilterMinHash) SetHashSetSize ¶
func (filter *FilterMinHash) SetHashSetSize(hashSetSize uint32) *FilterMinHash
func (*FilterMinHash) SetWithRotation ¶
func (filter *FilterMinHash) SetWithRotation(withRotation bool) *FilterMinHash
func (*FilterMinHash) Source ¶
func (filter *FilterMinHash) Source() (interface{}, error)
func (*FilterMinHash) Type ¶
func (filter *FilterMinHash) Type() FilterType
type FilterMultiplexer ¶
type FilterMultiplexer struct {
// contains filtered or unexported fields
}
func NewFilterMultiplexer ¶
func NewFilterMultiplexer(name string) *FilterMultiplexer
func (*FilterMultiplexer) AddFilters ¶
func (filter *FilterMultiplexer) AddFilters(filters ...FilterName) *FilterMultiplexer
func (*FilterMultiplexer) Name ¶
func (filter *FilterMultiplexer) Name() FilterName
func (*FilterMultiplexer) Source ¶
func (filter *FilterMultiplexer) Source() (interface{}, error)
func (*FilterMultiplexer) Type ¶
func (filter *FilterMultiplexer) Type() FilterType
type FilterName ¶
type FilterName string
type FilterNgram ¶
type FilterNgram struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-ngram-tokenfilter.html
func NewFilterNgram ¶
func NewFilterNgram(name string) *FilterNgram
func (*FilterNgram) Name ¶
func (filter *FilterNgram) Name() FilterName
func (*FilterNgram) SetMaxGram ¶
func (filter *FilterNgram) SetMaxGram(maxGram uint8) *FilterNgram
func (*FilterNgram) SetMinGram ¶
func (filter *FilterNgram) SetMinGram(minGram uint8) *FilterNgram
func (*FilterNgram) SetPreserveOriginal ¶
func (filter *FilterNgram) SetPreserveOriginal(preserveOriginal bool) *FilterNgram
func (*FilterNgram) Source ¶
func (filter *FilterNgram) Source() (interface{}, error)
func (*FilterNgram) Type ¶
func (filter *FilterNgram) Type() FilterType
type FilterPatternCapture ¶
type FilterPatternCapture struct {
// contains filtered or unexported fields
}
func NewFilterPatternCapture ¶
func NewFilterPatternCapture(name string) *FilterPatternCapture
func (*FilterPatternCapture) AddPatterns ¶
func (filter *FilterPatternCapture) AddPatterns(patterns ...string) *FilterPatternCapture
func (*FilterPatternCapture) Name ¶
func (filter *FilterPatternCapture) Name() FilterName
func (*FilterPatternCapture) SetPreserveOriginal ¶
func (filter *FilterPatternCapture) SetPreserveOriginal(preserveOriginal bool) *FilterPatternCapture
func (*FilterPatternCapture) Source ¶
func (filter *FilterPatternCapture) Source() (interface{}, error)
func (*FilterPatternCapture) Type ¶
func (filter *FilterPatternCapture) Type() FilterType
type FilterPatternReplace ¶
type FilterPatternReplace struct {
// contains filtered or unexported fields
}
func NewFilterPatternReplace ¶
func NewFilterPatternReplace(name string) *FilterPatternReplace
func (*FilterPatternReplace) Name ¶
func (filter *FilterPatternReplace) Name() FilterName
func (*FilterPatternReplace) SetAll ¶
func (filter *FilterPatternReplace) SetAll(all bool) *FilterPatternReplace
func (*FilterPatternReplace) SetPattern ¶
func (filter *FilterPatternReplace) SetPattern(pattern string) *FilterPatternReplace
func (*FilterPatternReplace) SetReplacement ¶
func (filter *FilterPatternReplace) SetReplacement(replacement string) *FilterPatternReplace
func (*FilterPatternReplace) Source ¶
func (filter *FilterPatternReplace) Source() (interface{}, error)
func (*FilterPatternReplace) Type ¶
func (filter *FilterPatternReplace) Type() FilterType
type FilterPhonetic ¶
type FilterPhonetic struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/plugins/current/analysis-phonetic-token-filter.html
func NewFilterPhonetic ¶
func NewFilterPhonetic(name string) *FilterPhonetic
func (*FilterPhonetic) Name ¶
func (filter *FilterPhonetic) Name() FilterName
func (*FilterPhonetic) SetEncoder ¶
func (filter *FilterPhonetic) SetEncoder(encoder FilterPhoneticEncoder) *FilterPhonetic
func (*FilterPhonetic) SetLanguageSet ¶
func (filter *FilterPhonetic) SetLanguageSet(languageSet FilterPhoneticLanguageSet) *FilterPhonetic
func (*FilterPhonetic) SetMaxCodeLen ¶
func (filter *FilterPhonetic) SetMaxCodeLen(maxCodeLen uint32) *FilterPhonetic
func (*FilterPhonetic) SetNameType ¶
func (filter *FilterPhonetic) SetNameType(nameType FilterPhoneticNameType) *FilterPhonetic
func (*FilterPhonetic) SetReplace ¶
func (filter *FilterPhonetic) SetReplace(replace bool) *FilterPhonetic
func (*FilterPhonetic) SetRuleType ¶
func (filter *FilterPhonetic) SetRuleType(ruleType FilterPhoneticRuleType) *FilterPhonetic
func (*FilterPhonetic) Source ¶
func (filter *FilterPhonetic) Source() (interface{}, error)
func (*FilterPhonetic) Type ¶
func (filter *FilterPhonetic) Type() FilterType
type FilterPhoneticEncoder ¶
type FilterPhoneticEncoder string
type FilterPhoneticLanguageSet ¶
type FilterPhoneticLanguageSet string
type FilterPhoneticNameType ¶
type FilterPhoneticNameType string
type FilterPhoneticRuleType ¶
type FilterPhoneticRuleType string
type FilterPredicateTokenFilter ¶
type FilterPredicateTokenFilter struct {
// contains filtered or unexported fields
}
func NewFilterPredicateTokenFilter ¶
func NewFilterPredicateTokenFilter(name string) *FilterPredicateTokenFilter
func (*FilterPredicateTokenFilter) Name ¶
func (filter *FilterPredicateTokenFilter) Name() FilterName
func (*FilterPredicateTokenFilter) SetScript ¶
func (filter *FilterPredicateTokenFilter) SetScript(script *elastic.Script) *FilterPredicateTokenFilter
func (*FilterPredicateTokenFilter) Source ¶
func (filter *FilterPredicateTokenFilter) Source() (interface{}, error)
func (*FilterPredicateTokenFilter) Type ¶
func (filter *FilterPredicateTokenFilter) Type() FilterType
type FilterShingle ¶
type FilterShingle struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-shingle-tokenfilter.html
func NewFilterShingle ¶
func NewFilterShingle(name string) *FilterShingle
func (*FilterShingle) Name ¶
func (filter *FilterShingle) Name() FilterName
func (*FilterShingle) SetFillerToken ¶
func (filter *FilterShingle) SetFillerToken(fillerToken string) *FilterShingle
func (*FilterShingle) SetMaxShingleSize ¶
func (filter *FilterShingle) SetMaxShingleSize(maxShingleSize uint32) *FilterShingle
func (*FilterShingle) SetMinShingleSize ¶
func (filter *FilterShingle) SetMinShingleSize(minShingleSize uint32) *FilterShingle
func (*FilterShingle) SetOutputUnigrams ¶
func (filter *FilterShingle) SetOutputUnigrams(outputUnigrams bool) *FilterShingle
func (*FilterShingle) SetOutputUnigramsIfNoShingles ¶
func (filter *FilterShingle) SetOutputUnigramsIfNoShingles(outputUnigramsIfNoShingles bool) *FilterShingle
func (*FilterShingle) SetTokenSeparator ¶
func (filter *FilterShingle) SetTokenSeparator(tokenSeparator string) *FilterShingle
func (*FilterShingle) Source ¶
func (filter *FilterShingle) Source() (interface{}, error)
func (*FilterShingle) Type ¶
func (filter *FilterShingle) Type() FilterType
type FilterSnowball ¶
type FilterSnowball struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-snowball-tokenfilter.html
func NewFilterSnowball ¶
func NewFilterSnowball(name string) *FilterSnowball
func (*FilterSnowball) Name ¶
func (filter *FilterSnowball) Name() FilterName
func (*FilterSnowball) SetLanguage ¶
func (filter *FilterSnowball) SetLanguage(language FilterSnowballLanguage) *FilterSnowball
func (*FilterSnowball) Source ¶
func (filter *FilterSnowball) Source() (interface{}, error)
func (*FilterSnowball) Type ¶
func (filter *FilterSnowball) Type() FilterType
type FilterSnowballLanguage ¶
type FilterSnowballLanguage string
type FilterStemmer ¶
type FilterStemmer struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-stemmer-tokenfilter.html
func NewFilterStemmer ¶
func NewFilterStemmer(name string) *FilterStemmer
func (*FilterStemmer) Name ¶
func (filter *FilterStemmer) Name() FilterName
func (*FilterStemmer) SetLanguage ¶
func (filter *FilterStemmer) SetLanguage(language FilterStemmerLanguage) *FilterStemmer
func (*FilterStemmer) Source ¶
func (filter *FilterStemmer) Source() (interface{}, error)
func (*FilterStemmer) Type ¶
func (filter *FilterStemmer) Type() FilterType
type FilterStemmerLanguage ¶
type FilterStemmerLanguage string
type FilterStemmerOverride ¶
type FilterStemmerOverride struct {
// contains filtered or unexported fields
}
func NewFilterStemmerOverride ¶
func NewFilterStemmerOverride(name string) *FilterStemmerOverride
func (*FilterStemmerOverride) AddRules ¶
func (filter *FilterStemmerOverride) AddRules(rules ...*StemmerOverrideRule) *FilterStemmerOverride
func (*FilterStemmerOverride) Name ¶
func (filter *FilterStemmerOverride) Name() FilterName
func (*FilterStemmerOverride) SetRulesPath ¶
func (filter *FilterStemmerOverride) SetRulesPath(rulesPath string) *FilterStemmerOverride
func (*FilterStemmerOverride) Source ¶
func (filter *FilterStemmerOverride) Source() (interface{}, error)
func (*FilterStemmerOverride) Type ¶
func (filter *FilterStemmerOverride) Type() FilterType
type FilterStop ¶
type FilterStop struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-stop-tokenfilter.html
func NewFilterStop ¶
func NewFilterStop(name string) *FilterStop
func (*FilterStop) AddStopWords ¶
func (filter *FilterStop) AddStopWords(stopWords ...string) *FilterStop
func (*FilterStop) Name ¶
func (filter *FilterStop) Name() FilterName
func (*FilterStop) SetIgnoreCase ¶
func (filter *FilterStop) SetIgnoreCase(ignoreCase bool) *FilterStop
func (*FilterStop) SetRemoveTrailing ¶
func (filter *FilterStop) SetRemoveTrailing(removeTrailing bool) *FilterStop
func (*FilterStop) SetStopWordsPath ¶
func (filter *FilterStop) SetStopWordsPath(stopWordsPath string) *FilterStop
func (*FilterStop) Source ¶
func (filter *FilterStop) Source() (interface{}, error)
func (*FilterStop) Type ¶
func (filter *FilterStop) Type() FilterType
type FilterSynonym ¶
type FilterSynonym struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-synonym-tokenfilter.html or https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-synonym-graph-tokenfilter.html
func NewFilterSynonym ¶
func NewFilterSynonym(name string) *FilterSynonym
func NewFilterSynonymGraph ¶
func NewFilterSynonymGraph(name string) *FilterSynonym
func (*FilterSynonym) AddSynonyms ¶
func (filter *FilterSynonym) AddSynonyms(synonyms ...Synonym) *FilterSynonym
func (*FilterSynonym) Name ¶
func (filter *FilterSynonym) Name() FilterName
func (*FilterSynonym) SetExpand ¶
func (filter *FilterSynonym) SetExpand(expand bool) *FilterSynonym
func (*FilterSynonym) SetFormat ¶
func (filter *FilterSynonym) SetFormat(format FilterSynonymFormat) *FilterSynonym
func (*FilterSynonym) SetLenient ¶
func (filter *FilterSynonym) SetLenient(lenient bool) *FilterSynonym
func (*FilterSynonym) SetSynonymsPath ¶
func (filter *FilterSynonym) SetSynonymsPath(synonymsPath string) *FilterSynonym
func (*FilterSynonym) Source ¶
func (filter *FilterSynonym) Source() (interface{}, error)
func (*FilterSynonym) Type ¶
func (filter *FilterSynonym) Type() FilterType
type FilterSynonymFormat ¶
type FilterSynonymFormat string
type FilterTruncate ¶
type FilterTruncate struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-truncate-tokenfilter.html
func NewFilterTruncate ¶
func NewFilterTruncate(name string) *FilterTruncate
func (*FilterTruncate) Name ¶
func (filter *FilterTruncate) Name() FilterName
func (*FilterTruncate) SetLength ¶
func (filter *FilterTruncate) SetLength(length uint32) *FilterTruncate
func (*FilterTruncate) Source ¶
func (filter *FilterTruncate) Source() (interface{}, error)
func (*FilterTruncate) Type ¶
func (filter *FilterTruncate) Type() FilterType
type FilterType ¶
type FilterType string
type FilterUnique ¶
type FilterUnique struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-unique-tokenfilter.html
func NewFilterUnique ¶
func NewFilterUnique(name string) *FilterUnique
func (*FilterUnique) Name ¶
func (filter *FilterUnique) Name() FilterName
func (*FilterUnique) SetOnlyOnSamePosition ¶
func (filter *FilterUnique) SetOnlyOnSamePosition(onlyOnSamePosition bool) *FilterUnique
func (*FilterUnique) Source ¶
func (filter *FilterUnique) Source() (interface{}, error)
func (*FilterUnique) Type ¶
func (filter *FilterUnique) Type() FilterType
type FilterWordDelimiter ¶
type FilterWordDelimiter struct {
// contains filtered or unexported fields
}
func NewFilterWordDelimiter ¶
func NewFilterWordDelimiter(name string) *FilterWordDelimiter
func (*FilterWordDelimiter) AddProtectedWords ¶
func (filter *FilterWordDelimiter) AddProtectedWords(protectedWords ...string) *FilterWordDelimiter
func (*FilterWordDelimiter) AddTypeTable ¶
func (filter *FilterWordDelimiter) AddTypeTable(typeTable ...*WordDelimiterType) *FilterWordDelimiter
func (*FilterWordDelimiter) Name ¶
func (filter *FilterWordDelimiter) Name() FilterName
func (*FilterWordDelimiter) SetCatenateAll ¶
func (filter *FilterWordDelimiter) SetCatenateAll(catenateAll bool) *FilterWordDelimiter
func (*FilterWordDelimiter) SetCatenateNumbers ¶
func (filter *FilterWordDelimiter) SetCatenateNumbers(catenateNumbers bool) *FilterWordDelimiter
func (*FilterWordDelimiter) SetCatenateWords ¶
func (filter *FilterWordDelimiter) SetCatenateWords(catenateWords bool) *FilterWordDelimiter
func (*FilterWordDelimiter) SetGenerateNumberParts ¶
func (filter *FilterWordDelimiter) SetGenerateNumberParts(generateNumberParts bool) *FilterWordDelimiter
func (*FilterWordDelimiter) SetGenerateWordParts ¶
func (filter *FilterWordDelimiter) SetGenerateWordParts(generateWordParts bool) *FilterWordDelimiter
func (*FilterWordDelimiter) SetPreserveOriginal ¶
func (filter *FilterWordDelimiter) SetPreserveOriginal(preserveOriginal bool) *FilterWordDelimiter
func (*FilterWordDelimiter) SetProtectedWordsPath ¶
func (filter *FilterWordDelimiter) SetProtectedWordsPath(protectedWordsPath string) *FilterWordDelimiter
func (*FilterWordDelimiter) SetSplitOnCaseChange ¶
func (filter *FilterWordDelimiter) SetSplitOnCaseChange(splitOnCaseChange bool) *FilterWordDelimiter
func (*FilterWordDelimiter) SetSplitOnNumerics ¶
func (filter *FilterWordDelimiter) SetSplitOnNumerics(splitOnNumerics bool) *FilterWordDelimiter
func (*FilterWordDelimiter) SetStemEnglishPossessive ¶
func (filter *FilterWordDelimiter) SetStemEnglishPossessive(stemEnglishPossessive bool) *FilterWordDelimiter
func (*FilterWordDelimiter) SetTypeTablePath ¶
func (filter *FilterWordDelimiter) SetTypeTablePath(typeTablePath string) *FilterWordDelimiter
func (*FilterWordDelimiter) Source ¶
func (filter *FilterWordDelimiter) Source() (interface{}, error)
func (*FilterWordDelimiter) Type ¶
func (filter *FilterWordDelimiter) Type() FilterType
type FilterWordDelimiterGraph ¶
type FilterWordDelimiterGraph struct {
	FilterWordDelimiter
	// contains filtered or unexported fields
}
func NewFilterWordDelimiterGraph ¶
func NewFilterWordDelimiterGraph(name string) *FilterWordDelimiterGraph
func (*FilterWordDelimiterGraph) SetAdjustOffsets ¶
func (filter *FilterWordDelimiterGraph) SetAdjustOffsets(adjustOffsets bool) *FilterWordDelimiterGraph
func (*FilterWordDelimiterGraph) SetIgnoreKeywords ¶
func (filter *FilterWordDelimiterGraph) SetIgnoreKeywords(ignoreKeywords bool) *FilterWordDelimiterGraph
func (*FilterWordDelimiterGraph) Source ¶
func (filter *FilterWordDelimiterGraph) Source() (interface{}, error)
func (*FilterWordDelimiterGraph) Type ¶
func (filter *FilterWordDelimiterGraph) Type() FilterType
type Highlight ¶
type Highlight struct {
// contains filtered or unexported fields
}
func NewHighlight ¶
type Index ¶
type Index struct {
// contains filtered or unexported fields
}
func (*Index) AddSimilarity ¶
func (index *Index) AddSimilarity(similarity ...Similarity) *Index
func (*Index) SetAnalysis ¶
func (*Index) SetAnalyze ¶
func (*Index) SetAutoExpandReplicas ¶
func (*Index) SetDefaultPipeline ¶
func (index *Index) SetDefaultPipeline(defaultPipeline PipelineName) *Index
func (*Index) SetFinalPipeline ¶
func (index *Index) SetFinalPipeline(finalPipeline PipelineName) *Index
func (*Index) SetFlushAfterMerge ¶
func (*Index) SetGcDeletes ¶
func (*Index) SetHighlight ¶
func (*Index) SetIndexing ¶
func (*Index) SetLoadFixedBitsetFiltersEagerly ¶
func (*Index) SetMapping ¶
func (*Index) SetMaxAdjacencyMatrixFilters ¶
func (*Index) SetMaxDocvalueFieldsSearch ¶
func (*Index) SetMaxInnerResultWindow ¶
func (*Index) SetMaxNgramDiff ¶
func (*Index) SetMaxRefreshListeners ¶
func (*Index) SetMaxRegexLength ¶
func (*Index) SetMaxRescoreWindow ¶
func (*Index) SetMaxResultWindow ¶
func (*Index) SetMaxScriptFields ¶
func (*Index) SetMaxShingleDiff ¶
func (*Index) SetMaxSlicesPerScroll ¶
func (*Index) SetMaxTermsCount ¶
func (*Index) SetNumberOfReplicas ¶
func (*Index) SetNumberOfRoutingShards ¶
func (*Index) SetNumberOfShards ¶
func (*Index) SetRefreshInterval ¶
func (*Index) SetRouting ¶
func (*Index) SetRoutingPartitionSize ¶
func (*Index) SetSoftDeletes ¶
func (index *Index) SetSoftDeletes(softDeletes *SoftDeletes) *Index
func (*Index) SetSourceOnly ¶
func (*Index) SetTranslog ¶
func (*Index) SetUnassigned ¶
func (index *Index) SetUnassigned(unassigned *Unassigned) *Index
func (*Index) SetVerifiedBeforeClose ¶
type Interval ¶
type Interval struct {
// contains filtered or unexported fields
}
func NewInterval ¶
type JavaRegularFlag ¶
type JavaRegularFlag string
type LogLevel ¶
type LogLevel struct {
// contains filtered or unexported fields
}
func NewLogLevel ¶
func NewLogLevel() *LogLevel
type Mapping ¶
type Mapping struct {
// contains filtered or unexported fields
}
func NewMapping ¶
func NewMapping() *Mapping
func (*Mapping) SetFieldNameLength ¶
func (*Mapping) SetIgnoreMalformed ¶
func (*Mapping) SetNestedFields ¶
func (*Mapping) SetNestedObjects ¶
func (*Mapping) SetTotalFields ¶
type Merge ¶
type Merge struct {
// contains filtered or unexported fields
}
func (*Merge) SetScheduler ¶
type NodeLeft ¶
type NodeLeft struct {
// contains filtered or unexported fields
}
func NewNodeLeft ¶
type Normalizer ¶
type Normalizer interface {
	Type() NormalizerType
	Name() NormalizerName
	Source() (interface{}, error)
}
type NormalizerCustom ¶
type NormalizerCustom struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-normalizers.html
func NewNormalizerCustom ¶
func NewNormalizerCustom(name string) *NormalizerCustom
func (*NormalizerCustom) AddCharFilter ¶
func (normalizer *NormalizerCustom) AddCharFilter(charFilter ...CharFilterName) *NormalizerCustom
func (*NormalizerCustom) AddFilter ¶
func (normalizer *NormalizerCustom) AddFilter(filter ...FilterName) *NormalizerCustom
func (*NormalizerCustom) Name ¶
func (normalizer *NormalizerCustom) Name() NormalizerName
func (*NormalizerCustom) Source ¶
func (normalizer *NormalizerCustom) Source() (interface{}, error)
func (*NormalizerCustom) Type ¶
func (normalizer *NormalizerCustom) Type() NormalizerType
type NormalizerName ¶
type NormalizerName string
type NormalizerType ¶
type NormalizerType string
type PipelineName ¶
type PipelineName string
type Policy ¶
type Policy struct {
// contains filtered or unexported fields
}
func (*Policy) SetDeletesPctAllowed ¶
func (*Policy) SetExpungeDeletesAllowed ¶
func (*Policy) SetFloorSegment ¶
func (*Policy) SetMaxMergeAtOnce ¶
func (*Policy) SetMaxMergeAtOnceExplicit ¶
func (*Policy) SetMaxMergedSegment ¶
func (*Policy) SetReclaimDeletesWeight ¶
func (*Policy) SetSegmentsPerTier ¶
type Rebalance ¶
type Rebalance struct {
// contains filtered or unexported fields
}
func NewRebalance ¶
func NewRebalance(enable RebalanceVal) *Rebalance
type RebalanceVal ¶
type RebalanceVal string
const (
	RebalanceAll       RebalanceVal = "all"
	RebalancePrimaries RebalanceVal = "primaries"
	RebalanceReplicas  RebalanceVal = "replicas"
	RebalanceNone      RebalanceVal = "none"
)
type Retention ¶
type Retention struct {
// contains filtered or unexported fields
}
func NewRetention ¶
type RetentionLease ¶
type RetentionLease struct {
// contains filtered or unexported fields
}
func NewRetentionLease ¶
func NewRetentionLease(period *Interval) *RetentionLease
func (*RetentionLease) Source ¶
func (retentionLease *RetentionLease) Source() (interface{}, error)
type Routing ¶
type Routing struct {
// contains filtered or unexported fields
}
func NewRouting ¶
func NewRouting() *Routing
func (*Routing) SetAllocation ¶
func (routing *Routing) SetAllocation(allocation *Allocation) *Routing
func (*Routing) SetRebalance ¶
type Scheduler ¶
type Scheduler struct {
// contains filtered or unexported fields
}
func NewScheduler ¶
func NewScheduler() *Scheduler
func (*Scheduler) SetAutoThrottle ¶
func (*Scheduler) SetMaxMergeCount ¶
func (*Scheduler) SetMaxThreadCount ¶
type Search ¶
type Search struct {
// contains filtered or unexported fields
}
func (*Search) SetSlowlog ¶
func (*Search) SetThrottled ¶
type Settings ¶
type Settings struct {
// contains filtered or unexported fields
}
func NewSettings ¶
type Shard ¶
type Shard struct {
// contains filtered or unexported fields
}
func NewShard ¶
func NewShard(checkOnStartup CheckOnStartup) *Shard
type Similarity ¶
type Similarity interface {
	Type() SimilarityType
	Name() SimilarityName
	Source() (interface{}, error)
}
type SimilarityBM25 ¶
type SimilarityBM25 struct {
// contains filtered or unexported fields
}
see https://en.wikipedia.org/wiki/Okapi_BM25
func NewSimilarityBM25 ¶
func NewSimilarityBM25(name string) *SimilarityBM25
func (*SimilarityBM25) Name ¶
func (similarity *SimilarityBM25) Name() SimilarityName
func (*SimilarityBM25) SetB ¶
func (similarity *SimilarityBM25) SetB(b float32) *SimilarityBM25
func (*SimilarityBM25) SetDiscountOverlaps ¶
func (similarity *SimilarityBM25) SetDiscountOverlaps(discountOverlaps bool) *SimilarityBM25
func (*SimilarityBM25) SetK1 ¶
func (similarity *SimilarityBM25) SetK1(k1 float32) *SimilarityBM25
func (*SimilarityBM25) Source ¶
func (similarity *SimilarityBM25) Source() (interface{}, error)
func (*SimilarityBM25) Type ¶
func (similarity *SimilarityBM25) Type() SimilarityType
type SimilarityDFI ¶
type SimilarityDFI struct {
// contains filtered or unexported fields
}
see https://trec.nist.gov/pubs/trec21/papers/irra.web.nb.pdf
func NewSimilarityDFI ¶
func NewSimilarityDFI(name string) *SimilarityDFI
func (*SimilarityDFI) Name ¶
func (similarity *SimilarityDFI) Name() SimilarityName
func (*SimilarityDFI) SetIndependenceMeasure ¶
func (similarity *SimilarityDFI) SetIndependenceMeasure(independenceMeasure SimilarityDFIIndependenceMeasureType) *SimilarityDFI
func (*SimilarityDFI) Source ¶
func (similarity *SimilarityDFI) Source() (interface{}, error)
func (*SimilarityDFI) Type ¶
func (similarity *SimilarityDFI) Type() SimilarityType
type SimilarityDFIIndependenceMeasureType ¶
type SimilarityDFIIndependenceMeasureType string
type SimilarityDFR ¶
type SimilarityDFR struct {
// contains filtered or unexported fields
}
see https://en.wikipedia.org/wiki/Divergence-from-randomness_model
func NewSimilarityDFR ¶
func NewSimilarityDFR(name string) *SimilarityDFR
func (*SimilarityDFR) Name ¶
func (similarity *SimilarityDFR) Name() SimilarityName
func (*SimilarityDFR) SetAfterEffect ¶
func (similarity *SimilarityDFR) SetAfterEffect(afterEffect SimilarityDFRAfterEffectType) *SimilarityDFR
func (*SimilarityDFR) SetBasicModel ¶
func (similarity *SimilarityDFR) SetBasicModel(basicModel SimilarityDFRBasicModelType) *SimilarityDFR
func (*SimilarityDFR) SetNormalization ¶
func (similarity *SimilarityDFR) SetNormalization(normalization SimilarityDFRNormalizationType, value float32) *SimilarityDFR
func (*SimilarityDFR) Source ¶
func (similarity *SimilarityDFR) Source() (interface{}, error)
func (*SimilarityDFR) Type ¶
func (similarity *SimilarityDFR) Type() SimilarityType
type SimilarityDFRAfterEffectType ¶
type SimilarityDFRAfterEffectType string
type SimilarityDFRBasicModelType ¶
type SimilarityDFRBasicModelType string
type SimilarityDFRNormalizationType ¶
type SimilarityDFRNormalizationType string
type SimilarityDefault ¶
type SimilarityDefault struct {
// contains filtered or unexported fields
}
func NewSimilarityBoolean ¶
func NewSimilarityBoolean(name string) *SimilarityDefault
func NewSimilarityClassic ¶
func NewSimilarityClassic(name string) *SimilarityDefault
func (*SimilarityDefault) Name ¶
func (similarity *SimilarityDefault) Name() SimilarityName
func (*SimilarityDefault) Source ¶
func (similarity *SimilarityDefault) Source() (interface{}, error)
func (*SimilarityDefault) Type ¶
func (similarity *SimilarityDefault) Type() SimilarityType
type SimilarityIB ¶
type SimilarityIB struct {
// contains filtered or unexported fields
}
see https://lucene.apache.org/core/8_1_0/core/org/apache/lucene/search/similarities/IBSimilarity.html
func NewSimilarityIB ¶
func NewSimilarityIB(name string) *SimilarityIB
func (*SimilarityIB) Name ¶
func (similarity *SimilarityIB) Name() SimilarityName
func (*SimilarityIB) SetDistribution ¶
func (similarity *SimilarityIB) SetDistribution(distribution SimilarityIBDistributionType) *SimilarityIB
func (*SimilarityIB) SetLambda ¶
func (similarity *SimilarityIB) SetLambda(lambda SimilarityIBLambdaType) *SimilarityIB
func (*SimilarityIB) SetNormalization ¶
func (similarity *SimilarityIB) SetNormalization(normalization SimilarityDFRNormalizationType, value float32) *SimilarityIB
func (*SimilarityIB) Source ¶
func (similarity *SimilarityIB) Source() (interface{}, error)
func (*SimilarityIB) Type ¶
func (similarity *SimilarityIB) Type() SimilarityType
type SimilarityIBDistributionType ¶
type SimilarityIBDistributionType string
type SimilarityIBLambdaType ¶
type SimilarityIBLambdaType string
type SimilarityLMDirichlet ¶
type SimilarityLMDirichlet struct {
// contains filtered or unexported fields
}
func NewSimilarityLMDirichlet ¶
func NewSimilarityLMDirichlet(name string) *SimilarityLMDirichlet
func (*SimilarityLMDirichlet) Name ¶
func (similarity *SimilarityLMDirichlet) Name() SimilarityName
func (*SimilarityLMDirichlet) SetMU ¶
func (similarity *SimilarityLMDirichlet) SetMU(mu float32) *SimilarityLMDirichlet
func (*SimilarityLMDirichlet) Source ¶
func (similarity *SimilarityLMDirichlet) Source() (interface{}, error)
func (*SimilarityLMDirichlet) Type ¶
func (similarity *SimilarityLMDirichlet) Type() SimilarityType
type SimilarityLMJelinekMercer ¶
type SimilarityLMJelinekMercer struct {
// contains filtered or unexported fields
}
see https://lucene.apache.org/core/8_6_2/core/org/apache/lucene/search/similarities/LMJelinekMercerSimilarity.html or see https://www.researchgate.net/publication/221152474_A_Comparative_Study_of_Probabalistic_and_Language_Models_for_Information_Retrieval
func NewSimilarityLMJelinekMercer ¶
func NewSimilarityLMJelinekMercer(name string) *SimilarityLMJelinekMercer
func (*SimilarityLMJelinekMercer) Name ¶
func (similarity *SimilarityLMJelinekMercer) Name() SimilarityName
func (*SimilarityLMJelinekMercer) SetLambda ¶
func (similarity *SimilarityLMJelinekMercer) SetLambda(lambda float32) *SimilarityLMJelinekMercer
func (*SimilarityLMJelinekMercer) Source ¶
func (similarity *SimilarityLMJelinekMercer) Source() (interface{}, error)
func (*SimilarityLMJelinekMercer) Type ¶
func (similarity *SimilarityLMJelinekMercer) Type() SimilarityType
type SimilarityName ¶
type SimilarityName string
type SimilarityScript ¶
type SimilarityScript struct {
// contains filtered or unexported fields
}
func NewSimilarityScript ¶
func NewSimilarityScript(script *elastic.Script) *SimilarityScript
func (*SimilarityScript) Source ¶
func (script *SimilarityScript) Source() (interface{}, error)
type SimilarityScripted ¶
type SimilarityScripted struct {
// contains filtered or unexported fields
}
func NewSimilarityScripted ¶
func NewSimilarityScripted(name string) *SimilarityScripted
func (*SimilarityScripted) Name ¶
func (similarity *SimilarityScripted) Name() SimilarityName
func (*SimilarityScripted) SetScript ¶
func (similarity *SimilarityScripted) SetScript(script *SimilarityScript) *SimilarityScripted
func (*SimilarityScripted) SetWeightScript ¶
func (similarity *SimilarityScripted) SetWeightScript(script *SimilarityScript) *SimilarityScripted
func (*SimilarityScripted) Source ¶
func (similarity *SimilarityScripted) Source() (interface{}, error)
func (*SimilarityScripted) Type ¶
func (similarity *SimilarityScripted) Type() SimilarityType
type SimilarityType ¶
type SimilarityType string
type SlowLogLevel ¶
type SlowLogLevel string
const (
	SlowLogWarn  SlowLogLevel = "WARN"
	SlowLogTrace SlowLogLevel = "TRACE"
	SlowLogDebug SlowLogLevel = "DEBUG"
	SlowLogInfo  SlowLogLevel = "INFO"
)
type Slowlog ¶
type Slowlog struct {
// contains filtered or unexported fields
}
func NewSlowlog ¶
func NewSlowlog() *Slowlog
func (*Slowlog) SetLevel ¶
func (slowlog *Slowlog) SetLevel(level SlowLogLevel) *Slowlog
func (*Slowlog) SetReformat ¶
func (*Slowlog) SetThreshold ¶
type SoftDeletes ¶
type SoftDeletes struct {
// contains filtered or unexported fields
}
func NewSoftDeletes ¶
func NewSoftDeletes() *SoftDeletes
func (*SoftDeletes) SetEnabled ¶
func (softDeletes *SoftDeletes) SetEnabled(enabled bool) *SoftDeletes
func (*SoftDeletes) SetRetention ¶
func (softDeletes *SoftDeletes) SetRetention(retention *Retention) *SoftDeletes
func (*SoftDeletes) SetRetentionLease ¶
func (softDeletes *SoftDeletes) SetRetentionLease(retentionLease *RetentionLease) *SoftDeletes
func (*SoftDeletes) Source ¶
func (softDeletes *SoftDeletes) Source() (interface{}, error)
type StemmerOverrideRule ¶
type StemmerOverrideRule struct {
// contains filtered or unexported fields
}
func NewStemmerOverrideRule ¶
func NewStemmerOverrideRule(stemmer string, words ...string) *StemmerOverrideRule
func (*StemmerOverrideRule) String ¶
func (rule *StemmerOverrideRule) String() string
type SynonymSolr ¶
type SynonymSolr struct {
// contains filtered or unexported fields
}
func NewSynonymSolr ¶
func NewSynonymSolr() *SynonymSolr
func (*SynonymSolr) AddSynonyms ¶
func (synonym *SynonymSolr) AddSynonyms(synonyms ...string) *SynonymSolr
func (*SynonymSolr) AddWords ¶
func (synonym *SynonymSolr) AddWords(words ...string) *SynonymSolr
func (*SynonymSolr) Synonym ¶
func (synonym *SynonymSolr) Synonym() string
type SynonymWordnet ¶
type SynonymWordnet struct {
// contains filtered or unexported fields
}
func NewSynonymWordnet ¶
func NewSynonymWordnet() *SynonymWordnet
func (*SynonymWordnet) SetSenseNumber ¶
func (synonym *SynonymWordnet) SetSenseNumber(senseNumber uint32) *SynonymWordnet
func (*SynonymWordnet) SetSynsetId ¶
func (synonym *SynonymWordnet) SetSynsetId(synsetId uint32) *SynonymWordnet
func (*SynonymWordnet) SetSynsetType ¶
func (synonym *SynonymWordnet) SetSynsetType(synsetType string) *SynonymWordnet
func (*SynonymWordnet) SetTagCount ¶
func (synonym *SynonymWordnet) SetTagCount(tagCount uint32) *SynonymWordnet
func (*SynonymWordnet) SetWord ¶
func (synonym *SynonymWordnet) SetWord(word string) *SynonymWordnet
func (*SynonymWordnet) SetWordNumber ¶
func (synonym *SynonymWordnet) SetWordNumber(wordNumber uint32) *SynonymWordnet
func (*SynonymWordnet) Synonym ¶
func (synonym *SynonymWordnet) Synonym() string
type Threshold ¶
type Threshold struct {
// contains filtered or unexported fields
}
func NewThreshold ¶
func NewThreshold() *Threshold
type Tokenizer ¶
type Tokenizer interface {
	Type() TokenizerType
	Name() TokenizerName
	Source() (interface{}, error)
}
type TokenizerCharGroup ¶
type TokenizerCharGroup struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-chargroup-tokenizer.html
func NewTokenizerCharGroup ¶
func NewTokenizerCharGroup(name string) *TokenizerCharGroup
func (*TokenizerCharGroup) AddTokenizeOnChars ¶
func (tokenizer *TokenizerCharGroup) AddTokenizeOnChars(tokenizeOnChars ...string) *TokenizerCharGroup
func (*TokenizerCharGroup) Name ¶
func (tokenizer *TokenizerCharGroup) Name() TokenizerName
func (*TokenizerCharGroup) SetMaxTokenLength ¶
func (tokenizer *TokenizerCharGroup) SetMaxTokenLength(maxTokenLength uint32) *TokenizerCharGroup
func (*TokenizerCharGroup) Source ¶
func (tokenizer *TokenizerCharGroup) Source() (interface{}, error)
func (*TokenizerCharGroup) Type ¶
func (tokenizer *TokenizerCharGroup) Type() TokenizerType
type TokenizerClassic ¶
type TokenizerClassic struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-classic-tokenizer.html
func NewTokenizerClassic ¶
func NewTokenizerClassic(name string) *TokenizerClassic
func (*TokenizerClassic) Name ¶
func (tokenizer *TokenizerClassic) Name() TokenizerName
func (*TokenizerClassic) SetMaxTokenLength ¶
func (tokenizer *TokenizerClassic) SetMaxTokenLength(maxTokenLength uint32) *TokenizerClassic
func (*TokenizerClassic) Source ¶
func (tokenizer *TokenizerClassic) Source() (interface{}, error)
func (*TokenizerClassic) Type ¶
func (tokenizer *TokenizerClassic) Type() TokenizerType
type TokenizerKeyword ¶
type TokenizerKeyword struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-keyword-tokenizer.html
func NewTokenizerKeyword ¶
func NewTokenizerKeyword(name string) *TokenizerKeyword
func (*TokenizerKeyword) Name ¶
func (tokenizer *TokenizerKeyword) Name() TokenizerName
func (*TokenizerKeyword) SetBufferSize ¶
func (tokenizer *TokenizerKeyword) SetBufferSize(bufferSize uint32) *TokenizerKeyword
func (*TokenizerKeyword) Source ¶
func (tokenizer *TokenizerKeyword) Source() (interface{}, error)
func (*TokenizerKeyword) Type ¶
func (tokenizer *TokenizerKeyword) Type() TokenizerType
type TokenizerName ¶
type TokenizerName string
type TokenizerNgram ¶
type TokenizerNgram struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-ngram-tokenizer.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-edgengram-tokenizer.html
func NewTokenizerEdgeNgram ¶
func NewTokenizerEdgeNgram(name string) *TokenizerNgram
func NewTokenizerNgram ¶
func NewTokenizerNgram(name string) *TokenizerNgram
func (*TokenizerNgram) AddCustomTokenChars ¶
func (tokenizer *TokenizerNgram) AddCustomTokenChars(customTokenChars ...string) *TokenizerNgram
func (*TokenizerNgram) AddTokenChars ¶
func (tokenizer *TokenizerNgram) AddTokenChars(tokenChars ...EdgeNgramTokenChars) *TokenizerNgram
func (*TokenizerNgram) Name ¶
func (tokenizer *TokenizerNgram) Name() TokenizerName
func (*TokenizerNgram) SetMaxGram ¶
func (tokenizer *TokenizerNgram) SetMaxGram(maxGram uint32) *TokenizerNgram
func (*TokenizerNgram) SetMinGram ¶
func (tokenizer *TokenizerNgram) SetMinGram(minGram uint32) *TokenizerNgram
func (*TokenizerNgram) Source ¶
func (tokenizer *TokenizerNgram) Source() (interface{}, error)
func (*TokenizerNgram) Type ¶
func (tokenizer *TokenizerNgram) Type() TokenizerType
type TokenizerPathHierarchy ¶
type TokenizerPathHierarchy struct {
// contains filtered or unexported fields
}
func NewTokenizerPathHierarchy ¶
func NewTokenizerPathHierarchy(name string) *TokenizerPathHierarchy
func (*TokenizerPathHierarchy) Name ¶
func (tokenizer *TokenizerPathHierarchy) Name() TokenizerName
func (*TokenizerPathHierarchy) SetBufferSize ¶
func (tokenizer *TokenizerPathHierarchy) SetBufferSize(bufferSize uint32) *TokenizerPathHierarchy
func (*TokenizerPathHierarchy) SetDelimiter ¶
func (tokenizer *TokenizerPathHierarchy) SetDelimiter(delimiter string) *TokenizerPathHierarchy
func (*TokenizerPathHierarchy) SetReplacement ¶
func (tokenizer *TokenizerPathHierarchy) SetReplacement(replacement string) *TokenizerPathHierarchy
func (*TokenizerPathHierarchy) SetReverse ¶
func (tokenizer *TokenizerPathHierarchy) SetReverse(reverse bool) *TokenizerPathHierarchy
func (*TokenizerPathHierarchy) SetSkip ¶
func (tokenizer *TokenizerPathHierarchy) SetSkip(skip uint32) *TokenizerPathHierarchy
func (*TokenizerPathHierarchy) Source ¶
func (tokenizer *TokenizerPathHierarchy) Source() (interface{}, error)
func (*TokenizerPathHierarchy) Type ¶
func (tokenizer *TokenizerPathHierarchy) Type() TokenizerType
type TokenizerPattern ¶
type TokenizerPattern struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-pattern-tokenizer.html
func NewTokenizerPattern ¶
func NewTokenizerPattern(name string) *TokenizerPattern
func (*TokenizerPattern) AddFlags ¶
func (tokenizer *TokenizerPattern) AddFlags(flags ...JavaRegularFlag) *TokenizerPattern
func (*TokenizerPattern) Name ¶
func (tokenizer *TokenizerPattern) Name() TokenizerName
func (*TokenizerPattern) SetGroup ¶
func (tokenizer *TokenizerPattern) SetGroup(group uint8) *TokenizerPattern
func (*TokenizerPattern) SetPattern ¶
func (tokenizer *TokenizerPattern) SetPattern(pattern string) *TokenizerPattern
func (*TokenizerPattern) Source ¶
func (tokenizer *TokenizerPattern) Source() (interface{}, error)
func (*TokenizerPattern) Type ¶
func (tokenizer *TokenizerPattern) Type() TokenizerType
type TokenizerSimplePattern ¶
type TokenizerSimplePattern struct {
// contains filtered or unexported fields
}
func NewTokenizerSimplePattern ¶
func NewTokenizerSimplePattern(name string) *TokenizerSimplePattern
func NewTokenizerSimplePatternSplit ¶
func NewTokenizerSimplePatternSplit(name string) *TokenizerSimplePattern
func (*TokenizerSimplePattern) Name ¶
func (tokenizer *TokenizerSimplePattern) Name() TokenizerName
func (*TokenizerSimplePattern) SetPattern ¶
func (tokenizer *TokenizerSimplePattern) SetPattern(pattern string) *TokenizerSimplePattern
func (*TokenizerSimplePattern) Source ¶
func (tokenizer *TokenizerSimplePattern) Source() (interface{}, error)
func (*TokenizerSimplePattern) Type ¶
func (tokenizer *TokenizerSimplePattern) Type() TokenizerType
type TokenizerStandard ¶
type TokenizerStandard struct {
// contains filtered or unexported fields
}
see https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-standard-tokenizer.html
func NewTokenizerStandard ¶
func NewTokenizerStandard(name string) *TokenizerStandard
func NewTokenizerUaxUrlEmail ¶
func NewTokenizerUaxUrlEmail(name string) *TokenizerStandard
func NewTokenizerWhitespace ¶
func NewTokenizerWhitespace(name string) *TokenizerStandard
func (*TokenizerStandard) Name ¶
func (tokenizer *TokenizerStandard) Name() TokenizerName
func (*TokenizerStandard) SetMaxTokenLength ¶
func (tokenizer *TokenizerStandard) SetMaxTokenLength(maxTokenLength uint16) *TokenizerStandard
func (*TokenizerStandard) Source ¶
func (tokenizer *TokenizerStandard) Source() (interface{}, error)
func (*TokenizerStandard) Type ¶
func (tokenizer *TokenizerStandard) Type() TokenizerType
type TokenizerType ¶
type TokenizerType string
type Translog ¶
type Translog struct {
// contains filtered or unexported fields
}
func NewTranslog ¶
func NewTranslog() *Translog
func (*Translog) SetDurability ¶
func (translog *Translog) SetDurability(durability TranslogDurability) *Translog
func (*Translog) SetFlushThresholdSize ¶
func (*Translog) SetSyncInterval ¶
type TranslogDurability ¶
type TranslogDurability string
const (
	TranslogDurabilityRequest TranslogDurability = "request"
	TranslogDurabilityAsync   TranslogDurability = "async"
)
type Unassigned ¶
type Unassigned struct {
// contains filtered or unexported fields
}
func NewUnassigned ¶
func NewUnassigned(nodeLeft *NodeLeft) *Unassigned
func (*Unassigned) Source ¶
func (unassigned *Unassigned) Source() (interface{}, error)
type WordDelimiter ¶
type WordDelimiter string
type WordDelimiterType ¶
type WordDelimiterType struct {
// contains filtered or unexported fields
}
func NewWordDelimiterType ¶
func NewWordDelimiterType(word string, _type WordDelimiter) *WordDelimiterType
func (*WordDelimiterType) String ¶
func (wordDelimiter *WordDelimiterType) String() string
Source Files ¶
- allocation.go
- analysis.go
- analyze.go
- analyzer.go
- blocks.go
- char_filter.go
- filter.go
- highlight.go
- idle.go
- index.go
- interval.go
- limit.go
- log_level.go
- mapping.go
- merge.go
- nodeleft.go
- normalizer.go
- policy.go
- rebalance.go
- regexp.go
- retention.go
- retention_lease.go
- routing.go
- scheduler.go
- search.go
- settings.go
- shard.go
- similarity.go
- size.go
- slowlog.go
- soft_deletes.go
- threshold.go
- tokenizer.go
- translog.go
- unassigned.go
- write.go