Documentation ¶
Index ¶
- type AcknowledgeState
- type Acknowledgement
- type ActionStatus
- type ActivationState
- type ActivationStatus
- type AdaptiveSelection
- type AddAction
- type AdjacencyMatrixAggregate
- type AdjacencyMatrixAggregation
- type AdjacencyMatrixBucket
- type Aggregate
- type AggregateMetricDoubleProperty
- type AggregateOrder
- type AggregateOutput
- type Aggregation
- type AggregationBreakdown
- type AggregationProfile
- type AggregationProfileDebug
- type AggregationProfileDelegateDebugFilter
- type AggregationRange
- type Aggregations
- type Alias
- type AliasDefinition
- type AliasesRecord
- type AllField
- type AllocationDecision
- type AllocationRecord
- type AllocationStore
- type AlwaysCondition
- type AnalysisConfig
- type AnalysisConfigRead
- type AnalysisLimits
- type AnalysisMemoryLimit
- type Analytics
- type AnalyticsStatistics
- type AnalyzeDetail
- type AnalyzeToken
- type Analyzer
- type AnalyzerDetail
- type Anomaly
- type AnomalyCause
- type AnomalyDetectors
- type AnomalyExplanation
- type ApiKey
- type ApiKeyAuthorization
- type AppendProcessor
- type ApplicationGlobalUserPrivileges
- type ApplicationPrivileges
- type ApplicationPrivilegesCheck
- type ApplicationsPrivileges
- type Archive
- type ArrayCompareCondition
- type ArrayCompareOpParams
- type ArrayPercentilesItem
- type AsciiFoldingTokenFilter
- type AsyncSearch
- type AttachmentProcessor
- type Audit
- type AuthenticateToken
- type AuthenticatedUser
- type AuthenticationProvider
- type AutoDateHistogramAggregate
- type AutoDateHistogramAggregation
- type AutoFollowPattern
- type AutoFollowPatternSummary
- type AutoFollowStats
- type AutoFollowedCluster
- type AutoscalingCapacity
- type AutoscalingDecider
- type AutoscalingDeciders
- type AutoscalingNode
- type AutoscalingPolicy
- type AutoscalingResources
- type AverageAggregation
- type AverageBucketAggregation
- type AvgAggregate
- type Base
- type BaseNode
- type BinaryProperty
- type BoolQuery
- type BooleanProperty
- type BoostingQuery
- type BoxPlotAggregate
- type BoxplotAggregation
- type Breaker
- type BucketCorrelationAggregation
- type BucketCorrelationFunction
- type BucketCorrelationFunctionCountCorrelation
- type BucketCorrelationFunctionCountCorrelationIndicator
- type BucketInfluencer
- type BucketKsAggregation
- type BucketMetricValueAggregate
- type BucketPathAggregation
- type BucketScriptAggregation
- type BucketSelectorAggregation
- type BucketSortAggregation
- type BucketSummary
- type BucketsAdjacencyMatrixBucket
- type BucketsCompositeBucket
- type BucketsDateHistogramBucket
- type BucketsDoubleTermsBucket
- type BucketsFiltersBucket
- type BucketsGeoHashGridBucket
- type BucketsGeoHexGridBucket
- type BucketsGeoTileGridBucket
- type BucketsHistogramBucket
- type BucketsIpPrefixBucket
- type BucketsIpRangeBucket
- type BucketsLongRareTermsBucket
- type BucketsLongTermsBucket
- type BucketsMultiTermsBucket
- type BucketsPath
- type BucketsQuery
- type BucketsRangeBucket
- type BucketsSignificantLongTermsBucket
- type BucketsSignificantStringTermsBucket
- type BucketsStringRareTermsBucket
- type BucketsStringTermsBucket
- type BucketsVariableWidthHistogramBucket
- type BucketsVoid
- type BuildInformation
- type BulkIndexByScrollFailure
- type BulkStats
- type ByteNumberProperty
- type ByteSize
- type BytesProcessor
- type CacheQueries
- type CacheStats
- type Calendar
- type CalendarEvent
- type CardinalityAggregate
- type CardinalityAggregation
- type CatAnonalyDetectorColumns
- type CatComponentTemplate
- type CatDatafeedColumns
- type CatDfaColumns
- type CatTrainedModelsColumns
- type CatTransformColumns
- type CategorizationAnalyzer
- type CategorizationAnalyzerDefinition
- type CategorizeTextAggregation
- type CategorizeTextAnalyzer
- type Category
- type Ccr
- type CcrShardStats
- type CertificateInformation
- type Cgroup
- type CgroupCpu
- type CgroupCpuStat
- type CgroupMemory
- type ChainInput
- type CharFilter
- type CharFilterDefinition
- type CharFilterDetail
- type CharFilterTypes
- type CharGroupTokenizer
- type CheckpointStats
- type Checkpointing
- type ChiSquareHeuristic
- type ChildrenAggregate
- type ChildrenAggregation
- type ChunkingConfig
- type CircleProcessor
- type ClassificationInferenceOptions
- type CleanupRepositoryResults
- type Client
- type CloseIndexResult
- type CloseShardResult
- type ClusterAppliedStats
- type ClusterComponentTemplate
- type ClusterFileSystem
- type ClusterIndexingPressure
- type ClusterIndices
- type ClusterIndicesShards
- type ClusterIndicesShardsIndex
- type ClusterInfo
- type ClusterIngest
- type ClusterJvm
- type ClusterJvmMemory
- type ClusterJvmVersion
- type ClusterNetworkTypes
- type ClusterNode
- type ClusterNodeCount
- type ClusterNodes
- type ClusterOperatingSystem
- type ClusterOperatingSystemArchitecture
- type ClusterOperatingSystemName
- type ClusterOperatingSystemPrettyName
- type ClusterPressureMemory
- type ClusterProcess
- type ClusterProcessCpu
- type ClusterProcessOpenFileDescriptors
- type ClusterProcessor
- type ClusterRemoteInfo
- type ClusterRemoteProxyInfo
- type ClusterRemoteSniffInfo
- type ClusterRuntimeFieldTypes
- type ClusterShardMetrics
- type ClusterStateQueue
- type ClusterStateUpdate
- type ClusterStatistics
- type Collector
- type Column
- type CombinedFieldsQuery
- type Command
- type CommandAllocatePrimaryAction
- type CommandAllocateReplicaAction
- type CommandCancelAction
- type CommandMoveAction
- type CommonGramsTokenFilter
- type CommonTermsQuery
- type CompactNodeInfo
- type CompletionContext
- type CompletionProperty
- type CompletionStats
- type CompletionSuggest
- type CompletionSuggestOption
- type CompletionSuggester
- type ComponentTemplateNode
- type ComponentTemplateSummary
- type CompositeAggregate
- type CompositeAggregateKey
- type CompositeAggregation
- type CompositeAggregationSource
- type CompositeBucket
- type ConditionTokenFilter
- type Configuration
- type Configurations
- type ConfusionMatrixItem
- type ConfusionMatrixPrediction
- type ConfusionMatrixThreshold
- type Connection
- type ConstantKeywordProperty
- type ConstantScoreQuery
- type Context
- type ContextMethod
- type ContextMethodParam
- type ConvertProcessor
- type CoordinatorStats
- type CoordsGeoBounds
- type CoreKnnQuery
- type CountRecord
- type Counter
- type Cpu
- type CpuAcct
- type CreatedStatus
- type CsvProcessor
- type CumulativeCardinalityAggregate
- type CumulativeCardinalityAggregation
- type CumulativeSumAggregation
- type CurrentNode
- type CustomAnalyzer
- type CustomCategorizeTextAnalyzer
- type CustomNormalizer
- type CustomSettings
- type DailySchedule
- type DanglingIndex
- type DataCounts
- type DataDescription
- type DataEmailAttachment
- type DataFrameAnalyticsRecord
- type DataPathStats
- type DataStream
- type DataStreamIndex
- type DataStreamNames
- type DataStreamTimestamp
- type DataStreamTimestampField
- type DataStreamVisibility
- type DataStreams
- type DataStreamsStatsItem
- type DataTierPhaseStatistics
- type DataTiers
- type DatafeedAuthorization
- type DatafeedConfig
- type DatafeedRunningState
- type DatafeedStats
- type DatafeedTimingStats
- type Datafeeds
- type DatafeedsRecord
- type DataframeAnalysis
- type DataframeAnalysisAnalyzedFields
- type DataframeAnalysisClassification
- type DataframeAnalysisContainer
- type DataframeAnalysisFeatureProcessor
- type DataframeAnalysisFeatureProcessorFrequencyEncoding
- type DataframeAnalysisFeatureProcessorMultiEncoding
- type DataframeAnalysisFeatureProcessorNGramEncoding
- type DataframeAnalysisFeatureProcessorOneHotEncoding
- type DataframeAnalysisFeatureProcessorTargetMeanEncoding
- type DataframeAnalysisOutlierDetection
- type DataframeAnalysisRegression
- type DataframeAnalytics
- type DataframeAnalyticsAuthorization
- type DataframeAnalyticsDestination
- type DataframeAnalyticsFieldSelection
- type DataframeAnalyticsMemoryEstimation
- type DataframeAnalyticsSource
- type DataframeAnalyticsStatsContainer
- type DataframeAnalyticsStatsDataCounts
- type DataframeAnalyticsStatsHyperparameters
- type DataframeAnalyticsStatsMemoryUsage
- type DataframeAnalyticsStatsOutlierDetection
- type DataframeAnalyticsStatsProgress
- type DataframeAnalyticsSummary
- type DataframeClassificationSummary
- type DataframeClassificationSummaryAccuracy
- type DataframeClassificationSummaryMulticlassConfusionMatrix
- type DataframeClassificationSummaryPrecision
- type DataframeClassificationSummaryRecall
- type DataframeEvaluationClass
- type DataframeEvaluationClassification
- type DataframeEvaluationClassificationMetrics
- type DataframeEvaluationClassificationMetricsAucRoc
- type DataframeEvaluationContainer
- type DataframeEvaluationMetrics
- type DataframeEvaluationOutlierDetection
- type DataframeEvaluationOutlierDetectionMetrics
- type DataframeEvaluationRegression
- type DataframeEvaluationRegressionMetrics
- type DataframeEvaluationRegressionMetricsHuber
- type DataframeEvaluationRegressionMetricsMsle
- type DataframeEvaluationSummaryAucRoc
- type DataframeEvaluationSummaryAucRocCurveItem
- type DataframeEvaluationValue
- type DataframeOutlierDetectionSummary
- type DataframePreviewConfig
- type DataframeRegressionSummary
- type DateDecayFunction
- type DateDistanceFeatureQuery
- type DateHistogramAggregate
- type DateHistogramAggregation
- type DateHistogramBucket
- type DateHistogramGrouping
- type DateIndexNameProcessor
- type DateNanosProperty
- type DateProcessor
- type DateProperty
- type DateRangeAggregate
- type DateRangeAggregation
- type DateRangeExpression
- type DateRangeProperty
- type DateRangeQuery
- type DateTime
- type DecayFunction
- type DecayPlacementDateMathDuration
- type DecayPlacementGeoLocationDistance
- type DecayPlacementdoubledouble
- type Defaults
- type Definition
- type DelayedDataCheckConfig
- type DelimitedPayloadTokenFilter
- type DenseVectorIndexOptions
- type DenseVectorProperty
- type Deprecation
- type DeprecationIndexing
- type DerivativeAggregate
- type DerivativeAggregation
- type DetectionRule
- type Detector
- type DetectorRead
- type DictionaryDecompounderTokenFilter
- type DirectGenerator
- type DisMaxQuery
- type Discovery
- type DiscoveryNode
- type DiskUsage
- type DissectProcessor
- type DistanceFeatureQuery
- type DistanceFeatureQueryBaseDateMathDuration
- type DistanceFeatureQueryBaseGeoLocationDistance
- type DiversifiedSamplerAggregation
- type DocStats
- type Document
- type DocumentRating
- type DocumentSimulation
- type DotExpanderProcessor
- type DoubleNumberProperty
- type DoubleRangeProperty
- type DoubleTermsAggregate
- type DoubleTermsBucket
- type DownsampleConfig
- type DropProcessor
- type Duration
- type DurationValueUnitFloatMillis
- type DurationValueUnitMillis
- type DurationValueUnitNanos
- type DurationValueUnitSeconds
- type DutchAnalyzer
- type DynamicProperty
- type DynamicTemplate
- type EdgeNGramTokenFilter
- type EdgeNGramTokenizer
- type ElasticsearchError
- type ElasticsearchVersionInfo
- type ElisionTokenFilter
- type Email
- type EmailAction
- type EmailAttachmentContainer
- type EmailBody
- type EmailResult
- type EmptyObject
- type EnrichPolicy
- type EnrichProcessor
- type Ensemble
- type EpochTimeUnitMillis
- type EpochTimeUnitSeconds
- type Eql
- type EqlFeatures
- type EqlFeaturesJoin
- type EqlFeaturesKeys
- type EqlFeaturesPipes
- type EqlFeaturesSequences
- type EqlHits
- type ErrorCause
- type EwmaModelSettings
- type EwmaMovingAverageAggregation
- type ExecuteEnrichPolicyStatus
- type ExecutingPolicy
- type ExecutionResult
- type ExecutionResultAction
- type ExecutionResultCondition
- type ExecutionResultInput
- type ExecutionState
- type ExecutionThreadPool
- type ExistsQuery
- type ExpandWildcards
- type ExplainAnalyzeToken
- type Explanation
- type ExplanationDetail
- type ExploreControls
- type ExtendedBoundsFieldDateMath
- type ExtendedBoundsdouble
- type ExtendedMemoryStats
- type ExtendedStatsAggregate
- type ExtendedStatsAggregation
- type ExtendedStatsBucketAggregate
- type ExtendedStatsBucketAggregation
- type FailProcessor
- type Feature
- type FeatureToggle
- type Features
- type FetchProfile
- type FetchProfileBreakdown
- type FetchProfileDebug
- type FieldAliasProperty
- type FieldAndFormat
- type FieldCapability
- type FieldCollapse
- type FieldDateMath
- type FieldLookup
- type FieldMapping
- type FieldMemoryUsage
- type FieldMetric
- type FieldNamesField
- type FieldRule
- type FieldSecurity
- type FieldSizeUsage
- type FieldSort
- type FieldStatistics
- type FieldSuggester
- type FieldSummary
- type FieldTypes
- type FieldTypesMappings
- type FieldValue
- type FieldValueFactorScoreFunction
- type FielddataFrequencyFilter
- type FielddataRecord
- type FielddataStats
- type Fields
- type FieldsUsageBody
- type FileCountSnapshotStats
- type FileDetails
- type FileSystem
- type FileSystemTotal
- type FillMaskInferenceOptions
- type FillMaskInferenceUpdateOptions
- type FilterAggregate
- type FilterRef
- type FiltersAggregate
- type FiltersAggregation
- type FiltersBucket
- type FingerprintAnalyzer
- type FingerprintTokenFilter
- type Flattened
- type FlattenedProperty
- type Float64
- type FloatNumberProperty
- type FloatRangeProperty
- type FlushStats
- type FollowIndexStats
- type FollowStats
- type FollowerIndex
- type FollowerIndexParameters
- type ForceMergeConfiguration
- type ForceMergeResponseBody
- type ForeachProcessor
- type FormattableMetricAggregation
- type FoundStatus
- type FrequencyEncodingPreprocessor
- type FrozenIndices
- type FunctionScore
- type FunctionScoreQuery
- type Fuzziness
- type FuzzyQuery
- type GarbageCollector
- type GarbageCollectorTotal
- type GeoBoundingBoxQuery
- type GeoBounds
- type GeoBoundsAggregate
- type GeoBoundsAggregation
- type GeoCentroidAggregate
- type GeoCentroidAggregation
- type GeoDecayFunction
- type GeoDistanceAggregate
- type GeoDistanceAggregation
- type GeoDistanceFeatureQuery
- type GeoDistanceQuery
- type GeoDistanceSort
- type GeoHashGridAggregate
- type GeoHashGridAggregation
- type GeoHashGridBucket
- type GeoHashLocation
- type GeoHashPrecision
- type GeoHexGridAggregate
- type GeoHexGridBucket
- type GeoIpDownloadStatistics
- type GeoIpNodeDatabaseName
- type GeoIpNodeDatabases
- type GeoIpProcessor
- type GeoLine
- type GeoLineAggregate
- type GeoLineAggregation
- type GeoLinePoint
- type GeoLineSort
- type GeoLocation
- type GeoPointProperty
- type GeoPolygonPoints
- type GeoPolygonQuery
- type GeoResults
- type GeoShape
- type GeoShapeFieldQuery
- type GeoShapeProperty
- type GeoShapeQuery
- type GeoTileGridAggregate
- type GeoTileGridAggregation
- type GeoTileGridBucket
- type GeohexGridAggregation
- type GetMigrationFeature
- type GetResult
- type GetScriptContext
- type GetStats
- type GetUserProfileErrors
- type GlobalAggregate
- type GlobalAggregation
- type GlobalPrivilege
- type GoogleNormalizedDistanceHeuristic
- type GrantApiKey
- type GrokProcessor
- type Groupings
- type GsubProcessor
- type HalfFloatNumberProperty
- type HasChildQuery
- type HasParentQuery
- type HasPrivilegesUserProfileErrors
- type HdrMethod
- type HdrPercentileRanksAggregate
- type HdrPercentilesAggregate
- type HealthRecord
- type HealthStatistics
- type HelpRecord
- type Highlight
- type HighlightField
- type Hint
- type HistogramAggregate
- type HistogramAggregation
- type HistogramBucket
- type HistogramGrouping
- type HistogramProperty
- type Hit
- type HitsEvent
- type HitsMetadata
- type HitsSequence
- type HoltLinearModelSettings
- type HoltMovingAverageAggregation
- type HoltWintersModelSettings
- type HoltWintersMovingAverageAggregation
- type Hop
- type HotThread
- type HourAndMinute
- type HourlySchedule
- type HtmlStripCharFilter
- type Http
- type HttpEmailAttachment
- type HttpHeaders
- type HttpInput
- type HttpInputAuthentication
- type HttpInputBasicAuthentication
- type HttpInputProxy
- type HttpInputRequestDefinition
- type HttpInputRequestResult
- type HttpInputResponseResult
- type HunspellTokenFilter
- type Hyperparameter
- type Hyperparameters
- type HyphenationDecompounderTokenFilter
- type IcuAnalyzer
- type IcuCollationTokenFilter
- type IcuFoldingTokenFilter
- type IcuNormalizationCharFilter
- type IcuNormalizationTokenFilter
- type IcuTokenizer
- type IcuTransformTokenFilter
- type Ids
- type IdsQuery
- type Ilm
- type IlmActions
- type IlmPolicy
- type IlmPolicyStatistics
- type InProgress
- type IndexAction
- type IndexAliases
- type IndexAndDataStreamAction
- type IndexCapabilities
- type IndexDetails
- type IndexField
- type IndexHealthStats
- type IndexMappingRecord
- type IndexPatterns
- type IndexPrivilegesCheck
- type IndexResult
- type IndexResultSummary
- type IndexRouting
- type IndexRoutingAllocation
- type IndexRoutingAllocationDisk
- type IndexRoutingAllocationInclude
- type IndexRoutingAllocationInitialRecovery
- type IndexRoutingRebalance
- type IndexSegment
- type IndexSegmentSort
- type IndexSettingBlocks
- type IndexSettings
- type IndexSettingsAnalysis
- type IndexSettingsLifecycle
- type IndexSettingsLifecycleStep
- type IndexSettingsTimeSeries
- type IndexState
- type IndexStats
- type IndexTemplate
- type IndexTemplateDataStreamConfiguration
- type IndexTemplateItem
- type IndexTemplateMapping
- type IndexTemplateSummary
- type IndexVersioning
- type IndexingPressureMemorySummary
- type IndexingStats
- type Indices
- type IndicesAction
- type IndicesBlockStatus
- type IndicesIndexingPressure
- type IndicesIndexingPressureMemory
- type IndicesModifyAction
- type IndicesOptions
- type IndicesPrivileges
- type IndicesPrivilegesQuery
- type IndicesRecord
- type IndicesShardStats
- type IndicesShardStores
- type IndicesShardsStats
- type IndicesStats
- type IndicesValidationExplanation
- type IndicesVersions
- type InferenceAggregate
- type InferenceAggregation
- type InferenceClassImportance
- type InferenceConfig
- type InferenceConfigClassification
- type InferenceConfigContainer
- type InferenceConfigCreateContainer
- type InferenceConfigRegression
- type InferenceConfigUpdateContainer
- type InferenceFeatureImportance
- type InferenceProcessor
- type InferenceResponseResult
- type InferenceTopClassEntry
- type Influence
- type Influencer
- type InfoFeatureState
- type IngestPipeline
- type IngestTotal
- type InlineGet
- type InlineGetDictUserDefined
- type InlineScript
- type InnerHits
- type InnerHitsResult
- type Input
- type IntegerNumberProperty
- type IntegerRangeProperty
- type Intervals
- type IntervalsAllOf
- type IntervalsAnyOf
- type IntervalsFilter
- type IntervalsFuzzy
- type IntervalsMatch
- type IntervalsPrefix
- type IntervalsQuery
- type IntervalsWildcard
- type InvertedIndex
- type Invocation
- type Invocations
- type IoStatDevice
- type IoStats
- type IpFilter
- type IpPrefixAggregate
- type IpPrefixAggregation
- type IpPrefixBucket
- type IpProperty
- type IpRangeAggregate
- type IpRangeAggregation
- type IpRangeAggregationRange
- type IpRangeBucket
- type IpRangeProperty
- type Job
- type JobBlocked
- type JobConfig
- type JobForecastStatistics
- type JobStatistics
- type JobStats
- type JobTimingStats
- type JobUsage
- type JobsRecord
- type JoinProcessor
- type JoinProperty
- type JsonProcessor
- type Jvm
- type JvmClasses
- type JvmMemoryStats
- type JvmStats
- type JvmThreads
- type KStemTokenFilter
- type KeepTypesTokenFilter
- type KeepWordsTokenFilter
- type KeyValueProcessor
- type KeyedPercentiles
- type KeyedProcessor
- type KeywordAnalyzer
- type KeywordMarkerTokenFilter
- type KeywordProperty
- type KeywordTokenizer
- type KibanaToken
- type KnnQuery
- type KuromojiAnalyzer
- type KuromojiIterationMarkCharFilter
- type KuromojiPartOfSpeechTokenFilter
- type KuromojiReadingFormTokenFilter
- type KuromojiStemmerTokenFilter
- type KuromojiTokenizer
- type LanguageAnalyzer
- type LanguageContext
- type LaplaceSmoothingModel
- type LatLonGeoLocation
- type Latest
- type LengthTokenFilter
- type LetterTokenizer
- type License
- type LicenseInformation
- type Lifecycle
- type LifecycleExplain
- type LifecycleExplainManaged
- type LifecycleExplainPhaseExecution
- type LifecycleExplainUnmanaged
- type Like
- type LikeDocument
- type LimitTokenCountTokenFilter
- type Limits
- type LinearInterpolationSmoothingModel
- type LinearMovingAverageAggregation
- type LoggingAction
- type LoggingResult
- type LogstashPipeline
- type LongNumberProperty
- type LongRangeProperty
- type LongRareTermsAggregate
- type LongRareTermsBucket
- type LongTermsAggregate
- type LongTermsBucket
- type LowercaseNormalizer
- type LowercaseProcessor
- type LowercaseTokenFilter
- type LowercaseTokenizer
- type MLDatafeed
- type MLFilter
- type MTermVectorsOperation
- type MachineLearning
- type ManageUserPrivileges
- type MapboxVectorTiles
- type MappingCharFilter
- type MappingLimitSettings
- type MappingLimitSettingsDepth
- type MappingLimitSettingsDimensionFields
- type MappingLimitSettingsFieldNameLength
- type MappingLimitSettingsNestedFields
- type MappingLimitSettingsNestedObjects
- type MappingLimitSettingsTotalFields
- type MappingStats
- type MasterRecord
- type MatchAllQuery
- type MatchBoolPrefixQuery
- type MatchNoneQuery
- type MatchOnlyTextProperty
- type MatchPhrasePrefixQuery
- type MatchPhraseQuery
- type MatchQuery
- type MatrixAggregation
- type MatrixStatsAggregate
- type MatrixStatsAggregation
- type MatrixStatsFields
- type MaxAggregate
- type MaxAggregation
- type MaxBucketAggregation
- type MedianAbsoluteDeviationAggregate
- type MedianAbsoluteDeviationAggregation
- type MemMlStats
- type MemStats
- type Memory
- type MemoryStats
- type Merge
- type MergeScheduler
- type MergesStats
- type Metadata
- type Metrics
- type MgetOperation
- type MigrationFeatureIndexInfo
- type MinAggregate
- type MinAggregation
- type MinBucketAggregation
- type MinimalLicenseInformation
- type MinimumShouldMatch
- type Missing
- type MissingAggregate
- type MissingAggregation
- type MlCounter
- type MlDataFrameAnalyticsJobs
- type MlDataFrameAnalyticsJobsAnalysis
- type MlDataFrameAnalyticsJobsCount
- type MlDataFrameAnalyticsJobsMemory
- type MlInference
- type MlInferenceDeployments
- type MlInferenceDeploymentsTimeMs
- type MlInferenceIngestProcessor
- type MlInferenceIngestProcessorCount
- type MlInferenceTrainedModels
- type MlInferenceTrainedModelsCount
- type MlJobForecasts
- type ModelPlotConfig
- type ModelSizeStats
- type ModelSnapshot
- type ModelSnapshotUpgrade
- type Monitoring
- type MoreLikeThisQuery
- type MountedSnapshot
- type MovingAverageAggregation
- type MovingFunctionAggregation
- type MovingPercentilesAggregation
- type MultiBucketAggregateBaseAdjacencyMatrixBucket
- type MultiBucketAggregateBaseCompositeBucket
- type MultiBucketAggregateBaseDateHistogramBucket
- type MultiBucketAggregateBaseDoubleTermsBucket
- type MultiBucketAggregateBaseFiltersBucket
- type MultiBucketAggregateBaseGeoHashGridBucket
- type MultiBucketAggregateBaseGeoHexGridBucket
- type MultiBucketAggregateBaseGeoTileGridBucket
- type MultiBucketAggregateBaseHistogramBucket
- type MultiBucketAggregateBaseIpPrefixBucket
- type MultiBucketAggregateBaseIpRangeBucket
- type MultiBucketAggregateBaseLongRareTermsBucket
- type MultiBucketAggregateBaseLongTermsBucket
- type MultiBucketAggregateBaseMultiTermsBucket
- type MultiBucketAggregateBaseRangeBucket
- type MultiBucketAggregateBaseSignificantLongTermsBucket
- type MultiBucketAggregateBaseSignificantStringTermsBucket
- type MultiBucketAggregateBaseStringRareTermsBucket
- type MultiBucketAggregateBaseStringTermsBucket
- type MultiBucketAggregateBaseVariableWidthHistogramBucket
- type MultiBucketAggregateBaseVoid
- type MultiGetError
- type MultiMatchQuery
- type MultiTermLookup
- type MultiTermsAggregate
- type MultiTermsAggregation
- type MultiTermsBucket
- type MultiplexerTokenFilter
- type Murmur3HashProperty
- type MutualInformationHeuristic
- type NGramTokenFilter
- type NGramTokenizer
- type Names
- type NativeCode
- type NativeCodeInformation
- type NerInferenceOptions
- type NerInferenceUpdateOptions
- type NestedAggregate
- type NestedAggregation
- type NestedIdentity
- type NestedProperty
- type NestedQuery
- type NestedSortValue
- type NeverCondition
- type NlpBertTokenizationConfig
- type NlpRobertaTokenizationConfig
- type NlpTokenizationUpdateOptions
- type Node
- type NodeAllocationExplanation
- type NodeAttributes
- type NodeAttributesRecord
- type NodeBufferPool
- type NodeDiskUsage
- type NodeIds
- type NodeInfo
- type NodeInfoAction
- type NodeInfoAggregation
- type NodeInfoBootstrap
- type NodeInfoClient
- type NodeInfoDiscover
- type NodeInfoHttp
- type NodeInfoIngest
- type NodeInfoIngestDownloader
- type NodeInfoIngestInfo
- type NodeInfoIngestProcessor
- type NodeInfoJvmMemory
- type NodeInfoMemory
- type NodeInfoNetwork
- type NodeInfoNetworkInterface
- type NodeInfoOSCPU
- type NodeInfoPath
- type NodeInfoRepositories
- type NodeInfoRepositoriesUrl
- type NodeInfoScript
- type NodeInfoSearch
- type NodeInfoSearchRemote
- type NodeInfoSettings
- type NodeInfoSettingsCluster
- type NodeInfoSettingsClusterElection
- type NodeInfoSettingsHttp
- type NodeInfoSettingsHttpType
- type NodeInfoSettingsIngest
- type NodeInfoSettingsNetwork
- type NodeInfoSettingsNode
- type NodeInfoSettingsTransport
- type NodeInfoSettingsTransportFeatures
- type NodeInfoSettingsTransportType
- type NodeInfoTransport
- type NodeInfoXpack
- type NodeInfoXpackLicense
- type NodeInfoXpackLicenseType
- type NodeInfoXpackSecurity
- type NodeInfoXpackSecurityAuthc
- type NodeInfoXpackSecurityAuthcRealms
- type NodeInfoXpackSecurityAuthcRealmsStatus
- type NodeInfoXpackSecurityAuthcToken
- type NodeInfoXpackSecuritySsl
- type NodeJvmInfo
- type NodeOperatingSystemInfo
- type NodePackagingType
- type NodeProcessInfo
- type NodeReloadError
- type NodeReloadResult
- type NodeRoles
- type NodeShard
- type NodeShutdownStatus
- type NodeStatistics
- type NodeTasks
- type NodeThreadPoolInfo
- type NodeUsage
- type NodesContext
- type NodesCredentials
- type NodesCredentialsFileToken
- type NodesIndexingPressure
- type NodesIndexingPressureMemory
- type NodesIngest
- type NodesRecord
- type NoriAnalyzer
- type NoriPartOfSpeechTokenFilter
- type NoriTokenizer
- type NormalizeAggregation
- type Normalizer
- type NumberRangeQuery
- type NumericDecayFunction
- type NumericFielddata
- type ObjectProperty
- type OneHotEncodingPreprocessor
- type OperatingSystem
- type OperatingSystemMemoryInfo
- type OutlierDetectionParameters
- type OverallBucket
- type OverallBucketJob
- type Overlapping
- type Page
- type PagerDutyAction
- type PagerDutyContext
- type PagerDutyEvent
- type PagerDutyEventProxy
- type PagerDutyResult
- type PainlessContextSetup
- type ParentAggregate
- type ParentAggregation
- type ParentIdQuery
- type ParentTaskInfo
- type PassThroughInferenceOptions
- type PassThroughInferenceUpdateOptions
- type PathHierarchyTokenizer
- type PatternAnalyzer
- type PatternCaptureTokenFilter
- type PatternReplaceCharFilter
- type PatternReplaceTokenFilter
- type PatternTokenizer
- type PendingTask
- type PendingTasksRecord
- type PerPartitionCategorization
- type Percentage
- type PercentageScoreHeuristic
- type PercentileRanksAggregation
- type Percentiles
- type PercentilesAggregation
- type PercentilesBucketAggregate
- type PercentilesBucketAggregation
- type PercolateQuery
- type PercolatorProperty
- type PersistentTaskStatus
- type Phase
- type Phases
- type PhoneticTokenFilter
- type PhraseSuggest
- type PhraseSuggestCollate
- type PhraseSuggestCollateQuery
- type PhraseSuggestHighlight
- type PhraseSuggestOption
- type PhraseSuggester
- type PinnedDoc
- type PinnedQuery
- type PipelineConfig
- type PipelineMetadata
- type PipelineProcessor
- type PipelineSettings
- type PipelineSimulation
- type Pivot
- type PivotGroupByContainer
- type PluginStats
- type PluginsRecord
- type PluginsStatus
- type PointInTimeReference
- type PointProperty
- type Pool
- type PorterStemTokenFilter
- type PostMigrationFeature
- type PredicateTokenFilter
- type PredictedValue
- type PrefixQuery
- type Preprocessor
- type PressureMemory
- type Privileges
- type PrivilegesActions
- type PrivilegesCheck
- type Process
- type Processor
- type ProcessorContainer
- type Profile
- type Property
- type PublishedClusterStates
- type Queries
- type Query
- type QueryBreakdown
- type QueryCacheStats
- type QueryProfile
- type QueryStringQuery
- type QueryVector
- type QueryVectorBuilder
- type QueryWatch
- type QuestionAnsweringInferenceOptions
- type QuestionAnsweringInferenceUpdateOptions
- type RandomScoreFunction
- type RangeAggregate
- type RangeAggregation
- type RangeBucket
- type RangeQuery
- type RankEvalHit
- type RankEvalHitItem
- type RankEvalMetric
- type RankEvalMetricDetail
- type RankEvalMetricDiscountedCumulativeGain
- type RankEvalMetricExpectedReciprocalRank
- type RankEvalMetricMeanReciprocalRank
- type RankEvalMetricPrecision
- type RankEvalMetricRatingTreshold
- type RankEvalMetricRecall
- type RankEvalQuery
- type RankEvalRequestItem
- type RankFeatureFunction
- type RankFeatureFunctionLinear
- type RankFeatureFunctionLogarithm
- type RankFeatureFunctionSaturation
- type RankFeatureFunctionSigmoid
- type RankFeatureProperty
- type RankFeatureQuery
- type RankFeaturesProperty
- type RareTermsAggregation
- type RateAggregate
- type RateAggregation
- type ReadException
- type RealmCache
- type RealmInfo
- type Recording
- type RecoveryBytes
- type RecoveryFiles
- type RecoveryIndexStatus
- type RecoveryOrigin
- type RecoveryRecord
- type RecoveryStartStatus
- type RecoveryStats
- type RecoveryStatus
- type RefreshStats
- type RegexpQuery
- type RegressionInferenceOptions
- type ReindexDestination
- type ReindexNode
- type ReindexSource
- type ReindexStatus
- type ReindexTask
- type ReloadDetails
- type RelocationFailureInfo
- type RemoteSource
- type RemoveAction
- type RemoveDuplicatesTokenFilter
- type RemoveIndexAction
- type RemoveProcessor
- type RenameProcessor
- type ReportingEmailAttachment
- type RepositoriesRecord
- type Repository
- type RepositoryLocation
- type RepositoryMeteringInformation
- type RepositorySettings
- type RequestCacheStats
- type RequestCounts
- type RerouteDecision
- type RerouteExplanation
- type RerouteParameters
- type Rescore
- type RescoreQuery
- type ReservedSize
- type ResolveIndexAliasItem
- type ResolveIndexDataStreamsItem
- type ResolveIndexItem
- type ResourcePrivileges
- type ResponseBody
- type ResponseItem
- type Retention
- type RetentionLease
- type RetentionPolicy
- type RetentionPolicyContainer
- type Retries
- type ReverseNestedAggregate
- type ReverseNestedAggregation
- type ReverseTokenFilter
- type Role
- type RoleDescriptor
- type RoleDescriptorRead
- type RoleDescriptorWrapper
- type RoleMappingRule
- type RoleTemplate
- type RoleTemplateInlineQuery
- type RoleTemplateInlineScript
- type RoleTemplateQuery
- type RoleTemplateScript
- type RolloverConditions
- type RollupCapabilities
- type RollupCapabilitySummary
- type RollupFieldSummary
- type RollupJob
- type RollupJobConfiguration
- type RollupJobStats
- type RollupJobStatus
- type RollupJobSummary
- type RollupJobSummaryField
- type RoutingField
- type Row
- type RuleCondition
- type RunningStateSearchInterval
- type RuntimeField
- type RuntimeFieldFetchFields
- type RuntimeFields
- type RuntimeFieldsType
- type SLMPolicy
- type SampleDiversity
- type SamplerAggregate
- type SamplerAggregation
- type ScaledFloatNumberProperty
- type ScheduleContainer
- type ScheduleTimeOfDay
- type ScheduleTriggerEvent
- type ScoreSort
- type Script
- type ScriptCache
- type ScriptCondition
- type ScriptField
- type ScriptQuery
- type ScriptScoreFunction
- type ScriptScoreQuery
- type ScriptSort
- type ScriptTransform
- type ScriptedHeuristic
- type ScriptedMetricAggregate
- type ScriptedMetricAggregation
- type Scripting
- type ScrollIds
- type SearchAsYouTypeProperty
- type SearchIdle
- type SearchInput
- type SearchInputRequestBody
- type SearchInputRequestDefinition
- type SearchProfile
- type SearchStats
- type SearchTemplateRequestBody
- type SearchTransform
- type SearchableSnapshots
- type Security
- type SecurityRealm
- type SecurityRoleMapping
- type SecurityRoles
- type SecurityRolesDls
- type SecurityRolesDlsBitSetCache
- type SecurityRolesFile
- type SecurityRolesNative
- type Segment
- type SegmentsRecord
- type SegmentsStats
- type SerialDifferencingAggregation
- type SerializedClusterState
- type SerializedClusterStateDetail
- type ServiceToken
- type SetProcessor
- type SetSecurityUserProcessor
- type Settings
- type SettingsAnalyze
- type SettingsHighlight
- type SettingsQueryString
- type SettingsSearch
- type SettingsSimilarity
- type SettingsSimilarityBm25
- type SettingsSimilarityDfi
- type SettingsSimilarityDfr
- type SettingsSimilarityIb
- type SettingsSimilarityLmd
- type SettingsSimilarityLmj
- type SettingsSimilarityScriptedTfidf
- type ShapeFieldQuery
- type ShapeProperty
- type ShapeQuery
- type ShardCommit
- type ShardFailure
- type ShardFileSizeInfo
- type ShardHealthStats
- type ShardLease
- type ShardMigrationStatus
- type ShardPath
- type ShardProfile
- type ShardQueryCache
- type ShardRecovery
- type ShardRetentionLeases
- type ShardRouting
- type ShardSegmentRouting
- type ShardSequenceNumber
- type ShardStatistics
- type ShardStore
- type ShardStoreException
- type ShardStoreIndex
- type ShardStoreNode
- type ShardStoreWrapper
- type ShardsRecord
- type ShardsSegment
- type ShardsStatsSummary
- type ShardsStatsSummaryItem
- type ShardsTotalStats
- type Shared
- type ShingleTokenFilter
- type ShortNumberProperty
- type ShrinkConfiguration
- type SignificantLongTermsAggregate
- type SignificantLongTermsBucket
- type SignificantStringTermsAggregate
- type SignificantStringTermsBucket
- type SignificantTermsAggregateBaseSignificantLongTermsBucket
- type SignificantTermsAggregateBaseSignificantStringTermsBucket
- type SignificantTermsAggregateBaseVoid
- type SignificantTermsAggregation
- type SignificantTextAggregation
- type SimpleAnalyzer
- type SimpleMovingAverageAggregation
- type SimpleQueryStringFlags
- type SimpleQueryStringQuery
- type SimpleValueAggregate
- type SimulateIngest
- type SimulatedActions
- type SizeField
- type SlackAction
- type SlackAttachment
- type SlackAttachmentField
- type SlackDynamicAttachment
- type SlackMessage
- type SlackResult
- type SlicedScroll
- type Slices
- type Slm
- type SlowlogSettings
- type SlowlogTresholdLevels
- type SlowlogTresholds
- type SmoothingModelContainer
- type SnapshotIndexStats
- type SnapshotInfo
- type SnapshotLifecycle
- type SnapshotResponseItem
- type SnapshotRestore
- type SnapshotShardFailure
- type SnapshotShardsStats
- type SnapshotShardsStatus
- type SnapshotStats
- type SnapshotsRecord
- type SnowballAnalyzer
- type SnowballTokenFilter
- type SoftDeletes
- type Sort
- type SortCombinations
- type SortOptions
- type SortProcessor
- type SortResults
- type SourceConfig
- type SourceConfigParam
- type SourceField
- type SourceFilter
- type SpanContainingQuery
- type SpanFieldMaskingQuery
- type SpanFirstQuery
- type SpanGapQuery
- type SpanMultiTermQuery
- type SpanNearQuery
- type SpanNotQuery
- type SpanOrQuery
- type SpanQuery
- type SpanTermQuery
- type SpanWithinQuery
- type SplitProcessor
- type Sql
- type Ssl
- type StandardAnalyzer
- type StandardDeviationBounds
- type StandardDeviationBoundsAsString
- type StandardTokenizer
- type Statistics
- type Stats
- type StatsAggregate
- type StatsAggregation
- type StatsBucketAggregate
- type StatsBucketAggregation
- type Status
- type StemmerOverrideTokenFilter
- type StemmerTokenFilter
- type StepKey
- type StopAnalyzer
- type StopTokenFilter
- type StopWords
- type Storage
- type StoreStats
- type StoredScript
- type StoredScriptId
- type StringRareTermsAggregate
- type StringRareTermsBucket
- type StringStatsAggregate
- type StringStatsAggregation
- type StringTermsAggregate
- type StringTermsBucket
- type StringifiedEpochTimeUnitMillis
- type StringifiedEpochTimeUnitSeconds
- type StringifiedVersionNumber
- type StupidBackoffSmoothingModel
- type Suggest
- type SuggestContext
- type SuggestFuzziness
- type Suggester
- type SumAggregate
- type SumAggregation
- type SumBucketAggregation
- type Summary
- type SyncContainer
- type SynonymGraphTokenFilter
- type SynonymTokenFilter
- type TDigest
- type TDigestPercentileRanksAggregate
- type TDigestPercentilesAggregate
- type TTestAggregate
- type TTestAggregation
- type TargetMeanEncodingPreprocessor
- type TaskFailure
- type TaskId
- type TaskInfo
- type TaskInfos
- type TaskStatus
- type TasksRecord
- type Template
- type TemplateMapping
- type TemplatesRecord
- type Term
- type TermQuery
- type TermSuggest
- type TermSuggestOption
- type TermSuggester
- type TermVector
- type TermVectorsFilter
- type TermVectorsResult
- type TermVectorsToken
- type TermsAggregateBaseDoubleTermsBucket
- type TermsAggregateBaseLongTermsBucket
- type TermsAggregateBaseMultiTermsBucket
- type TermsAggregateBaseStringTermsBucket
- type TermsAggregateBaseVoid
- type TermsAggregation
- type TermsExclude
- type TermsGrouping
- type TermsInclude
- type TermsLookup
- type TermsPartition
- type TermsQuery
- type TermsQueryField
- type TermsSetQuery
- type TestPopulation
- type TextClassificationInferenceOptions
- type TextClassificationInferenceUpdateOptions
- type TextEmbedding
- type TextEmbeddingInferenceOptions
- type TextEmbeddingInferenceUpdateOptions
- type TextIndexPrefixes
- type TextProperty
- type TextToAnalyze
- type ThreadCount
- type ThreadPoolRecord
- type ThrottleState
- type TimeOfMonth
- type TimeOfWeek
- type TimeOfYear
- type TimeSync
- type TimingStats
- type TokenCountProperty
- type TokenDetail
- type TokenFilter
- type TokenFilterDefinition
- type TokenizationConfigContainer
- type Tokenizer
- type TokenizerDefinition
- type TopClassEntry
- type TopHitsAggregate
- type TopHitsAggregation
- type TopLeftBottomRightGeoBounds
- type TopMetrics
- type TopMetricsAggregate
- type TopMetricsAggregation
- type TopMetricsValue
- type TopRightBottomLeftGeoBounds
- type TotalFeatureImportance
- type TotalFeatureImportanceClass
- type TotalFeatureImportanceStatistics
- type TotalHits
- type TotalUserProfiles
- type TrackHits
- type TrainedModel
- type TrainedModelAssignment
- type TrainedModelAssignmentRoutingTable
- type TrainedModelAssignmentTaskParameters
- type TrainedModelConfig
- type TrainedModelConfigInput
- type TrainedModelConfigMetadata
- type TrainedModelDeploymentAllocationStatus
- type TrainedModelDeploymentNodesStats
- type TrainedModelDeploymentStats
- type TrainedModelEntities
- type TrainedModelInferenceClassImportance
- type TrainedModelInferenceFeatureImportance
- type TrainedModelInferenceStats
- type TrainedModelLocation
- type TrainedModelLocationIndex
- type TrainedModelSizeStats
- type TrainedModelStats
- type TrainedModelTree
- type TrainedModelTreeNode
- type TrainedModelsRecord
- type TransformAuthorization
- type TransformContainer
- type TransformDestination
- type TransformIndexerStats
- type TransformProgress
- type TransformSource
- type TransformStats
- type TransformStatsHealth
- type TransformSummary
- type TransformsRecord
- type TransientMetadataConfig
- type Translog
- type TranslogRetention
- type TranslogStats
- type TranslogStatus
- type Transport
- type TransportHistogram
- type TriggerContainer
- type TriggerEventContainer
- type TriggerEventResult
- type TrimProcessor
- type TrimTokenFilter
- type TruncateTokenFilter
- type TypeFieldMappings
- type TypeMapping
- type TypeQuery
- type UaxEmailUrlTokenizer
- type UnassignedInformation
- type UniqueTokenFilter
- type UnmappedRareTermsAggregate
- type UnmappedSamplerAggregate
- type UnmappedSignificantTermsAggregate
- type UnmappedTermsAggregate
- type UnratedDocument
- type UnsignedLongNumberProperty
- type UpdateByQueryRethrottleNode
- type UppercaseProcessor
- type UppercaseTokenFilter
- type UrlDecodeProcessor
- type UsageStatsIndex
- type UsageStatsShards
- type User
- type UserAgentProcessor
- type UserIndicesPrivileges
- type UserProfile
- type UserProfileHitMetadata
- type UserProfileUser
- type UserProfileWithMetadata
- type UserRealm
- type ValidationLoss
- type ValueCountAggregate
- type ValueCountAggregation
- type VariableWidthHistogramAggregate
- type VariableWidthHistogramAggregation
- type VariableWidthHistogramBucket
- type Vector
- type VerifyIndex
- type VersionProperty
- type Vertex
- type VertexDefinition
- type VertexInclude
- type Vocabulary
- type WaitForActiveShards
- type WarmerStats
- type Watch
- type WatchRecord
- type WatchRecordQueuedStats
- type WatchRecordStats
- type WatchStatus
- type Watcher
- type WatcherAction
- type WatcherActionTotals
- type WatcherActions
- type WatcherCondition
- type WatcherInput
- type WatcherNodeStats
- type WatcherStatusActions
- type WatcherWatch
- type WatcherWatchTrigger
- type WatcherWatchTriggerSchedule
- type WebhookAction
- type WebhookResult
- type WeightedAverageAggregation
- type WeightedAverageValue
- type WeightedAvgAggregate
- type Weights
- type WhitespaceAnalyzer
- type WhitespaceTokenizer
- type WildcardProperty
- type WildcardQuery
- type WktGeoBounds
- type WordDelimiterGraphTokenFilter
- type WordDelimiterTokenFilter
- type WrapperQuery
- type XpackDatafeed
- type XpackFeature
- type XpackFeatures
- type XpackQuery
- type XpackRealm
- type XpackRoleMapping
- type XpackRuntimeFieldTypes
- type ZeroShotClassificationInferenceOptions
- type ZeroShotClassificationInferenceUpdateOptions
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type AcknowledgeState ¶
type AcknowledgeState struct { State acknowledgementoptions.AcknowledgementOptions `json:"state"` Timestamp DateTime `json:"timestamp"` }
AcknowledgeState type.
func NewAcknowledgeState ¶
func NewAcknowledgeState() *AcknowledgeState
NewAcknowledgeState returns an AcknowledgeState.
type Acknowledgement ¶
Acknowledgement type.
func NewAcknowledgement ¶
func NewAcknowledgement() *Acknowledgement
NewAcknowledgement returns an Acknowledgement.
type ActionStatus ¶
type ActionStatus struct { Ack AcknowledgeState `json:"ack"` LastExecution *ExecutionState `json:"last_execution,omitempty"` LastSuccessfulExecution *ExecutionState `json:"last_successful_execution,omitempty"` LastThrottle *ThrottleState `json:"last_throttle,omitempty"` }
ActionStatus type.
func NewActionStatus ¶
func NewActionStatus() *ActionStatus
NewActionStatus returns an ActionStatus.
type ActivationState ¶
ActivationState type.
func NewActivationState ¶
func NewActivationState() *ActivationState
NewActivationState returns an ActivationState.
type ActivationStatus ¶
type ActivationStatus struct { Actions WatcherStatusActions `json:"actions"` State ActivationState `json:"state"` Version int64 `json:"version"` }
ActivationStatus type.
func NewActivationStatus ¶
func NewActivationStatus() *ActivationStatus
NewActivationStatus returns an ActivationStatus.
type AdaptiveSelection ¶
type AdaptiveSelection struct { AvgQueueSize *int64 `json:"avg_queue_size,omitempty"` AvgResponseTime Duration `json:"avg_response_time,omitempty"` AvgResponseTimeNs *int64 `json:"avg_response_time_ns,omitempty"` AvgServiceTime Duration `json:"avg_service_time,omitempty"` AvgServiceTimeNs *int64 `json:"avg_service_time_ns,omitempty"` OutgoingSearches *int64 `json:"outgoing_searches,omitempty"` Rank *string `json:"rank,omitempty"` }
AdaptiveSelection type.
func NewAdaptiveSelection ¶
func NewAdaptiveSelection() *AdaptiveSelection
NewAdaptiveSelection returns an AdaptiveSelection.
type AddAction ¶
type AddAction struct { Alias *string `json:"alias,omitempty"` Aliases []string `json:"aliases,omitempty"` Filter *Query `json:"filter,omitempty"` Index *string `json:"index,omitempty"` IndexRouting *string `json:"index_routing,omitempty"` Indices []string `json:"indices,omitempty"` IsHidden *bool `json:"is_hidden,omitempty"` IsWriteIndex *bool `json:"is_write_index,omitempty"` MustExist *bool `json:"must_exist,omitempty"` Routing *string `json:"routing,omitempty"` SearchRouting *string `json:"search_routing,omitempty"` }
AddAction type.
type AdjacencyMatrixAggregate ¶
type AdjacencyMatrixAggregate struct { Buckets BucketsAdjacencyMatrixBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
AdjacencyMatrixAggregate type.
func NewAdjacencyMatrixAggregate ¶
func NewAdjacencyMatrixAggregate() *AdjacencyMatrixAggregate
NewAdjacencyMatrixAggregate returns an AdjacencyMatrixAggregate.
func (*AdjacencyMatrixAggregate) UnmarshalJSON ¶
func (s *AdjacencyMatrixAggregate) UnmarshalJSON(data []byte) error
type AdjacencyMatrixAggregation ¶
type AdjacencyMatrixAggregation struct { Filters map[string]Query `json:"filters,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
AdjacencyMatrixAggregation type.
func NewAdjacencyMatrixAggregation ¶
func NewAdjacencyMatrixAggregation() *AdjacencyMatrixAggregation
NewAdjacencyMatrixAggregation returns an AdjacencyMatrixAggregation.
type AdjacencyMatrixBucket ¶
type AdjacencyMatrixBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key string `json:"key"` }
AdjacencyMatrixBucket type.
func NewAdjacencyMatrixBucket ¶
func NewAdjacencyMatrixBucket() *AdjacencyMatrixBucket
NewAdjacencyMatrixBucket returns an AdjacencyMatrixBucket.
func (AdjacencyMatrixBucket) MarshalJSON ¶
func (s AdjacencyMatrixBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*AdjacencyMatrixBucket) UnmarshalJSON ¶
func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error
type Aggregate ¶
type Aggregate interface{}
Aggregate holds the union for the following types:
CardinalityAggregate HdrPercentilesAggregate HdrPercentileRanksAggregate TDigestPercentilesAggregate TDigestPercentileRanksAggregate PercentilesBucketAggregate MedianAbsoluteDeviationAggregate MinAggregate MaxAggregate SumAggregate AvgAggregate WeightedAvgAggregate ValueCountAggregate SimpleValueAggregate DerivativeAggregate BucketMetricValueAggregate StatsAggregate StatsBucketAggregate ExtendedStatsAggregate ExtendedStatsBucketAggregate GeoBoundsAggregate GeoCentroidAggregate HistogramAggregate DateHistogramAggregate AutoDateHistogramAggregate VariableWidthHistogramAggregate StringTermsAggregate LongTermsAggregate DoubleTermsAggregate UnmappedTermsAggregate LongRareTermsAggregate StringRareTermsAggregate UnmappedRareTermsAggregate MultiTermsAggregate MissingAggregate NestedAggregate ReverseNestedAggregate GlobalAggregate FilterAggregate ChildrenAggregate ParentAggregate SamplerAggregate UnmappedSamplerAggregate GeoHashGridAggregate GeoTileGridAggregate GeoHexGridAggregate RangeAggregate DateRangeAggregate GeoDistanceAggregate IpRangeAggregate IpPrefixAggregate FiltersAggregate AdjacencyMatrixAggregate SignificantLongTermsAggregate SignificantStringTermsAggregate UnmappedSignificantTermsAggregate CompositeAggregate ScriptedMetricAggregate TopHitsAggregate InferenceAggregate StringStatsAggregate BoxPlotAggregate TopMetricsAggregate TTestAggregate RateAggregate CumulativeCardinalityAggregate MatrixStatsAggregate GeoLineAggregate
type AggregateMetricDoubleProperty ¶
type AggregateMetricDoubleProperty struct { DefaultMetric string `json:"default_metric"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Metrics []string `json:"metrics"` Properties map[string]Property `json:"properties,omitempty"` TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
AggregateMetricDoubleProperty type.
func NewAggregateMetricDoubleProperty ¶
func NewAggregateMetricDoubleProperty() *AggregateMetricDoubleProperty
NewAggregateMetricDoubleProperty returns an AggregateMetricDoubleProperty.
func (*AggregateMetricDoubleProperty) UnmarshalJSON ¶
func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error
type AggregateOrder ¶
type AggregateOrder interface{}
AggregateOrder holds the union for the following types:
map[string]sortorder.SortOrder []map[string]sortorder.SortOrder
type AggregateOutput ¶
type AggregateOutput struct { Exponent *Weights `json:"exponent,omitempty"` LogisticRegression *Weights `json:"logistic_regression,omitempty"` WeightedMode *Weights `json:"weighted_mode,omitempty"` WeightedSum *Weights `json:"weighted_sum,omitempty"` }
AggregateOutput type.
func NewAggregateOutput ¶
func NewAggregateOutput() *AggregateOutput
NewAggregateOutput returns an AggregateOutput.
type Aggregation ¶
type Aggregation struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
Aggregation type.
type AggregationBreakdown ¶
type AggregationBreakdown struct { BuildAggregation int64 `json:"build_aggregation"` BuildAggregationCount int64 `json:"build_aggregation_count"` BuildLeafCollector int64 `json:"build_leaf_collector"` BuildLeafCollectorCount int64 `json:"build_leaf_collector_count"` Collect int64 `json:"collect"` CollectCount int64 `json:"collect_count"` Initialize int64 `json:"initialize"` InitializeCount int64 `json:"initialize_count"` PostCollection *int64 `json:"post_collection,omitempty"` PostCollectionCount *int64 `json:"post_collection_count,omitempty"` Reduce int64 `json:"reduce"` ReduceCount int64 `json:"reduce_count"` }
AggregationBreakdown type.
func NewAggregationBreakdown ¶
func NewAggregationBreakdown() *AggregationBreakdown
NewAggregationBreakdown returns an AggregationBreakdown.
type AggregationProfile ¶
type AggregationProfile struct { Breakdown AggregationBreakdown `json:"breakdown"` Children []AggregationProfile `json:"children,omitempty"` Debug *AggregationProfileDebug `json:"debug,omitempty"` Description string `json:"description"` TimeInNanos int64 `json:"time_in_nanos"` Type string `json:"type"` }
AggregationProfile type.
func NewAggregationProfile ¶
func NewAggregationProfile() *AggregationProfile
NewAggregationProfile returns an AggregationProfile.
type AggregationProfileDebug ¶
type AggregationProfileDebug struct { BuiltBuckets *int `json:"built_buckets,omitempty"` CharsFetched *int `json:"chars_fetched,omitempty"` CollectAnalyzedCount *int `json:"collect_analyzed_count,omitempty"` CollectAnalyzedNs *int `json:"collect_analyzed_ns,omitempty"` CollectionStrategy *string `json:"collection_strategy,omitempty"` DeferredAggregators []string `json:"deferred_aggregators,omitempty"` Delegate *string `json:"delegate,omitempty"` DelegateDebug *AggregationProfileDebug `json:"delegate_debug,omitempty"` EmptyCollectorsUsed *int `json:"empty_collectors_used,omitempty"` ExtractCount *int `json:"extract_count,omitempty"` ExtractNs *int `json:"extract_ns,omitempty"` Filters []AggregationProfileDelegateDebugFilter `json:"filters,omitempty"` HasFilter *bool `json:"has_filter,omitempty"` MapReducer *string `json:"map_reducer,omitempty"` NumericCollectorsUsed *int `json:"numeric_collectors_used,omitempty"` OrdinalsCollectorsOverheadTooHigh *int `json:"ordinals_collectors_overhead_too_high,omitempty"` OrdinalsCollectorsUsed *int `json:"ordinals_collectors_used,omitempty"` ResultStrategy *string `json:"result_strategy,omitempty"` SegmentsCollected *int `json:"segments_collected,omitempty"` SegmentsCounted *int `json:"segments_counted,omitempty"` SegmentsWithDeletedDocs *int `json:"segments_with_deleted_docs,omitempty"` SegmentsWithDocCountField *int `json:"segments_with_doc_count_field,omitempty"` SegmentsWithMultiValuedOrds *int `json:"segments_with_multi_valued_ords,omitempty"` SegmentsWithSingleValuedOrds *int `json:"segments_with_single_valued_ords,omitempty"` StringHashingCollectorsUsed *int `json:"string_hashing_collectors_used,omitempty"` SurvivingBuckets *int `json:"surviving_buckets,omitempty"` TotalBuckets *int `json:"total_buckets,omitempty"` ValuesFetched *int `json:"values_fetched,omitempty"` }
AggregationProfileDebug type.
func NewAggregationProfileDebug ¶
func NewAggregationProfileDebug() *AggregationProfileDebug
NewAggregationProfileDebug returns an AggregationProfileDebug.
type AggregationProfileDelegateDebugFilter ¶
type AggregationProfileDelegateDebugFilter struct { Query *string `json:"query,omitempty"` ResultsFromMetadata *int `json:"results_from_metadata,omitempty"` SegmentsCountedInConstantTime *int `json:"segments_counted_in_constant_time,omitempty"` SpecializedFor *string `json:"specialized_for,omitempty"` }
AggregationProfileDelegateDebugFilter type.
func NewAggregationProfileDelegateDebugFilter ¶
func NewAggregationProfileDelegateDebugFilter() *AggregationProfileDelegateDebugFilter
NewAggregationProfileDelegateDebugFilter returns an AggregationProfileDelegateDebugFilter.
type AggregationRange ¶
type AggregationRange struct { From string `json:"from,omitempty"` Key *string `json:"key,omitempty"` To string `json:"to,omitempty"` }
AggregationRange type.
func NewAggregationRange ¶
func NewAggregationRange() *AggregationRange
NewAggregationRange returns an AggregationRange.
type Aggregations ¶
type Aggregations struct { AdjacencyMatrix *AdjacencyMatrixAggregation `json:"adjacency_matrix,omitempty"` // Aggregations Sub-aggregations for this aggregation. Only applies to bucket aggregations. Aggregations map[string]Aggregations `json:"aggregations,omitempty"` AutoDateHistogram *AutoDateHistogramAggregation `json:"auto_date_histogram,omitempty"` Avg *AverageAggregation `json:"avg,omitempty"` AvgBucket *AverageBucketAggregation `json:"avg_bucket,omitempty"` Boxplot *BoxplotAggregation `json:"boxplot,omitempty"` BucketCorrelation *BucketCorrelationAggregation `json:"bucket_correlation,omitempty"` BucketCountKsTest *BucketKsAggregation `json:"bucket_count_ks_test,omitempty"` BucketScript *BucketScriptAggregation `json:"bucket_script,omitempty"` BucketSelector *BucketSelectorAggregation `json:"bucket_selector,omitempty"` BucketSort *BucketSortAggregation `json:"bucket_sort,omitempty"` Cardinality *CardinalityAggregation `json:"cardinality,omitempty"` CategorizeText *CategorizeTextAggregation `json:"categorize_text,omitempty"` Children *ChildrenAggregation `json:"children,omitempty"` Composite *CompositeAggregation `json:"composite,omitempty"` CumulativeCardinality *CumulativeCardinalityAggregation `json:"cumulative_cardinality,omitempty"` CumulativeSum *CumulativeSumAggregation `json:"cumulative_sum,omitempty"` DateHistogram *DateHistogramAggregation `json:"date_histogram,omitempty"` DateRange *DateRangeAggregation `json:"date_range,omitempty"` Derivative *DerivativeAggregation `json:"derivative,omitempty"` DiversifiedSampler *DiversifiedSamplerAggregation `json:"diversified_sampler,omitempty"` ExtendedStats *ExtendedStatsAggregation `json:"extended_stats,omitempty"` ExtendedStatsBucket *ExtendedStatsBucketAggregation `json:"extended_stats_bucket,omitempty"` Filter *Query `json:"filter,omitempty"` Filters *FiltersAggregation `json:"filters,omitempty"` GeoBounds *GeoBoundsAggregation `json:"geo_bounds,omitempty"` GeoCentroid *GeoCentroidAggregation 
`json:"geo_centroid,omitempty"` GeoDistance *GeoDistanceAggregation `json:"geo_distance,omitempty"` GeoLine *GeoLineAggregation `json:"geo_line,omitempty"` GeohashGrid *GeoHashGridAggregation `json:"geohash_grid,omitempty"` GeohexGrid *GeohexGridAggregation `json:"geohex_grid,omitempty"` GeotileGrid *GeoTileGridAggregation `json:"geotile_grid,omitempty"` Global *GlobalAggregation `json:"global,omitempty"` Histogram *HistogramAggregation `json:"histogram,omitempty"` Inference *InferenceAggregation `json:"inference,omitempty"` IpPrefix *IpPrefixAggregation `json:"ip_prefix,omitempty"` IpRange *IpRangeAggregation `json:"ip_range,omitempty"` Line *GeoLineAggregation `json:"line,omitempty"` MatrixStats *MatrixStatsAggregation `json:"matrix_stats,omitempty"` Max *MaxAggregation `json:"max,omitempty"` MaxBucket *MaxBucketAggregation `json:"max_bucket,omitempty"` MedianAbsoluteDeviation *MedianAbsoluteDeviationAggregation `json:"median_absolute_deviation,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Min *MinAggregation `json:"min,omitempty"` MinBucket *MinBucketAggregation `json:"min_bucket,omitempty"` Missing *MissingAggregation `json:"missing,omitempty"` MovingAvg MovingAverageAggregation `json:"moving_avg,omitempty"` MovingFn *MovingFunctionAggregation `json:"moving_fn,omitempty"` MovingPercentiles *MovingPercentilesAggregation `json:"moving_percentiles,omitempty"` MultiTerms *MultiTermsAggregation `json:"multi_terms,omitempty"` Nested *NestedAggregation `json:"nested,omitempty"` Normalize *NormalizeAggregation `json:"normalize,omitempty"` Parent *ParentAggregation `json:"parent,omitempty"` PercentileRanks *PercentileRanksAggregation `json:"percentile_ranks,omitempty"` Percentiles *PercentilesAggregation `json:"percentiles,omitempty"` PercentilesBucket *PercentilesBucketAggregation `json:"percentiles_bucket,omitempty"` Range *RangeAggregation `json:"range,omitempty"` RareTerms *RareTermsAggregation `json:"rare_terms,omitempty"` Rate 
*RateAggregation `json:"rate,omitempty"` ReverseNested *ReverseNestedAggregation `json:"reverse_nested,omitempty"` Sampler *SamplerAggregation `json:"sampler,omitempty"` ScriptedMetric *ScriptedMetricAggregation `json:"scripted_metric,omitempty"` SerialDiff *SerialDifferencingAggregation `json:"serial_diff,omitempty"` SignificantTerms *SignificantTermsAggregation `json:"significant_terms,omitempty"` SignificantText *SignificantTextAggregation `json:"significant_text,omitempty"` Stats *StatsAggregation `json:"stats,omitempty"` StatsBucket *StatsBucketAggregation `json:"stats_bucket,omitempty"` StringStats *StringStatsAggregation `json:"string_stats,omitempty"` Sum *SumAggregation `json:"sum,omitempty"` SumBucket *SumBucketAggregation `json:"sum_bucket,omitempty"` TTest *TTestAggregation `json:"t_test,omitempty"` Terms *TermsAggregation `json:"terms,omitempty"` TopHits *TopHitsAggregation `json:"top_hits,omitempty"` TopMetrics *TopMetricsAggregation `json:"top_metrics,omitempty"` ValueCount *ValueCountAggregation `json:"value_count,omitempty"` VariableWidthHistogram *VariableWidthHistogramAggregation `json:"variable_width_histogram,omitempty"` WeightedAvg *WeightedAverageAggregation `json:"weighted_avg,omitempty"` }
Aggregations type.
func NewAggregations ¶
func NewAggregations() *Aggregations
NewAggregations returns an Aggregations.
func (*Aggregations) UnmarshalJSON ¶
func (s *Aggregations) UnmarshalJSON(data []byte) error
type Alias ¶
type Alias struct { Filter *Query `json:"filter,omitempty"` IndexRouting *string `json:"index_routing,omitempty"` IsHidden *bool `json:"is_hidden,omitempty"` IsWriteIndex *bool `json:"is_write_index,omitempty"` Routing *string `json:"routing,omitempty"` SearchRouting *string `json:"search_routing,omitempty"` }
Alias type.
type AliasDefinition ¶
type AliasDefinition struct { Filter *Query `json:"filter,omitempty"` IndexRouting *string `json:"index_routing,omitempty"` IsHidden *bool `json:"is_hidden,omitempty"` IsWriteIndex *bool `json:"is_write_index,omitempty"` Routing *string `json:"routing,omitempty"` SearchRouting *string `json:"search_routing,omitempty"` }
AliasDefinition type.
func NewAliasDefinition ¶
func NewAliasDefinition() *AliasDefinition
NewAliasDefinition returns an AliasDefinition.
type AliasesRecord ¶
type AliasesRecord struct { // Alias alias name Alias *string `json:"alias,omitempty"` // Filter filter Filter *string `json:"filter,omitempty"` // Index index alias points to Index *string `json:"index,omitempty"` // IsWriteIndex write index IsWriteIndex *string `json:"is_write_index,omitempty"` // RoutingIndex index routing RoutingIndex *string `json:"routing.index,omitempty"` // RoutingSearch search routing RoutingSearch *string `json:"routing.search,omitempty"` }
AliasesRecord type.
func NewAliasesRecord ¶
func NewAliasesRecord() *AliasesRecord
NewAliasesRecord returns an AliasesRecord.
type AllField ¶
type AllField struct { Analyzer string `json:"analyzer"` Enabled bool `json:"enabled"` OmitNorms bool `json:"omit_norms"` SearchAnalyzer string `json:"search_analyzer"` Similarity string `json:"similarity"` Store bool `json:"store"` StoreTermVectorOffsets bool `json:"store_term_vector_offsets"` StoreTermVectorPayloads bool `json:"store_term_vector_payloads"` StoreTermVectorPositions bool `json:"store_term_vector_positions"` StoreTermVectors bool `json:"store_term_vectors"` }
AllField type.
type AllocationDecision ¶
type AllocationDecision struct { Decider string `json:"decider"` Decision allocationexplaindecision.AllocationExplainDecision `json:"decision"` Explanation string `json:"explanation"` }
AllocationDecision type.
func NewAllocationDecision ¶
func NewAllocationDecision() *AllocationDecision
NewAllocationDecision returns an AllocationDecision.
type AllocationRecord ¶
type AllocationRecord struct { // DiskAvail disk available DiskAvail ByteSize `json:"disk.avail,omitempty"` // DiskIndices disk used by ES indices DiskIndices ByteSize `json:"disk.indices,omitempty"` // DiskPercent percent disk used DiskPercent Percentage `json:"disk.percent,omitempty"` // DiskTotal total capacity of all volumes DiskTotal ByteSize `json:"disk.total,omitempty"` // DiskUsed disk used (total, not just ES) DiskUsed ByteSize `json:"disk.used,omitempty"` // Host host of node Host string `json:"host,omitempty"` // Ip ip of node Ip string `json:"ip,omitempty"` // Node name of node Node *string `json:"node,omitempty"` // Shards number of shards on node Shards *string `json:"shards,omitempty"` }
AllocationRecord type.
func NewAllocationRecord ¶
func NewAllocationRecord() *AllocationRecord
NewAllocationRecord returns an AllocationRecord.
type AllocationStore ¶
type AllocationStore struct { AllocationId string `json:"allocation_id"` Found bool `json:"found"` InSync bool `json:"in_sync"` MatchingSizeInBytes int64 `json:"matching_size_in_bytes"` MatchingSyncId bool `json:"matching_sync_id"` StoreException string `json:"store_exception"` }
AllocationStore type.
func NewAllocationStore ¶
func NewAllocationStore() *AllocationStore
NewAllocationStore returns an AllocationStore.
type AlwaysCondition ¶
type AlwaysCondition struct { }
AlwaysCondition type.
func NewAlwaysCondition ¶
func NewAlwaysCondition() *AlwaysCondition
NewAlwaysCondition returns an AlwaysCondition.
type AnalysisConfig ¶
type AnalysisConfig struct { // BucketSpan The size of the interval that the analysis is aggregated into, typically // between `5m` and `1h`. This value should be either a whole number of days or // equate to a // whole number of buckets in one day. If the anomaly detection job uses a // datafeed with aggregations, this value must also be divisible by the interval // of the date histogram aggregation. BucketSpan Duration `json:"bucket_span,omitempty"` // CategorizationAnalyzer If `categorization_field_name` is specified, you can also define the analyzer // that is used to interpret the categorization field. This property cannot be // used at the same time as `categorization_filters`. The categorization // analyzer specifies how the `categorization_field` is interpreted by the // categorization process. The `categorization_analyzer` field can be specified // either as a string or as an object. If it is a string, it must refer to a // built-in analyzer or one added by another plugin. CategorizationAnalyzer CategorizationAnalyzer `json:"categorization_analyzer,omitempty"` // CategorizationFieldName If this property is specified, the values of the specified field will be // categorized. The resulting categories must be used in a detector by setting // `by_field_name`, `over_field_name`, or `partition_field_name` to the keyword // `mlcategory`. CategorizationFieldName *string `json:"categorization_field_name,omitempty"` // CategorizationFilters If `categorization_field_name` is specified, you can also define optional // filters. This property expects an array of regular expressions. The // expressions are used to filter out matching sequences from the categorization // field values. You can use this functionality to fine tune the categorization // by excluding sequences from consideration when categories are defined. For // example, you can exclude SQL statements that appear in your log files. 
This // property cannot be used at the same time as `categorization_analyzer`. If you // only want to define simple regular expression filters that are applied prior // to tokenization, setting this property is the easiest method. If you also // want to customize the tokenizer or post-tokenization filtering, use the // `categorization_analyzer` property instead and include the filters as // pattern_replace character filters. The effect is exactly the same. CategorizationFilters []string `json:"categorization_filters,omitempty"` // Detectors Detector configuration objects specify which data fields a job analyzes. They // also specify which analytical functions are used. You can specify multiple // detectors for a job. If the detectors array does not contain at least one // detector, no analysis can occur and an error is returned. Detectors []Detector `json:"detectors"` // Influencers A comma separated list of influencer field names. Typically these can be the // by, over, or partition fields that are used in the detector configuration. // You might also want to use a field name that is not specifically named in a // detector, but is available as part of the input data. When you use multiple // detectors, the use of influencers is recommended as it aggregates results for // each influencer entity. Influencers []string `json:"influencers,omitempty"` // Latency The size of the window in which to expect data that is out of time order. If // you specify a non-zero value, it must be greater than or equal to one second. // NOTE: Latency is applicable only when you send data by using the post data // API. Latency Duration `json:"latency,omitempty"` // ModelPruneWindow Advanced configuration option. Affects the pruning of models that have not // been updated for the given time duration. The value must be set to a multiple // of the `bucket_span`. If set too low, important information may be removed // from the model. 
For jobs created in 8.1 and later, the default value is the // greater of `30d` or 20 times `bucket_span`. ModelPruneWindow Duration `json:"model_prune_window,omitempty"` // MultivariateByFields This functionality is reserved for internal use. It is not supported for use // in customer environments and is not subject to the support SLA of official GA // features. If set to `true`, the analysis will automatically find correlations // between metrics for a given by field value and report anomalies when those // correlations cease to hold. For example, suppose CPU and memory usage on host // A is usually highly correlated with the same metrics on host B. Perhaps this // correlation occurs because they are running a load-balanced application. If // you enable this property, anomalies will be reported when, for example, CPU // usage on host A is high and the value of CPU usage on host B is low. That is // to say, you’ll see an anomaly when the CPU of host A is unusual given the CPU // of host B. To use the `multivariate_by_fields` property, you must also // specify `by_field_name` in your detector. MultivariateByFields *bool `json:"multivariate_by_fields,omitempty"` // PerPartitionCategorization Settings related to how categorization interacts with partition fields. PerPartitionCategorization *PerPartitionCategorization `json:"per_partition_categorization,omitempty"` // SummaryCountFieldName If this property is specified, the data that is fed to the job is expected to // be pre-summarized. This property value is the name of the field that contains // the count of raw data points that have been summarized. The same // `summary_count_field_name` applies to all detectors in the job. NOTE: The // `summary_count_field_name` property cannot be used with the `metric` // function. SummaryCountFieldName *string `json:"summary_count_field_name,omitempty"` }
AnalysisConfig type.
func NewAnalysisConfig ¶
func NewAnalysisConfig() *AnalysisConfig
NewAnalysisConfig returns an AnalysisConfig.
func (*AnalysisConfig) UnmarshalJSON ¶
func (s *AnalysisConfig) UnmarshalJSON(data []byte) error
type AnalysisConfigRead ¶
type AnalysisConfigRead struct { // BucketSpan The size of the interval that the analysis is aggregated into, typically // between `5m` and `1h`. This value should be either a whole number of days or // equate to a // whole number of buckets in one day. If the anomaly detection job uses a // datafeed with aggregations, this value must also be divisible by the interval // of the date histogram aggregation. BucketSpan Duration `json:"bucket_span"` // CategorizationAnalyzer If `categorization_field_name` is specified, you can also define the analyzer // that is used to interpret the categorization field. This property cannot be // used at the same time as `categorization_filters`. The categorization // analyzer specifies how the `categorization_field` is interpreted by the // categorization process. The `categorization_analyzer` field can be specified // either as a string or as an object. If it is a string, it must refer to a // built-in analyzer or one added by another plugin. CategorizationAnalyzer CategorizationAnalyzer `json:"categorization_analyzer,omitempty"` // CategorizationFieldName If this property is specified, the values of the specified field will be // categorized. The resulting categories must be used in a detector by setting // `by_field_name`, `over_field_name`, or `partition_field_name` to the keyword // `mlcategory`. CategorizationFieldName *string `json:"categorization_field_name,omitempty"` // CategorizationFilters If `categorization_field_name` is specified, you can also define optional // filters. This property expects an array of regular expressions. The // expressions are used to filter out matching sequences from the categorization // field values. You can use this functionality to fine tune the categorization // by excluding sequences from consideration when categories are defined. For // example, you can exclude SQL statements that appear in your log files. This // property cannot be used at the same time as `categorization_analyzer`. 
If you // only want to define simple regular expression filters that are applied prior // to tokenization, setting this property is the easiest method. If you also // want to customize the tokenizer or post-tokenization filtering, use the // `categorization_analyzer` property instead and include the filters as // pattern_replace character filters. The effect is exactly the same. CategorizationFilters []string `json:"categorization_filters,omitempty"` // Detectors Detector configuration objects specify which data fields a job analyzes. They // also specify which analytical functions are used. You can specify multiple // detectors for a job. If the detectors array does not contain at least one // detector, no analysis can occur and an error is returned. Detectors []DetectorRead `json:"detectors"` // Influencers A comma separated list of influencer field names. Typically these can be the // by, over, or partition fields that are used in the detector configuration. // You might also want to use a field name that is not specifically named in a // detector, but is available as part of the input data. When you use multiple // detectors, the use of influencers is recommended as it aggregates results for // each influencer entity. Influencers []string `json:"influencers"` // Latency The size of the window in which to expect data that is out of time order. If // you specify a non-zero value, it must be greater than or equal to one second. // NOTE: Latency is applicable only when you send data by using the post data // API. Latency Duration `json:"latency,omitempty"` // ModelPruneWindow Advanced configuration option. Affects the pruning of models that have not // been updated for the given time duration. The value must be set to a multiple // of the `bucket_span`. If set too low, important information may be removed // from the model. For jobs created in 8.1 and later, the default value is the // greater of `30d` or 20 times `bucket_span`. 
ModelPruneWindow Duration `json:"model_prune_window,omitempty"` // MultivariateByFields This functionality is reserved for internal use. It is not supported for use // in customer environments and is not subject to the support SLA of official GA // features. If set to `true`, the analysis will automatically find correlations // between metrics for a given by field value and report anomalies when those // correlations cease to hold. For example, suppose CPU and memory usage on host // A is usually highly correlated with the same metrics on host B. Perhaps this // correlation occurs because they are running a load-balanced application. If // you enable this property, anomalies will be reported when, for example, CPU // usage on host A is high and the value of CPU usage on host B is low. That is // to say, you’ll see an anomaly when the CPU of host A is unusual given the CPU // of host B. To use the `multivariate_by_fields` property, you must also // specify `by_field_name` in your detector. MultivariateByFields *bool `json:"multivariate_by_fields,omitempty"` // PerPartitionCategorization Settings related to how categorization interacts with partition fields. PerPartitionCategorization *PerPartitionCategorization `json:"per_partition_categorization,omitempty"` // SummaryCountFieldName If this property is specified, the data that is fed to the job is expected to // be pre-summarized. This property value is the name of the field that contains // the count of raw data points that have been summarized. The same // `summary_count_field_name` applies to all detectors in the job. NOTE: The // `summary_count_field_name` property cannot be used with the `metric` // function. SummaryCountFieldName *string `json:"summary_count_field_name,omitempty"` }
AnalysisConfigRead type.
func NewAnalysisConfigRead ¶
func NewAnalysisConfigRead() *AnalysisConfigRead
NewAnalysisConfigRead returns an AnalysisConfigRead.
func (*AnalysisConfigRead) UnmarshalJSON ¶
func (s *AnalysisConfigRead) UnmarshalJSON(data []byte) error
type AnalysisLimits ¶
type AnalysisLimits struct { // CategorizationExamplesLimit The maximum number of examples stored per category in memory and in the // results data store. If you increase this value, more examples are available, // however it requires that you have more storage available. If you set this // value to 0, no examples are stored. NOTE: The `categorization_examples_limit` // applies only to analysis that uses categorization. CategorizationExamplesLimit *int64 `json:"categorization_examples_limit,omitempty"` // ModelMemoryLimit The approximate maximum amount of memory resources that are required for // analytical processing. Once this limit is approached, data pruning becomes // more aggressive. Upon exceeding this limit, new entities are not modeled. If // the `xpack.ml.max_model_memory_limit` setting has a value greater than 0 and // less than 1024mb, that value is used instead of the default. The default // value is relatively small to ensure that high resource usage is a conscious // decision. If you have jobs that are expected to analyze high cardinality // fields, you will likely need to use a higher value. If you specify a number // instead of a string, the units are assumed to be MiB. Specifying a string is // recommended for clarity. If you specify a byte size unit of `b` or `kb` and // the number does not equate to a discrete number of megabytes, it is rounded // down to the closest MiB. The minimum valid value is 1 MiB. If you specify a // value less than 1 MiB, an error occurs. If you specify a value for the // `xpack.ml.max_model_memory_limit` setting, an error occurs when you try to // create jobs that have `model_memory_limit` values greater than that setting // value. ModelMemoryLimit *string `json:"model_memory_limit,omitempty"` }
AnalysisLimits type.
func NewAnalysisLimits ¶
func NewAnalysisLimits() *AnalysisLimits
NewAnalysisLimits returns an AnalysisLimits.
type AnalysisMemoryLimit ¶
type AnalysisMemoryLimit struct { // ModelMemoryLimit Limits can be applied for the resources required to hold the mathematical // models in memory. These limits are approximate and can be set per job. They // do not control the memory used by other processes, for example the // Elasticsearch Java processes. ModelMemoryLimit string `json:"model_memory_limit"` }
AnalysisMemoryLimit type.
func NewAnalysisMemoryLimit ¶
func NewAnalysisMemoryLimit() *AnalysisMemoryLimit
NewAnalysisMemoryLimit returns an AnalysisMemoryLimit.
type Analytics ¶
type Analytics struct { Available bool `json:"available"` Enabled bool `json:"enabled"` Stats AnalyticsStatistics `json:"stats"` }
Analytics type.
type AnalyticsStatistics ¶
type AnalyticsStatistics struct { BoxplotUsage int64 `json:"boxplot_usage"` CumulativeCardinalityUsage int64 `json:"cumulative_cardinality_usage"` MovingPercentilesUsage int64 `json:"moving_percentiles_usage"` MultiTermsUsage *int64 `json:"multi_terms_usage,omitempty"` NormalizeUsage int64 `json:"normalize_usage"` RateUsage int64 `json:"rate_usage"` StringStatsUsage int64 `json:"string_stats_usage"` TTestUsage int64 `json:"t_test_usage"` TopMetricsUsage int64 `json:"top_metrics_usage"` }
AnalyticsStatistics type.
func NewAnalyticsStatistics ¶
func NewAnalyticsStatistics() *AnalyticsStatistics
NewAnalyticsStatistics returns an AnalyticsStatistics.
type AnalyzeDetail ¶
type AnalyzeDetail struct { Analyzer *AnalyzerDetail `json:"analyzer,omitempty"` Charfilters []CharFilterDetail `json:"charfilters,omitempty"` CustomAnalyzer bool `json:"custom_analyzer"` Tokenfilters []TokenDetail `json:"tokenfilters,omitempty"` Tokenizer *TokenDetail `json:"tokenizer,omitempty"` }
AnalyzeDetail type.
func NewAnalyzeDetail ¶
func NewAnalyzeDetail() *AnalyzeDetail
NewAnalyzeDetail returns an AnalyzeDetail.
type AnalyzeToken ¶
type AnalyzeToken struct { EndOffset int64 `json:"end_offset"` Position int64 `json:"position"` PositionLength *int64 `json:"positionLength,omitempty"` StartOffset int64 `json:"start_offset"` Token string `json:"token"` Type string `json:"type"` }
AnalyzeToken type.
func NewAnalyzeToken ¶
func NewAnalyzeToken() *AnalyzeToken
NewAnalyzeToken returns an AnalyzeToken.
type Analyzer ¶
type Analyzer interface{}
Analyzer holds the union for the following types:
CustomAnalyzer FingerprintAnalyzer KeywordAnalyzer LanguageAnalyzer NoriAnalyzer PatternAnalyzer SimpleAnalyzer StandardAnalyzer StopAnalyzer WhitespaceAnalyzer IcuAnalyzer KuromojiAnalyzer SnowballAnalyzer DutchAnalyzer
type AnalyzerDetail ¶
type AnalyzerDetail struct { Name string `json:"name"` Tokens []ExplainAnalyzeToken `json:"tokens"` }
AnalyzerDetail type.
func NewAnalyzerDetail ¶
func NewAnalyzerDetail() *AnalyzerDetail
NewAnalyzerDetail returns an AnalyzerDetail.
type Anomaly ¶
type Anomaly struct { // Actual The actual value for the bucket. Actual []Float64 `json:"actual,omitempty"` // AnomalyScoreExplanation Information about the factors impacting the initial anomaly score. AnomalyScoreExplanation *AnomalyExplanation `json:"anomaly_score_explanation,omitempty"` // BucketSpan The length of the bucket in seconds. This value matches the `bucket_span` // that is specified in the job. BucketSpan int64 `json:"bucket_span"` // ByFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to their own history. It is used for // finding unusual values in the context of the split. ByFieldName *string `json:"by_field_name,omitempty"` // ByFieldValue The value of `by_field_name`. ByFieldValue *string `json:"by_field_value,omitempty"` // Causes For population analysis, an over field must be specified in the detector. // This property contains an array of anomaly records that are the causes for // the anomaly that has been identified for the over field. This sub-resource // contains the most anomalous records for the `over_field_name`. For // scalability reasons, a maximum of the 10 most significant causes of the // anomaly are returned. As part of the core analytical modeling, these // low-level anomaly records are aggregated for their parent over field record. // The `causes` resource contains similar elements to the record resource, // namely `actual`, `typical`, `geo_results.actual_point`, // `geo_results.typical_point`, `*_field_name` and `*_field_value`. Probability // and scores are not applicable to causes. Causes []AnomalyCause `json:"causes,omitempty"` // DetectorIndex A unique identifier for the detector. DetectorIndex int `json:"detector_index"` // FieldName Certain functions require a field to operate on, for example, `sum()`. For // those functions, this value is the name of the field to be analyzed. 
FieldName *string `json:"field_name,omitempty"` // Function The function in which the anomaly occurs, as specified in the detector // configuration. For example, `max`. Function *string `json:"function,omitempty"` // FunctionDescription The description of the function in which the anomaly occurs, as specified in // the detector configuration. FunctionDescription *string `json:"function_description,omitempty"` // GeoResults If the detector function is `lat_long`, this object contains comma delimited // strings for the latitude and longitude of the actual and typical values. GeoResults *GeoResults `json:"geo_results,omitempty"` // Influencers If influencers were specified in the detector configuration, this array // contains influencers that contributed to or were to blame for an anomaly. Influencers []Influence `json:"influencers,omitempty"` // InitialRecordScore A normalized score between 0-100, which is based on the probability of the // anomalousness of this record. This is the initial value that was calculated // at the time the bucket was processed. InitialRecordScore Float64 `json:"initial_record_score"` // IsInterim If true, this is an interim result. In other words, the results are // calculated based on partial input data. IsInterim bool `json:"is_interim"` // JobId Identifier for the anomaly detection job. JobId string `json:"job_id"` // OverFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to the history of all splits. It is used // for finding unusual values in the population of all splits. OverFieldName *string `json:"over_field_name,omitempty"` // OverFieldValue The value of `over_field_name`. OverFieldValue *string `json:"over_field_value,omitempty"` // PartitionFieldName The field used to segment the analysis. When you use this property, you have // completely independent baselines for each value of this field. 
PartitionFieldName *string `json:"partition_field_name,omitempty"` // PartitionFieldValue The value of `partition_field_name`. PartitionFieldValue *string `json:"partition_field_value,omitempty"` // Probability The probability of the individual anomaly occurring, in the range 0 to 1. For // example, `0.0000772031`. This value can be held to a high precision of over // 300 decimal places, so the `record_score` is provided as a human-readable and // friendly interpretation of this. Probability Float64 `json:"probability"` // RecordScore A normalized score between 0-100, which is based on the probability of the // anomalousness of this record. Unlike `initial_record_score`, this value will // be updated by a re-normalization process as new data is analyzed. RecordScore Float64 `json:"record_score"` // ResultType Internal. This is always set to `record`. ResultType string `json:"result_type"` // Timestamp The start time of the bucket for which these results were calculated. Timestamp int64 `json:"timestamp"` // Typical The typical value for the bucket, according to analytical modeling. Typical []Float64 `json:"typical,omitempty"` }
Anomaly type.
type AnomalyCause ¶
type AnomalyCause struct { Actual []Float64 `json:"actual"` ByFieldName string `json:"by_field_name"` ByFieldValue string `json:"by_field_value"` FieldName string `json:"field_name"` Function string `json:"function"` FunctionDescription string `json:"function_description"` Influencers []Influence `json:"influencers"` OverFieldName string `json:"over_field_name"` OverFieldValue string `json:"over_field_value"` PartitionFieldName string `json:"partition_field_name"` PartitionFieldValue string `json:"partition_field_value"` Probability Float64 `json:"probability"` Typical []Float64 `json:"typical"` }
AnomalyCause type.
func NewAnomalyCause ¶
func NewAnomalyCause() *AnomalyCause
NewAnomalyCause returns an AnomalyCause.
type AnomalyDetectors ¶
type AnomalyDetectors struct { CategorizationAnalyzer CategorizationAnalyzer `json:"categorization_analyzer"` CategorizationExamplesLimit int `json:"categorization_examples_limit"` DailyModelSnapshotRetentionAfterDays int `json:"daily_model_snapshot_retention_after_days"` ModelMemoryLimit string `json:"model_memory_limit"` ModelSnapshotRetentionDays int `json:"model_snapshot_retention_days"` }
AnomalyDetectors type.
func NewAnomalyDetectors ¶
func NewAnomalyDetectors() *AnomalyDetectors
NewAnomalyDetectors returns an AnomalyDetectors.
func (*AnomalyDetectors) UnmarshalJSON ¶
func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error
type AnomalyExplanation ¶
type AnomalyExplanation struct { // AnomalyCharacteristicsImpact Impact from the duration and magnitude of the detected anomaly relative to // the historical average. AnomalyCharacteristicsImpact *int `json:"anomaly_characteristics_impact,omitempty"` // AnomalyLength Length of the detected anomaly in the number of buckets. AnomalyLength *int `json:"anomaly_length,omitempty"` // AnomalyType Type of the detected anomaly: `spike` or `dip`. AnomalyType *string `json:"anomaly_type,omitempty"` // HighVariancePenalty Indicates reduction of anomaly score for the bucket with large confidence // intervals. If a bucket has large confidence intervals, the score is reduced. HighVariancePenalty *bool `json:"high_variance_penalty,omitempty"` // IncompleteBucketPenalty If the bucket contains fewer samples than expected, the score is reduced. IncompleteBucketPenalty *bool `json:"incomplete_bucket_penalty,omitempty"` // LowerConfidenceBound Lower bound of the 95% confidence interval. LowerConfidenceBound *Float64 `json:"lower_confidence_bound,omitempty"` // MultiBucketImpact Impact of the deviation between actual and typical values in the past 12 // buckets. MultiBucketImpact *int `json:"multi_bucket_impact,omitempty"` // SingleBucketImpact Impact of the deviation between actual and typical values in the current // bucket. SingleBucketImpact *int `json:"single_bucket_impact,omitempty"` // TypicalValue Typical (expected) value for this bucket. TypicalValue *Float64 `json:"typical_value,omitempty"` // UpperConfidenceBound Upper bound of the 95% confidence interval. UpperConfidenceBound *Float64 `json:"upper_confidence_bound,omitempty"` }
AnomalyExplanation type.
func NewAnomalyExplanation ¶
func NewAnomalyExplanation() *AnomalyExplanation
NewAnomalyExplanation returns an AnomalyExplanation.
type ApiKey ¶
type ApiKey struct { Creation *int64 `json:"creation,omitempty"` Expiration *int64 `json:"expiration,omitempty"` Id string `json:"id"` Invalidated *bool `json:"invalidated,omitempty"` LimitedBy []map[string]RoleDescriptor `json:"limited_by,omitempty"` Metadata map[string]json.RawMessage `json:"metadata,omitempty"` Name string `json:"name"` Realm *string `json:"realm,omitempty"` RoleDescriptors map[string]RoleDescriptor `json:"role_descriptors,omitempty"` Sort_ []FieldValue `json:"_sort,omitempty"` Username *string `json:"username,omitempty"` }
ApiKey type.
type ApiKeyAuthorization ¶
type ApiKeyAuthorization struct { // Id The identifier for the API key. Id string `json:"id"` // Name The name of the API key. Name string `json:"name"` }
ApiKeyAuthorization type.
func NewApiKeyAuthorization ¶
func NewApiKeyAuthorization() *ApiKeyAuthorization
NewApiKeyAuthorization returns an ApiKeyAuthorization.
type AppendProcessor ¶
type AppendProcessor struct { AllowDuplicates *bool `json:"allow_duplicates,omitempty"` Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` Value []json.RawMessage `json:"value"` }
AppendProcessor type.
func NewAppendProcessor ¶
func NewAppendProcessor() *AppendProcessor
NewAppendProcessor returns an AppendProcessor.
type ApplicationGlobalUserPrivileges ¶
type ApplicationGlobalUserPrivileges struct {
Manage ManageUserPrivileges `json:"manage"`
}
ApplicationGlobalUserPrivileges type.
func NewApplicationGlobalUserPrivileges ¶
func NewApplicationGlobalUserPrivileges() *ApplicationGlobalUserPrivileges
NewApplicationGlobalUserPrivileges returns an ApplicationGlobalUserPrivileges.
type ApplicationPrivileges ¶
type ApplicationPrivileges struct { // Application The name of the application to which this entry applies. Application string `json:"application"` // Privileges A list of strings, where each element is the name of an application privilege // or action. Privileges []string `json:"privileges"` // Resources A list of resources to which the privileges are applied. Resources []string `json:"resources"` }
ApplicationPrivileges type.
func NewApplicationPrivileges ¶
func NewApplicationPrivileges() *ApplicationPrivileges
NewApplicationPrivileges returns an ApplicationPrivileges.
type ApplicationPrivilegesCheck ¶
type ApplicationPrivilegesCheck struct { // Application The name of the application. Application string `json:"application"` // Privileges A list of the privileges that you want to check for the specified resources. // May be either application privilege names, or the names of actions that are // granted by those privileges Privileges []string `json:"privileges"` // Resources A list of resource names against which the privileges should be checked Resources []string `json:"resources"` }
ApplicationPrivilegesCheck type.
func NewApplicationPrivilegesCheck ¶
func NewApplicationPrivilegesCheck() *ApplicationPrivilegesCheck
NewApplicationPrivilegesCheck returns an ApplicationPrivilegesCheck.
type ApplicationsPrivileges ¶
ApplicationsPrivileges type alias.
type Archive ¶
type Archive struct { Available bool `json:"available"` Enabled bool `json:"enabled"` IndicesCount int64 `json:"indices_count"` }
Archive type.
type ArrayCompareCondition ¶
type ArrayCompareCondition struct { ArrayCompareCondition map[conditionop.ConditionOp]ArrayCompareOpParams `json:"-"` Path string `json:"path"` }
ArrayCompareCondition type.
func NewArrayCompareCondition ¶
func NewArrayCompareCondition() *ArrayCompareCondition
NewArrayCompareCondition returns an ArrayCompareCondition.
func (ArrayCompareCondition) MarshalJSON ¶
func (s ArrayCompareCondition) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type ArrayCompareOpParams ¶
type ArrayCompareOpParams struct { Quantifier quantifier.Quantifier `json:"quantifier"` Value FieldValue `json:"value"` }
ArrayCompareOpParams type.
func NewArrayCompareOpParams ¶
func NewArrayCompareOpParams() *ArrayCompareOpParams
NewArrayCompareOpParams returns an ArrayCompareOpParams.
type ArrayPercentilesItem ¶
type ArrayPercentilesItem struct { Key string `json:"key"` Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
ArrayPercentilesItem type.
func NewArrayPercentilesItem ¶
func NewArrayPercentilesItem() *ArrayPercentilesItem
NewArrayPercentilesItem returns an ArrayPercentilesItem.
type AsciiFoldingTokenFilter ¶
type AsciiFoldingTokenFilter struct { PreserveOriginal *bool `json:"preserve_original,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
AsciiFoldingTokenFilter type.
func NewAsciiFoldingTokenFilter ¶
func NewAsciiFoldingTokenFilter() *AsciiFoldingTokenFilter
NewAsciiFoldingTokenFilter returns an AsciiFoldingTokenFilter.
type AsyncSearch ¶
type AsyncSearch struct { Aggregations map[string]Aggregate `json:"aggregations,omitempty"` Clusters_ *ClusterStatistics `json:"_clusters,omitempty"` Fields map[string]json.RawMessage `json:"fields,omitempty"` Hits HitsMetadata `json:"hits"` MaxScore *Float64 `json:"max_score,omitempty"` NumReducePhases *int64 `json:"num_reduce_phases,omitempty"` PitId *string `json:"pit_id,omitempty"` Profile *Profile `json:"profile,omitempty"` ScrollId_ *string `json:"_scroll_id,omitempty"` Shards_ ShardStatistics `json:"_shards"` Suggest map[string][]Suggest `json:"suggest,omitempty"` TerminatedEarly *bool `json:"terminated_early,omitempty"` TimedOut bool `json:"timed_out"` Took int64 `json:"took"` }
AsyncSearch type.
func (*AsyncSearch) UnmarshalJSON ¶
func (s *AsyncSearch) UnmarshalJSON(data []byte) error
type AttachmentProcessor ¶
type AttachmentProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` IndexedChars *int64 `json:"indexed_chars,omitempty"` IndexedCharsField *string `json:"indexed_chars_field,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Properties []string `json:"properties,omitempty"` ResourceName *string `json:"resource_name,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
AttachmentProcessor type.
func NewAttachmentProcessor ¶
func NewAttachmentProcessor() *AttachmentProcessor
NewAttachmentProcessor returns an AttachmentProcessor.
type AuthenticateToken ¶
AuthenticateToken type.
func NewAuthenticateToken ¶
func NewAuthenticateToken() *AuthenticateToken
NewAuthenticateToken returns an AuthenticateToken.
type AuthenticatedUser ¶
type AuthenticatedUser struct { AuthenticationProvider *AuthenticationProvider `json:"authentication_provider,omitempty"` AuthenticationRealm UserRealm `json:"authentication_realm"` AuthenticationType string `json:"authentication_type"` Email string `json:"email,omitempty"` Enabled bool `json:"enabled"` FullName string `json:"full_name,omitempty"` LookupRealm UserRealm `json:"lookup_realm"` Metadata map[string]json.RawMessage `json:"metadata"` ProfileUid *string `json:"profile_uid,omitempty"` Roles []string `json:"roles"` Username string `json:"username"` }
AuthenticatedUser type.
func NewAuthenticatedUser ¶
func NewAuthenticatedUser() *AuthenticatedUser
NewAuthenticatedUser returns an AuthenticatedUser.
type AuthenticationProvider ¶
AuthenticationProvider type.
func NewAuthenticationProvider ¶
func NewAuthenticationProvider() *AuthenticationProvider
NewAuthenticationProvider returns an AuthenticationProvider.
type AutoDateHistogramAggregate ¶
type AutoDateHistogramAggregate struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Interval string `json:"interval"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
AutoDateHistogramAggregate type.
func NewAutoDateHistogramAggregate ¶
func NewAutoDateHistogramAggregate() *AutoDateHistogramAggregate
NewAutoDateHistogramAggregate returns an AutoDateHistogramAggregate.
func (*AutoDateHistogramAggregate) UnmarshalJSON ¶
func (s *AutoDateHistogramAggregate) UnmarshalJSON(data []byte) error
type AutoDateHistogramAggregation ¶
type AutoDateHistogramAggregation struct { Buckets *int `json:"buckets,omitempty"` Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` MinimumInterval *minimuminterval.MinimumInterval `json:"minimum_interval,omitempty"` Missing DateTime `json:"missing,omitempty"` Name *string `json:"name,omitempty"` Offset *string `json:"offset,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` Script Script `json:"script,omitempty"` TimeZone *string `json:"time_zone,omitempty"` }
AutoDateHistogramAggregation type.
func NewAutoDateHistogramAggregation ¶
func NewAutoDateHistogramAggregation() *AutoDateHistogramAggregation
NewAutoDateHistogramAggregation returns an AutoDateHistogramAggregation.
type AutoFollowPattern ¶
type AutoFollowPattern struct { Name string `json:"name"` Pattern AutoFollowPatternSummary `json:"pattern"` }
AutoFollowPattern type.
func NewAutoFollowPattern ¶
func NewAutoFollowPattern() *AutoFollowPattern
NewAutoFollowPattern returns an AutoFollowPattern.
type AutoFollowPatternSummary ¶
type AutoFollowPatternSummary struct { Active bool `json:"active"` // FollowIndexPattern The name of the follower index. FollowIndexPattern *string `json:"follow_index_pattern,omitempty"` // LeaderIndexExclusionPatterns An array of simple index patterns that can be used to exclude indices from // being auto-followed. LeaderIndexExclusionPatterns []string `json:"leader_index_exclusion_patterns"` // LeaderIndexPatterns An array of simple index patterns to match against indices in the remote // cluster specified by the remote_cluster field. LeaderIndexPatterns []string `json:"leader_index_patterns"` // MaxOutstandingReadRequests The maximum number of outstanding read requests from the remote cluster. MaxOutstandingReadRequests int `json:"max_outstanding_read_requests"` // RemoteCluster The remote cluster containing the leader indices to match against. RemoteCluster string `json:"remote_cluster"` }
AutoFollowPatternSummary type.
func NewAutoFollowPatternSummary ¶
func NewAutoFollowPatternSummary() *AutoFollowPatternSummary
NewAutoFollowPatternSummary returns an AutoFollowPatternSummary.
type AutoFollowStats ¶
type AutoFollowStats struct { AutoFollowedClusters []AutoFollowedCluster `json:"auto_followed_clusters"` NumberOfFailedFollowIndices int64 `json:"number_of_failed_follow_indices"` NumberOfFailedRemoteClusterStateRequests int64 `json:"number_of_failed_remote_cluster_state_requests"` NumberOfSuccessfulFollowIndices int64 `json:"number_of_successful_follow_indices"` RecentAutoFollowErrors []ErrorCause `json:"recent_auto_follow_errors"` }
AutoFollowStats type.
func NewAutoFollowStats ¶
func NewAutoFollowStats() *AutoFollowStats
NewAutoFollowStats returns an AutoFollowStats.
type AutoFollowedCluster ¶
type AutoFollowedCluster struct { ClusterName string `json:"cluster_name"` LastSeenMetadataVersion int64 `json:"last_seen_metadata_version"` TimeSinceLastCheckMillis int64 `json:"time_since_last_check_millis"` }
AutoFollowedCluster type.
func NewAutoFollowedCluster ¶
func NewAutoFollowedCluster() *AutoFollowedCluster
NewAutoFollowedCluster returns an AutoFollowedCluster.
type AutoscalingCapacity ¶
type AutoscalingCapacity struct { Node AutoscalingResources `json:"node"` Total AutoscalingResources `json:"total"` }
AutoscalingCapacity type.
func NewAutoscalingCapacity ¶
func NewAutoscalingCapacity() *AutoscalingCapacity
NewAutoscalingCapacity returns an AutoscalingCapacity.
type AutoscalingDecider ¶
type AutoscalingDecider struct { ReasonDetails json.RawMessage `json:"reason_details,omitempty"` ReasonSummary *string `json:"reason_summary,omitempty"` RequiredCapacity AutoscalingCapacity `json:"required_capacity"` }
AutoscalingDecider type.
func NewAutoscalingDecider ¶
func NewAutoscalingDecider() *AutoscalingDecider
NewAutoscalingDecider returns an AutoscalingDecider.
type AutoscalingDeciders ¶
type AutoscalingDeciders struct { CurrentCapacity AutoscalingCapacity `json:"current_capacity"` CurrentNodes []AutoscalingNode `json:"current_nodes"` Deciders map[string]AutoscalingDecider `json:"deciders"` RequiredCapacity AutoscalingCapacity `json:"required_capacity"` }
AutoscalingDeciders type.
func NewAutoscalingDeciders ¶
func NewAutoscalingDeciders() *AutoscalingDeciders
NewAutoscalingDeciders returns an AutoscalingDeciders.
type AutoscalingNode ¶
type AutoscalingNode struct {
Name string `json:"name"`
}
AutoscalingNode type.
func NewAutoscalingNode ¶
func NewAutoscalingNode() *AutoscalingNode
NewAutoscalingNode returns an AutoscalingNode.
type AutoscalingPolicy ¶
type AutoscalingPolicy struct { // Deciders Decider settings Deciders map[string]json.RawMessage `json:"deciders"` Roles []string `json:"roles"` }
AutoscalingPolicy type.
func NewAutoscalingPolicy ¶
func NewAutoscalingPolicy() *AutoscalingPolicy
NewAutoscalingPolicy returns an AutoscalingPolicy.
type AutoscalingResources ¶
AutoscalingResources type.
func NewAutoscalingResources ¶
func NewAutoscalingResources() *AutoscalingResources
NewAutoscalingResources returns an AutoscalingResources.
type AverageAggregation ¶
type AverageAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
AverageAggregation type.
func NewAverageAggregation ¶
func NewAverageAggregation() *AverageAggregation
NewAverageAggregation returns an AverageAggregation.
type AverageBucketAggregation ¶
type AverageBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
AverageBucketAggregation type.
func NewAverageBucketAggregation ¶
func NewAverageBucketAggregation() *AverageBucketAggregation
NewAverageBucketAggregation returns an AverageBucketAggregation.
func (*AverageBucketAggregation) UnmarshalJSON ¶
func (s *AverageBucketAggregation) UnmarshalJSON(data []byte) error
type AvgAggregate ¶
type AvgAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
AvgAggregate type.
func NewAvgAggregate ¶
func NewAvgAggregate() *AvgAggregate
NewAvgAggregate returns an AvgAggregate.
type BaseNode ¶
type BaseNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` Ip string `json:"ip"` Name string `json:"name"` Roles []noderole.NodeRole `json:"roles,omitempty"` TransportAddress string `json:"transport_address"` }
BaseNode type.
type BinaryProperty ¶
type BinaryProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
BinaryProperty type.
func NewBinaryProperty ¶
func NewBinaryProperty() *BinaryProperty
NewBinaryProperty returns a BinaryProperty.
func (*BinaryProperty) UnmarshalJSON ¶
func (s *BinaryProperty) UnmarshalJSON(data []byte) error
type BoolQuery ¶
type BoolQuery struct { Boost *float32 `json:"boost,omitempty"` Filter []Query `json:"filter,omitempty"` MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` Must []Query `json:"must,omitempty"` MustNot []Query `json:"must_not,omitempty"` QueryName_ *string `json:"_name,omitempty"` Should []Query `json:"should,omitempty"` }
BoolQuery type.
type BooleanProperty ¶
type BooleanProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fielddata *NumericFielddata `json:"fielddata,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *bool `json:"null_value,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
BooleanProperty type.
func NewBooleanProperty ¶
func NewBooleanProperty() *BooleanProperty
NewBooleanProperty returns a BooleanProperty.
func (*BooleanProperty) UnmarshalJSON ¶
func (s *BooleanProperty) UnmarshalJSON(data []byte) error
type BoostingQuery ¶
type BoostingQuery struct { Boost *float32 `json:"boost,omitempty"` Negative *Query `json:"negative,omitempty"` NegativeBoost Float64 `json:"negative_boost"` Positive *Query `json:"positive,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
BoostingQuery type.
func NewBoostingQuery ¶
func NewBoostingQuery() *BoostingQuery
NewBoostingQuery returns a BoostingQuery.
type BoxPlotAggregate ¶
type BoxPlotAggregate struct { Lower Float64 `json:"lower"` LowerAsString *string `json:"lower_as_string,omitempty"` Max Float64 `json:"max"` MaxAsString *string `json:"max_as_string,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Min Float64 `json:"min"` MinAsString *string `json:"min_as_string,omitempty"` Q1 Float64 `json:"q1"` Q1AsString *string `json:"q1_as_string,omitempty"` Q2 Float64 `json:"q2"` Q2AsString *string `json:"q2_as_string,omitempty"` Q3 Float64 `json:"q3"` Q3AsString *string `json:"q3_as_string,omitempty"` Upper Float64 `json:"upper"` UpperAsString *string `json:"upper_as_string,omitempty"` }
BoxPlotAggregate type.
func NewBoxPlotAggregate ¶
func NewBoxPlotAggregate() *BoxPlotAggregate
NewBoxPlotAggregate returns a BoxPlotAggregate.
type BoxplotAggregation ¶
type BoxplotAggregation struct { Compression *Float64 `json:"compression,omitempty"` Field *string `json:"field,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
BoxplotAggregation type.
func NewBoxplotAggregation ¶
func NewBoxplotAggregation() *BoxplotAggregation
NewBoxplotAggregation returns a BoxplotAggregation.
type Breaker ¶
type Breaker struct { EstimatedSize *string `json:"estimated_size,omitempty"` EstimatedSizeInBytes *int64 `json:"estimated_size_in_bytes,omitempty"` LimitSize *string `json:"limit_size,omitempty"` LimitSizeInBytes *int64 `json:"limit_size_in_bytes,omitempty"` Overhead *float32 `json:"overhead,omitempty"` Tripped *float32 `json:"tripped,omitempty"` }
Breaker type.
type BucketCorrelationAggregation ¶
type BucketCorrelationAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` // Function The correlation function to execute. Function BucketCorrelationFunction `json:"function"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
BucketCorrelationAggregation type.
func NewBucketCorrelationAggregation ¶
func NewBucketCorrelationAggregation() *BucketCorrelationAggregation
NewBucketCorrelationAggregation returns a BucketCorrelationAggregation.
func (*BucketCorrelationAggregation) UnmarshalJSON ¶
func (s *BucketCorrelationAggregation) UnmarshalJSON(data []byte) error
type BucketCorrelationFunction ¶
type BucketCorrelationFunction struct { // CountCorrelation The configuration to calculate a count correlation. This function is designed // for determining the correlation of a term value and a given metric. CountCorrelation BucketCorrelationFunctionCountCorrelation `json:"count_correlation"` }
BucketCorrelationFunction type.
func NewBucketCorrelationFunction ¶
func NewBucketCorrelationFunction() *BucketCorrelationFunction
NewBucketCorrelationFunction returns a BucketCorrelationFunction.
type BucketCorrelationFunctionCountCorrelation ¶
type BucketCorrelationFunctionCountCorrelation struct { // Indicator The indicator with which to correlate the configured `bucket_path` values. Indicator BucketCorrelationFunctionCountCorrelationIndicator `json:"indicator"` }
BucketCorrelationFunctionCountCorrelation type.
func NewBucketCorrelationFunctionCountCorrelation ¶
func NewBucketCorrelationFunctionCountCorrelation() *BucketCorrelationFunctionCountCorrelation
NewBucketCorrelationFunctionCountCorrelation returns a BucketCorrelationFunctionCountCorrelation.
type BucketCorrelationFunctionCountCorrelationIndicator ¶
type BucketCorrelationFunctionCountCorrelationIndicator struct { // DocCount The total number of documents that initially created the expectations. It’s // required to be greater // than or equal to the sum of all values in the buckets_path as this is the // originating superset of data // to which the term values are correlated. DocCount int `json:"doc_count"` // Expectations An array of numbers with which to correlate the configured `bucket_path` // values. // The length of this value must always equal the number of buckets returned by // the `bucket_path`. Expectations []Float64 `json:"expectations"` // Fractions An array of fractions to use when averaging and calculating variance. This // should be used if // the pre-calculated data and the buckets_path have known gaps. The length of // fractions, if provided, // must equal expectations. Fractions []Float64 `json:"fractions,omitempty"` }
BucketCorrelationFunctionCountCorrelationIndicator type.
func NewBucketCorrelationFunctionCountCorrelationIndicator ¶
func NewBucketCorrelationFunctionCountCorrelationIndicator() *BucketCorrelationFunctionCountCorrelationIndicator
NewBucketCorrelationFunctionCountCorrelationIndicator returns a BucketCorrelationFunctionCountCorrelationIndicator.
type BucketInfluencer ¶
type BucketInfluencer struct { // AnomalyScore A normalized score between 0-100, which is calculated for each bucket // influencer. This score might be updated as // newer data is analyzed. AnomalyScore Float64 `json:"anomaly_score"` // BucketSpan The length of the bucket in seconds. This value matches the bucket span that // is specified in the job. BucketSpan int64 `json:"bucket_span"` // InfluencerFieldName The field name of the influencer. InfluencerFieldName string `json:"influencer_field_name"` // InitialAnomalyScore The score between 0-100 for each bucket influencer. This score is the initial // value that was calculated at the // time the bucket was processed. InitialAnomalyScore Float64 `json:"initial_anomaly_score"` // IsInterim If true, this is an interim result. In other words, the results are // calculated based on partial input data. IsInterim bool `json:"is_interim"` // JobId Identifier for the anomaly detection job. JobId string `json:"job_id"` // Probability The probability that the bucket has this behavior, in the range 0 to 1. This // value can be held to a high precision // of over 300 decimal places, so the `anomaly_score` is provided as a // human-readable and friendly interpretation of // this. Probability Float64 `json:"probability"` // RawAnomalyScore Internal. RawAnomalyScore Float64 `json:"raw_anomaly_score"` // ResultType Internal. This value is always set to `bucket_influencer`. ResultType string `json:"result_type"` // Timestamp The start time of the bucket for which these results were calculated. Timestamp int64 `json:"timestamp"` // TimestampString The start time of the bucket for which these results were calculated. TimestampString DateTime `json:"timestamp_string,omitempty"` }
BucketInfluencer type.
func NewBucketInfluencer ¶
func NewBucketInfluencer() *BucketInfluencer
NewBucketInfluencer returns a BucketInfluencer.
type BucketKsAggregation ¶
type BucketKsAggregation struct { // Alternative A list of string values indicating which K-S test alternative to calculate. // The valid values // are: "greater", "less", "two_sided". This parameter is key for determining // the K-S statistic used // when calculating the K-S test. Default value is all possible alternative // hypotheses. Alternative []string `json:"alternative,omitempty"` // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` // Fractions A list of doubles indicating the distribution of the samples with which to // compare to the `buckets_path` results. // In typical usage this is the overall proportion of documents in each bucket, // which is compared with the actual // document proportions in each bucket from the sibling aggregation counts. The // default is to assume that overall // documents are uniformly distributed on these buckets, which they would be if // one used equal percentiles of a // metric to define the bucket end points. Fractions []Float64 `json:"fractions,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` // SamplingMethod Indicates the sampling methodology when calculating the K-S test. Note, this // is sampling of the returned values. // This determines the cumulative distribution function (CDF) points used // comparing the two samples. Default is // `upper_tail`, which emphasizes the upper end of the CDF points. Valid options // are: `upper_tail`, `uniform`, // and `lower_tail`. SamplingMethod *string `json:"sampling_method,omitempty"` }
BucketKsAggregation type.
func NewBucketKsAggregation ¶
func NewBucketKsAggregation() *BucketKsAggregation
NewBucketKsAggregation returns a BucketKsAggregation.
func (*BucketKsAggregation) UnmarshalJSON ¶
func (s *BucketKsAggregation) UnmarshalJSON(data []byte) error
type BucketMetricValueAggregate ¶
type BucketMetricValueAggregate struct { Keys []string `json:"keys"` Meta map[string]json.RawMessage `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
BucketMetricValueAggregate type.
func NewBucketMetricValueAggregate ¶
func NewBucketMetricValueAggregate() *BucketMetricValueAggregate
NewBucketMetricValueAggregate returns a BucketMetricValueAggregate.
type BucketPathAggregation ¶
type BucketPathAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
BucketPathAggregation type.
func NewBucketPathAggregation ¶
func NewBucketPathAggregation() *BucketPathAggregation
NewBucketPathAggregation returns a BucketPathAggregation.
func (*BucketPathAggregation) UnmarshalJSON ¶
func (s *BucketPathAggregation) UnmarshalJSON(data []byte) error
type BucketScriptAggregation ¶
type BucketScriptAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Script Script `json:"script,omitempty"` }
BucketScriptAggregation type.
func NewBucketScriptAggregation ¶
func NewBucketScriptAggregation() *BucketScriptAggregation
NewBucketScriptAggregation returns a BucketScriptAggregation.
func (*BucketScriptAggregation) UnmarshalJSON ¶
func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error
type BucketSelectorAggregation ¶
type BucketSelectorAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Script Script `json:"script,omitempty"` }
BucketSelectorAggregation type.
func NewBucketSelectorAggregation ¶
func NewBucketSelectorAggregation() *BucketSelectorAggregation
NewBucketSelectorAggregation returns a BucketSelectorAggregation.
func (*BucketSelectorAggregation) UnmarshalJSON ¶
func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error
type BucketSortAggregation ¶
type BucketSortAggregation struct { From *int `json:"from,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Size *int `json:"size,omitempty"` Sort []SortCombinations `json:"sort,omitempty"` }
BucketSortAggregation type.
func NewBucketSortAggregation ¶
func NewBucketSortAggregation() *BucketSortAggregation
NewBucketSortAggregation returns a BucketSortAggregation.
type BucketSummary ¶
type BucketSummary struct { // AnomalyScore The maximum anomaly score, between 0-100, for any of the bucket influencers. // This is an overall, rate-limited // score for the job. All the anomaly records in the bucket contribute to this // score. This value might be updated as // new data is analyzed. AnomalyScore Float64 `json:"anomaly_score"` BucketInfluencers []BucketInfluencer `json:"bucket_influencers"` // BucketSpan The length of the bucket in seconds. This value matches the bucket span that // is specified in the job. BucketSpan int64 `json:"bucket_span"` // EventCount The number of input data records processed in this bucket. EventCount int64 `json:"event_count"` // InitialAnomalyScore The maximum anomaly score for any of the bucket influencers. This is the // initial value that was calculated at the // time the bucket was processed. InitialAnomalyScore Float64 `json:"initial_anomaly_score"` // IsInterim If true, this is an interim result. In other words, the results are // calculated based on partial input data. IsInterim bool `json:"is_interim"` // JobId Identifier for the anomaly detection job. JobId string `json:"job_id"` // ProcessingTimeMs The amount of time, in milliseconds, that it took to analyze the bucket // contents and calculate results. ProcessingTimeMs int64 `json:"processing_time_ms"` // ResultType Internal. This value is always set to bucket. ResultType string `json:"result_type"` // Timestamp The start time of the bucket. This timestamp uniquely identifies the bucket. // Events that occur exactly at the // timestamp of the bucket are included in the results for the bucket. Timestamp int64 `json:"timestamp"` // TimestampString The start time of the bucket. This timestamp uniquely identifies the bucket. // Events that occur exactly at the // timestamp of the bucket are included in the results for the bucket. TimestampString DateTime `json:"timestamp_string,omitempty"` }
BucketSummary type.
func NewBucketSummary ¶
func NewBucketSummary() *BucketSummary
NewBucketSummary returns a BucketSummary.
type BucketsAdjacencyMatrixBucket ¶
type BucketsAdjacencyMatrixBucket interface{}
BucketsAdjacencyMatrixBucket holds the union for the following types:
map[string]AdjacencyMatrixBucket []AdjacencyMatrixBucket
type BucketsCompositeBucket ¶
type BucketsCompositeBucket interface{}
BucketsCompositeBucket holds the union for the following types:
map[string]CompositeBucket []CompositeBucket
type BucketsDateHistogramBucket ¶
type BucketsDateHistogramBucket interface{}
BucketsDateHistogramBucket holds the union for the following types:
map[string]DateHistogramBucket []DateHistogramBucket
type BucketsDoubleTermsBucket ¶
type BucketsDoubleTermsBucket interface{}
BucketsDoubleTermsBucket holds the union for the following types:
map[string]DoubleTermsBucket []DoubleTermsBucket
type BucketsFiltersBucket ¶
type BucketsFiltersBucket interface{}
BucketsFiltersBucket holds the union for the following types:
map[string]FiltersBucket []FiltersBucket
type BucketsGeoHashGridBucket ¶
type BucketsGeoHashGridBucket interface{}
BucketsGeoHashGridBucket holds the union for the following types:
map[string]GeoHashGridBucket []GeoHashGridBucket
type BucketsGeoHexGridBucket ¶
type BucketsGeoHexGridBucket interface{}
BucketsGeoHexGridBucket holds the union for the following types:
map[string]GeoHexGridBucket []GeoHexGridBucket
type BucketsGeoTileGridBucket ¶
type BucketsGeoTileGridBucket interface{}
BucketsGeoTileGridBucket holds the union for the following types:
map[string]GeoTileGridBucket []GeoTileGridBucket
type BucketsHistogramBucket ¶
type BucketsHistogramBucket interface{}
BucketsHistogramBucket holds the union for the following types:
map[string]HistogramBucket []HistogramBucket
type BucketsIpPrefixBucket ¶
type BucketsIpPrefixBucket interface{}
BucketsIpPrefixBucket holds the union for the following types:
map[string]IpPrefixBucket []IpPrefixBucket
type BucketsIpRangeBucket ¶
type BucketsIpRangeBucket interface{}
BucketsIpRangeBucket holds the union for the following types:
map[string]IpRangeBucket []IpRangeBucket
type BucketsLongRareTermsBucket ¶
type BucketsLongRareTermsBucket interface{}
BucketsLongRareTermsBucket holds the union for the following types:
map[string]LongRareTermsBucket []LongRareTermsBucket
type BucketsLongTermsBucket ¶
type BucketsLongTermsBucket interface{}
BucketsLongTermsBucket holds the union for the following types:
map[string]LongTermsBucket []LongTermsBucket
type BucketsMultiTermsBucket ¶
type BucketsMultiTermsBucket interface{}
BucketsMultiTermsBucket holds the union for the following types:
map[string]MultiTermsBucket []MultiTermsBucket
type BucketsPath ¶
type BucketsPath interface{}
BucketsPath holds the union for the following types:
string []string map[string]string
type BucketsQuery ¶
type BucketsQuery interface{}
BucketsQuery holds the union for the following types:
map[string]Query []Query
type BucketsRangeBucket ¶
type BucketsRangeBucket interface{}
BucketsRangeBucket holds the union for the following types:
map[string]RangeBucket []RangeBucket
type BucketsSignificantLongTermsBucket ¶
type BucketsSignificantLongTermsBucket interface{}
BucketsSignificantLongTermsBucket holds the union for the following types:
map[string]SignificantLongTermsBucket []SignificantLongTermsBucket
type BucketsSignificantStringTermsBucket ¶
type BucketsSignificantStringTermsBucket interface{}
BucketsSignificantStringTermsBucket holds the union for the following types:
map[string]SignificantStringTermsBucket []SignificantStringTermsBucket
type BucketsStringRareTermsBucket ¶
type BucketsStringRareTermsBucket interface{}
BucketsStringRareTermsBucket holds the union for the following types:
map[string]StringRareTermsBucket []StringRareTermsBucket
type BucketsStringTermsBucket ¶
type BucketsStringTermsBucket interface{}
BucketsStringTermsBucket holds the union for the following types:
map[string]StringTermsBucket []StringTermsBucket
type BucketsVariableWidthHistogramBucket ¶
type BucketsVariableWidthHistogramBucket interface{}
BucketsVariableWidthHistogramBucket holds the union for the following types:
map[string]VariableWidthHistogramBucket []VariableWidthHistogramBucket
type BucketsVoid ¶
type BucketsVoid interface{}
BucketsVoid holds the union for the following types:
map[string]struct{} []struct{}
type BuildInformation ¶
BuildInformation type.
func NewBuildInformation ¶
func NewBuildInformation() *BuildInformation
NewBuildInformation returns a BuildInformation.
type BulkIndexByScrollFailure ¶
type BulkIndexByScrollFailure struct { Cause ErrorCause `json:"cause"` Id string `json:"id"` Index string `json:"index"` Status int `json:"status"` Type string `json:"type"` }
BulkIndexByScrollFailure type.
func NewBulkIndexByScrollFailure ¶
func NewBulkIndexByScrollFailure() *BulkIndexByScrollFailure
NewBulkIndexByScrollFailure returns a BulkIndexByScrollFailure.
type BulkStats ¶
type BulkStats struct { AvgSize ByteSize `json:"avg_size,omitempty"` AvgSizeInBytes int64 `json:"avg_size_in_bytes"` AvgTime Duration `json:"avg_time,omitempty"` AvgTimeInMillis int64 `json:"avg_time_in_millis"` TotalOperations int64 `json:"total_operations"` TotalSize ByteSize `json:"total_size,omitempty"` TotalSizeInBytes int64 `json:"total_size_in_bytes"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` }
BulkStats type.
type ByteNumberProperty ¶
type ByteNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *byte `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric For internal use by Elastic only. Marks the field as a time series metric. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
ByteNumberProperty type.
func NewByteNumberProperty ¶
func NewByteNumberProperty() *ByteNumberProperty
NewByteNumberProperty returns a ByteNumberProperty.
func (*ByteNumberProperty) UnmarshalJSON ¶
func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error
type ByteSize ¶
type ByteSize interface{}
ByteSize holds the union for the following types:
int64 string
type BytesProcessor ¶
type BytesProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
BytesProcessor type.
func NewBytesProcessor ¶
func NewBytesProcessor() *BytesProcessor
NewBytesProcessor returns a BytesProcessor.
type CacheQueries ¶
type CacheQueries struct {
Enabled bool `json:"enabled"`
}
CacheQueries type.
func NewCacheQueries ¶
func NewCacheQueries() *CacheQueries
NewCacheQueries returns a CacheQueries.
type CacheStats ¶
type CacheStats struct { Count int `json:"count"` Evictions int `json:"evictions"` Hits int `json:"hits"` Misses int `json:"misses"` NodeId string `json:"node_id"` }
CacheStats type.
type Calendar ¶
type Calendar struct { // CalendarId A string that uniquely identifies a calendar. CalendarId string `json:"calendar_id"` // Description A description of the calendar. Description *string `json:"description,omitempty"` // JobIds An array of anomaly detection job identifiers. JobIds []string `json:"job_ids"` }
Calendar type.
type CalendarEvent ¶
type CalendarEvent struct { // CalendarId A string that uniquely identifies a calendar. CalendarId *string `json:"calendar_id,omitempty"` // Description A description of the scheduled event. Description string `json:"description"` // EndTime The timestamp for the end of the scheduled event in milliseconds since the // epoch or ISO 8601 format. EndTime DateTime `json:"end_time"` EventId *string `json:"event_id,omitempty"` // StartTime The timestamp for the beginning of the scheduled event in milliseconds since // the epoch or ISO 8601 format. StartTime DateTime `json:"start_time"` }
CalendarEvent type.
func NewCalendarEvent ¶
func NewCalendarEvent() *CalendarEvent
NewCalendarEvent returns a CalendarEvent.
type CardinalityAggregate ¶
type CardinalityAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Value int64 `json:"value"` }
CardinalityAggregate type.
func NewCardinalityAggregate ¶
func NewCardinalityAggregate() *CardinalityAggregate
NewCardinalityAggregate returns a CardinalityAggregate.
type CardinalityAggregation ¶
type CardinalityAggregation struct { ExecutionHint *cardinalityexecutionmode.CardinalityExecutionMode `json:"execution_hint,omitempty"` Field *string `json:"field,omitempty"` Missing Missing `json:"missing,omitempty"` PrecisionThreshold *int `json:"precision_threshold,omitempty"` Rehash *bool `json:"rehash,omitempty"` Script Script `json:"script,omitempty"` }
CardinalityAggregation type.
func NewCardinalityAggregation ¶
func NewCardinalityAggregation() *CardinalityAggregation
NewCardinalityAggregation returns a CardinalityAggregation.
type CatAnonalyDetectorColumns ¶
type CatAnonalyDetectorColumns []catanomalydetectorcolumn.CatAnomalyDetectorColumn
CatAnonalyDetectorColumns type alias.
type CatComponentTemplate ¶
type CatComponentTemplate struct { AliasCount string `json:"alias_count"` IncludedIn string `json:"included_in"` MappingCount string `json:"mapping_count"` MetadataCount string `json:"metadata_count"` Name string `json:"name"` SettingsCount string `json:"settings_count"` Version string `json:"version"` }
CatComponentTemplate type.
func NewCatComponentTemplate ¶
func NewCatComponentTemplate() *CatComponentTemplate
NewCatComponentTemplate returns a CatComponentTemplate.
type CatDatafeedColumns ¶
type CatDatafeedColumns []catdatafeedcolumn.CatDatafeedColumn
CatDatafeedColumns type alias.
type CatTrainedModelsColumns ¶
type CatTrainedModelsColumns []cattrainedmodelscolumn.CatTrainedModelsColumn
CatTrainedModelsColumns type alias.
type CatTransformColumns ¶
type CatTransformColumns []cattransformcolumn.CatTransformColumn
CatTransformColumns type alias.
type CategorizationAnalyzer ¶
type CategorizationAnalyzer interface{}
CategorizationAnalyzer holds the union for the following types:
string CategorizationAnalyzerDefinition
type CategorizationAnalyzerDefinition ¶
type CategorizationAnalyzerDefinition struct { // CharFilter One or more character filters. In addition to the built-in character filters, // other plugins can provide more character filters. If this property is not // specified, no character filters are applied prior to categorization. If you // are customizing some other aspect of the analyzer and you need to achieve the // equivalent of `categorization_filters` (which are not permitted when some // other aspect of the analyzer is customized), add them here as pattern replace // character filters. CharFilter []CharFilter `json:"char_filter,omitempty"` // Filter One or more token filters. In addition to the built-in token filters, other // plugins can provide more token filters. If this property is not specified, no // token filters are applied prior to categorization. Filter []TokenFilter `json:"filter,omitempty"` // Tokenizer The name or definition of the tokenizer to use after character filters are // applied. This property is compulsory if `categorization_analyzer` is // specified as an object. Machine learning provides a tokenizer called // `ml_standard` that tokenizes in a way that has been determined to produce // good categorization results on a variety of log file formats for logs in // English. If you want to use that tokenizer but change the character or token // filters, specify "tokenizer": "ml_standard" in your // `categorization_analyzer`. Additionally, the `ml_classic` tokenizer is // available, which tokenizes in the same way as the non-customizable tokenizer // in old versions of the product (before 6.2). `ml_classic` was the default // categorization tokenizer in versions 6.2 to 7.13, so if you need // categorization identical to the default for jobs created in these versions, // specify "tokenizer": "ml_classic" in your `categorization_analyzer`. Tokenizer Tokenizer `json:"tokenizer,omitempty"` }
CategorizationAnalyzerDefinition type.
func NewCategorizationAnalyzerDefinition ¶
func NewCategorizationAnalyzerDefinition() *CategorizationAnalyzerDefinition
NewCategorizationAnalyzerDefinition returns a CategorizationAnalyzerDefinition.
func (*CategorizationAnalyzerDefinition) UnmarshalJSON ¶
func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error
type CategorizeTextAggregation ¶
type CategorizeTextAggregation struct { // CategorizationAnalyzer The categorization analyzer specifies how the text is analyzed and tokenized // before being categorized. // The syntax is very similar to that used to define the analyzer in the // [Analyze // endpoint](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-analyze.html). // This property // cannot be used at the same time as categorization_filters. CategorizationAnalyzer CategorizeTextAnalyzer `json:"categorization_analyzer,omitempty"` // CategorizationFilters This property expects an array of regular expressions. The expressions are // used to filter out matching // sequences from the categorization field values. You can use this // functionality to fine tune the categorization // by excluding sequences from consideration when categories are defined. For // example, you can exclude SQL // statements that appear in your log files. This property cannot be used at the // same time as categorization_analyzer. // If you only want to define simple regular expression filters that are applied // prior to tokenization, setting // this property is the easiest method. If you also want to customize the // tokenizer or post-tokenization filtering, // use the categorization_analyzer property instead and include the filters as // pattern_replace character filters. CategorizationFilters []string `json:"categorization_filters,omitempty"` // Field The semi-structured text field to categorize. Field string `json:"field"` // MaxMatchedTokens The maximum number of token positions to match on before attempting to merge // categories. Larger // values will use more memory and create narrower categories. Max allowed value // is 100. MaxMatchedTokens *int `json:"max_matched_tokens,omitempty"` // MaxUniqueTokens The maximum number of unique tokens at any position up to max_matched_tokens. // Must be larger than 1. // Smaller values use less memory and create fewer categories. 
Larger values // will use more memory and // create narrower categories. Max allowed value is 100. MaxUniqueTokens *int `json:"max_unique_tokens,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` // MinDocCount The minimum number of documents for a bucket to be returned to the results. MinDocCount *int `json:"min_doc_count,omitempty"` Name *string `json:"name,omitempty"` // ShardMinDocCount The minimum number of documents for a bucket to be returned from the shard // before merging. ShardMinDocCount *int `json:"shard_min_doc_count,omitempty"` // ShardSize The number of categorization buckets to return from each shard before merging // all the results. ShardSize *int `json:"shard_size,omitempty"` // SimilarityThreshold The minimum percentage of tokens that must match for text to be added to the // category bucket. Must // be between 1 and 100. The larger the value the narrower the categories. // Larger values will increase memory // usage and create narrower categories. SimilarityThreshold *int `json:"similarity_threshold,omitempty"` // Size The number of buckets to return. Size *int `json:"size,omitempty"` }
CategorizeTextAggregation type.
func NewCategorizeTextAggregation ¶
func NewCategorizeTextAggregation() *CategorizeTextAggregation
NewCategorizeTextAggregation returns a CategorizeTextAggregation.
func (*CategorizeTextAggregation) UnmarshalJSON ¶
func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error
type CategorizeTextAnalyzer ¶
type CategorizeTextAnalyzer interface{}
CategorizeTextAnalyzer holds the union for the following types:
string CustomCategorizeTextAnalyzer
type Category ¶
type Category struct { // CategoryId A unique identifier for the category. category_id is unique at the job level, // even when per-partition categorization is enabled. CategoryId uint64 `json:"category_id"` // Examples A list of examples of actual values that matched the category. Examples []string `json:"examples"` // GrokPattern [experimental] A Grok pattern that could be used in Logstash or an ingest // pipeline to extract fields from messages that match the category. This field // is experimental and may be changed or removed in a future release. The Grok // patterns that are found are not optimal, but are often a good starting point // for manual tweaking. GrokPattern *string `json:"grok_pattern,omitempty"` // JobId Identifier for the anomaly detection job. JobId string `json:"job_id"` // MaxMatchingLength The maximum length of the fields that matched the category. The value is // increased by 10% to enable matching for similar fields that have not been // analyzed. MaxMatchingLength uint64 `json:"max_matching_length"` Mlcategory string `json:"mlcategory"` // NumMatches The number of messages that have been matched by this category. This is only // guaranteed to have the latest accurate count after a job _flush or _close NumMatches *int64 `json:"num_matches,omitempty"` P *string `json:"p,omitempty"` // PartitionFieldName If per-partition categorization is enabled, this property identifies the // field used to segment the categorization. It is not present when // per-partition categorization is disabled. PartitionFieldName *string `json:"partition_field_name,omitempty"` // PartitionFieldValue If per-partition categorization is enabled, this property identifies the // value of the partition_field_name for the category. It is not present when // per-partition categorization is disabled. PartitionFieldValue *string `json:"partition_field_value,omitempty"` // PreferredToCategories A list of category_id entries that this current category encompasses. 
Any new // message that is processed by the categorizer will match against this category // and not any of the categories in this list. This is only guaranteed to have // the latest accurate list of categories after a job _flush or _close PreferredToCategories []string `json:"preferred_to_categories,omitempty"` // Regex A regular expression that is used to search for values that match the // category. Regex string `json:"regex"` ResultType string `json:"result_type"` // Terms A space separated list of the common tokens that are matched in values of the // category. Terms string `json:"terms"` }
Category type.
type Ccr ¶
type Ccr struct { AutoFollowPatternsCount int `json:"auto_follow_patterns_count"` Available bool `json:"available"` Enabled bool `json:"enabled"` FollowerIndicesCount int `json:"follower_indices_count"` }
Ccr type.
type CcrShardStats ¶
type CcrShardStats struct { BytesRead int64 `json:"bytes_read"` FailedReadRequests int64 `json:"failed_read_requests"` FailedWriteRequests int64 `json:"failed_write_requests"` FatalException *ErrorCause `json:"fatal_exception,omitempty"` FollowerAliasesVersion int64 `json:"follower_aliases_version"` FollowerGlobalCheckpoint int64 `json:"follower_global_checkpoint"` FollowerIndex string `json:"follower_index"` FollowerMappingVersion int64 `json:"follower_mapping_version"` FollowerMaxSeqNo int64 `json:"follower_max_seq_no"` FollowerSettingsVersion int64 `json:"follower_settings_version"` LastRequestedSeqNo int64 `json:"last_requested_seq_no"` LeaderGlobalCheckpoint int64 `json:"leader_global_checkpoint"` LeaderIndex string `json:"leader_index"` LeaderMaxSeqNo int64 `json:"leader_max_seq_no"` OperationsRead int64 `json:"operations_read"` OperationsWritten int64 `json:"operations_written"` OutstandingReadRequests int `json:"outstanding_read_requests"` OutstandingWriteRequests int `json:"outstanding_write_requests"` ReadExceptions []ReadException `json:"read_exceptions"` RemoteCluster string `json:"remote_cluster"` ShardId int `json:"shard_id"` SuccessfulReadRequests int64 `json:"successful_read_requests"` SuccessfulWriteRequests int64 `json:"successful_write_requests"` TimeSinceLastRead Duration `json:"time_since_last_read,omitempty"` TimeSinceLastReadMillis int64 `json:"time_since_last_read_millis"` TotalReadRemoteExecTime Duration `json:"total_read_remote_exec_time,omitempty"` TotalReadRemoteExecTimeMillis int64 `json:"total_read_remote_exec_time_millis"` TotalReadTime Duration `json:"total_read_time,omitempty"` TotalReadTimeMillis int64 `json:"total_read_time_millis"` TotalWriteTime Duration `json:"total_write_time,omitempty"` TotalWriteTimeMillis int64 `json:"total_write_time_millis"` WriteBufferOperationCount int64 `json:"write_buffer_operation_count"` WriteBufferSizeInBytes ByteSize `json:"write_buffer_size_in_bytes"` }
CcrShardStats type.
func NewCcrShardStats ¶
func NewCcrShardStats() *CcrShardStats
NewCcrShardStats returns a CcrShardStats.
type CertificateInformation ¶
type CertificateInformation struct { Alias string `json:"alias,omitempty"` Expiry DateTime `json:"expiry"` Format string `json:"format"` HasPrivateKey bool `json:"has_private_key"` Issuer *string `json:"issuer,omitempty"` Path string `json:"path"` SerialNumber string `json:"serial_number"` SubjectDn string `json:"subject_dn"` }
CertificateInformation type.
func NewCertificateInformation ¶
func NewCertificateInformation() *CertificateInformation
NewCertificateInformation returns a CertificateInformation.
type Cgroup ¶
type Cgroup struct { Cpu *CgroupCpu `json:"cpu,omitempty"` Cpuacct *CpuAcct `json:"cpuacct,omitempty"` Memory *CgroupMemory `json:"memory,omitempty"` }
Cgroup type.
type CgroupCpu ¶
type CgroupCpu struct { CfsPeriodMicros *int `json:"cfs_period_micros,omitempty"` CfsQuotaMicros *int `json:"cfs_quota_micros,omitempty"` ControlGroup *string `json:"control_group,omitempty"` Stat *CgroupCpuStat `json:"stat,omitempty"` }
CgroupCpu type.
type CgroupCpuStat ¶
type CgroupCpuStat struct { NumberOfElapsedPeriods *int64 `json:"number_of_elapsed_periods,omitempty"` NumberOfTimesThrottled *int64 `json:"number_of_times_throttled,omitempty"` TimeThrottledNanos *int64 `json:"time_throttled_nanos,omitempty"` }
CgroupCpuStat type.
func NewCgroupCpuStat ¶
func NewCgroupCpuStat() *CgroupCpuStat
NewCgroupCpuStat returns a CgroupCpuStat.
type CgroupMemory ¶
type CgroupMemory struct { ControlGroup *string `json:"control_group,omitempty"` LimitInBytes *string `json:"limit_in_bytes,omitempty"` UsageInBytes *string `json:"usage_in_bytes,omitempty"` }
CgroupMemory type.
func NewCgroupMemory ¶
func NewCgroupMemory() *CgroupMemory
NewCgroupMemory returns a CgroupMemory.
type ChainInput ¶
type ChainInput struct {
Inputs []map[string]WatcherInput `json:"inputs"`
}
ChainInput type.
type CharFilter ¶
type CharFilter interface{}
CharFilter holds the union for the following types:
string CharFilterDefinition
type CharFilterDefinition ¶
type CharFilterDefinition interface{}
CharFilterDefinition holds the union for the following types:
HtmlStripCharFilter MappingCharFilter PatternReplaceCharFilter IcuNormalizationCharFilter KuromojiIterationMarkCharFilter
type CharFilterDetail ¶
type CharFilterDetail struct { FilteredText []string `json:"filtered_text"` Name string `json:"name"` }
CharFilterDetail type.
func NewCharFilterDetail ¶
func NewCharFilterDetail() *CharFilterDetail
NewCharFilterDetail returns a CharFilterDetail.
type CharFilterTypes ¶
type CharFilterTypes struct { AnalyzerTypes []FieldTypes `json:"analyzer_types"` BuiltInAnalyzers []FieldTypes `json:"built_in_analyzers"` BuiltInCharFilters []FieldTypes `json:"built_in_char_filters"` BuiltInFilters []FieldTypes `json:"built_in_filters"` BuiltInTokenizers []FieldTypes `json:"built_in_tokenizers"` CharFilterTypes []FieldTypes `json:"char_filter_types"` FilterTypes []FieldTypes `json:"filter_types"` TokenizerTypes []FieldTypes `json:"tokenizer_types"` }
CharFilterTypes type.
func NewCharFilterTypes ¶
func NewCharFilterTypes() *CharFilterTypes
NewCharFilterTypes returns a CharFilterTypes.
type CharGroupTokenizer ¶
type CharGroupTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` TokenizeOnChars []string `json:"tokenize_on_chars"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
CharGroupTokenizer type.
func NewCharGroupTokenizer ¶
func NewCharGroupTokenizer() *CharGroupTokenizer
NewCharGroupTokenizer returns a CharGroupTokenizer.
type CheckpointStats ¶
type CheckpointStats struct { Checkpoint int64 `json:"checkpoint"` CheckpointProgress *TransformProgress `json:"checkpoint_progress,omitempty"` TimeUpperBound DateTime `json:"time_upper_bound,omitempty"` TimeUpperBoundMillis *int64 `json:"time_upper_bound_millis,omitempty"` Timestamp DateTime `json:"timestamp,omitempty"` TimestampMillis *int64 `json:"timestamp_millis,omitempty"` }
CheckpointStats type.
func NewCheckpointStats ¶
func NewCheckpointStats() *CheckpointStats
NewCheckpointStats returns a CheckpointStats.
type Checkpointing ¶
type Checkpointing struct { ChangesLastDetectedAt *int64 `json:"changes_last_detected_at,omitempty"` ChangesLastDetectedAtDateTime DateTime `json:"changes_last_detected_at_date_time,omitempty"` Last CheckpointStats `json:"last"` LastSearchTime *int64 `json:"last_search_time,omitempty"` Next *CheckpointStats `json:"next,omitempty"` OperationsBehind *int64 `json:"operations_behind,omitempty"` }
Checkpointing type.
func NewCheckpointing ¶
func NewCheckpointing() *Checkpointing
NewCheckpointing returns a Checkpointing.
type ChiSquareHeuristic ¶
type ChiSquareHeuristic struct { BackgroundIsSuperset bool `json:"background_is_superset"` IncludeNegatives bool `json:"include_negatives"` }
ChiSquareHeuristic type.
func NewChiSquareHeuristic ¶
func NewChiSquareHeuristic() *ChiSquareHeuristic
NewChiSquareHeuristic returns a ChiSquareHeuristic.
type ChildrenAggregate ¶
type ChildrenAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
ChildrenAggregate type.
func NewChildrenAggregate ¶
func NewChildrenAggregate() *ChildrenAggregate
NewChildrenAggregate returns a ChildrenAggregate.
func (ChildrenAggregate) MarshalJSON ¶
func (s ChildrenAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*ChildrenAggregate) UnmarshalJSON ¶
func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error
type ChildrenAggregation ¶
type ChildrenAggregation struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Type *string `json:"type,omitempty"` }
ChildrenAggregation type.
func NewChildrenAggregation ¶
func NewChildrenAggregation() *ChildrenAggregation
NewChildrenAggregation returns a ChildrenAggregation.
type ChunkingConfig ¶
type ChunkingConfig struct { // Mode If the mode is `auto`, the chunk size is dynamically calculated; // this is the recommended value when the datafeed does not use aggregations. // If the mode is `manual`, chunking is applied according to the specified // `time_span`; // use this mode when the datafeed uses aggregations. If the mode is `off`, no // chunking is applied. Mode chunkingmode.ChunkingMode `json:"mode"` // TimeSpan The time span that each search will be querying. This setting is applicable // only when the `mode` is set to `manual`. TimeSpan Duration `json:"time_span,omitempty"` }
ChunkingConfig type.
func NewChunkingConfig ¶
func NewChunkingConfig() *ChunkingConfig
NewChunkingConfig returns a ChunkingConfig.
type CircleProcessor ¶
type CircleProcessor struct { Description *string `json:"description,omitempty"` ErrorDistance Float64 `json:"error_distance"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` ShapeType shapetype.ShapeType `json:"shape_type"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
CircleProcessor type.
func NewCircleProcessor ¶
func NewCircleProcessor() *CircleProcessor
NewCircleProcessor returns a CircleProcessor.
type ClassificationInferenceOptions ¶
type ClassificationInferenceOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` // NumTopFeatureImportanceValues Specifies the maximum number of feature importance values per document. NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` // PredictionFieldType Specifies the type of the predicted field to write. Acceptable values are: // string, number, boolean. When boolean is provided 1.0 is transformed to true // and 0.0 to false. PredictionFieldType *string `json:"prediction_field_type,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // TopClassesResultsField Specifies the field to which the top classes are written. Defaults to // top_classes. TopClassesResultsField *string `json:"top_classes_results_field,omitempty"` }
ClassificationInferenceOptions type.
func NewClassificationInferenceOptions ¶
func NewClassificationInferenceOptions() *ClassificationInferenceOptions
NewClassificationInferenceOptions returns a ClassificationInferenceOptions.
type CleanupRepositoryResults ¶
type CleanupRepositoryResults struct { // DeletedBlobs Number of binary large objects (blobs) removed during cleanup. DeletedBlobs int64 `json:"deleted_blobs"` // DeletedBytes Number of bytes freed by cleanup operations. DeletedBytes int64 `json:"deleted_bytes"` }
CleanupRepositoryResults type.
func NewCleanupRepositoryResults ¶
func NewCleanupRepositoryResults() *CleanupRepositoryResults
NewCleanupRepositoryResults returns a CleanupRepositoryResults.
type Client ¶
type Client struct { Agent *string `json:"agent,omitempty"` ClosedTimeMillis *int64 `json:"closed_time_millis,omitempty"` Id *int64 `json:"id,omitempty"` LastRequestTimeMillis *int64 `json:"last_request_time_millis,omitempty"` LastUri *string `json:"last_uri,omitempty"` LocalAddress *string `json:"local_address,omitempty"` OpenedTimeMillis *int64 `json:"opened_time_millis,omitempty"` RemoteAddress *string `json:"remote_address,omitempty"` RequestCount *int64 `json:"request_count,omitempty"` RequestSizeBytes *int64 `json:"request_size_bytes,omitempty"` XOpaqueId *string `json:"x_opaque_id,omitempty"` }
Client type.
type CloseIndexResult ¶
type CloseIndexResult struct { Closed bool `json:"closed"` Shards map[string]CloseShardResult `json:"shards,omitempty"` }
CloseIndexResult type.
func NewCloseIndexResult ¶
func NewCloseIndexResult() *CloseIndexResult
NewCloseIndexResult returns a CloseIndexResult.
type CloseShardResult ¶
type CloseShardResult struct {
Failures []ShardFailure `json:"failures"`
}
CloseShardResult type.
func NewCloseShardResult ¶
func NewCloseShardResult() *CloseShardResult
NewCloseShardResult returns a CloseShardResult.
type ClusterAppliedStats ¶
type ClusterAppliedStats struct {
Recordings []Recording `json:"recordings,omitempty"`
}
ClusterAppliedStats type.
func NewClusterAppliedStats ¶
func NewClusterAppliedStats() *ClusterAppliedStats
NewClusterAppliedStats returns a ClusterAppliedStats.
type ClusterComponentTemplate ¶
type ClusterComponentTemplate struct { ComponentTemplate ComponentTemplateNode `json:"component_template"` Name string `json:"name"` }
ClusterComponentTemplate type.
func NewClusterComponentTemplate ¶
func NewClusterComponentTemplate() *ClusterComponentTemplate
NewClusterComponentTemplate returns a ClusterComponentTemplate.
type ClusterFileSystem ¶
type ClusterFileSystem struct { AvailableInBytes int64 `json:"available_in_bytes"` FreeInBytes int64 `json:"free_in_bytes"` TotalInBytes int64 `json:"total_in_bytes"` }
ClusterFileSystem type.
func NewClusterFileSystem ¶
func NewClusterFileSystem() *ClusterFileSystem
NewClusterFileSystem returns a ClusterFileSystem.
type ClusterIndexingPressure ¶
type ClusterIndexingPressure struct {
Memory ClusterPressureMemory `json:"memory"`
}
ClusterIndexingPressure type.
func NewClusterIndexingPressure ¶
func NewClusterIndexingPressure() *ClusterIndexingPressure
NewClusterIndexingPressure returns a ClusterIndexingPressure.
type ClusterIndices ¶
type ClusterIndices struct { // Analysis Contains statistics about analyzers and analyzer components used in selected // nodes. Analysis CharFilterTypes `json:"analysis"` // Completion Contains statistics about memory used for completion in selected nodes. Completion CompletionStats `json:"completion"` // Count Total number of indices with shards assigned to selected nodes. Count int64 `json:"count"` // Docs Contains counts for documents in selected nodes. Docs DocStats `json:"docs"` // Fielddata Contains statistics about the field data cache of selected nodes. Fielddata FielddataStats `json:"fielddata"` // Mappings Contains statistics about field mappings in selected nodes. Mappings FieldTypesMappings `json:"mappings"` // QueryCache Contains statistics about the query cache of selected nodes. QueryCache QueryCacheStats `json:"query_cache"` // Segments Contains statistics about segments in selected nodes. Segments SegmentsStats `json:"segments"` // Shards Contains statistics about indices with shards assigned to selected nodes. Shards ClusterIndicesShards `json:"shards"` // Store Contains statistics about the size of shards assigned to selected nodes. Store StoreStats `json:"store"` Versions []IndicesVersions `json:"versions,omitempty"` }
ClusterIndices type.
func NewClusterIndices ¶
func NewClusterIndices() *ClusterIndices
NewClusterIndices returns a ClusterIndices.
type ClusterIndicesShards ¶
type ClusterIndicesShards struct { // Index Contains statistics about shards assigned to selected nodes. Index *ClusterIndicesShardsIndex `json:"index,omitempty"` // Primaries Number of primary shards assigned to selected nodes. Primaries *Float64 `json:"primaries,omitempty"` // Replication Ratio of replica shards to primary shards across all selected nodes. Replication *Float64 `json:"replication,omitempty"` // Total Total number of shards assigned to selected nodes. Total *Float64 `json:"total,omitempty"` }
ClusterIndicesShards type.
func NewClusterIndicesShards ¶
func NewClusterIndicesShards() *ClusterIndicesShards
NewClusterIndicesShards returns a ClusterIndicesShards.
type ClusterIndicesShardsIndex ¶
type ClusterIndicesShardsIndex struct { // Primaries Contains statistics about the number of primary shards assigned to selected // nodes. Primaries ClusterShardMetrics `json:"primaries"` // Replication Contains statistics about the number of replication shards assigned to // selected nodes. Replication ClusterShardMetrics `json:"replication"` // Shards Contains statistics about the number of shards assigned to selected nodes. Shards ClusterShardMetrics `json:"shards"` }
ClusterIndicesShardsIndex type.
func NewClusterIndicesShardsIndex ¶
func NewClusterIndicesShardsIndex() *ClusterIndicesShardsIndex
NewClusterIndicesShardsIndex returns a ClusterIndicesShardsIndex.
type ClusterInfo ¶
type ClusterInfo struct { Nodes map[string]NodeDiskUsage `json:"nodes"` ReservedSizes []ReservedSize `json:"reserved_sizes"` ShardDataSetSizes map[string]string `json:"shard_data_set_sizes,omitempty"` ShardPaths map[string]string `json:"shard_paths"` ShardSizes map[string]int64 `json:"shard_sizes"` }
ClusterInfo type.
type ClusterIngest ¶
type ClusterIngest struct { NumberOfPipelines int `json:"number_of_pipelines"` ProcessorStats map[string]ClusterProcessor `json:"processor_stats"` }
ClusterIngest type.
func NewClusterIngest ¶
func NewClusterIngest() *ClusterIngest
NewClusterIngest returns a ClusterIngest.
type ClusterJvm ¶
type ClusterJvm struct { MaxUptimeInMillis int64 `json:"max_uptime_in_millis"` Mem ClusterJvmMemory `json:"mem"` Threads int64 `json:"threads"` Versions []ClusterJvmVersion `json:"versions"` }
ClusterJvm type.
type ClusterJvmMemory ¶
type ClusterJvmMemory struct { HeapMaxInBytes int64 `json:"heap_max_in_bytes"` HeapUsedInBytes int64 `json:"heap_used_in_bytes"` }
ClusterJvmMemory type.
func NewClusterJvmMemory ¶
func NewClusterJvmMemory() *ClusterJvmMemory
NewClusterJvmMemory returns a ClusterJvmMemory.
type ClusterJvmVersion ¶
type ClusterJvmVersion struct { BundledJdk bool `json:"bundled_jdk"` Count int `json:"count"` UsingBundledJdk bool `json:"using_bundled_jdk"` Version string `json:"version"` VmName string `json:"vm_name"` VmVendor string `json:"vm_vendor"` VmVersion string `json:"vm_version"` }
ClusterJvmVersion type.
func NewClusterJvmVersion ¶
func NewClusterJvmVersion() *ClusterJvmVersion
NewClusterJvmVersion returns a ClusterJvmVersion.
type ClusterNetworkTypes ¶
type ClusterNetworkTypes struct { HttpTypes map[string]int `json:"http_types"` TransportTypes map[string]int `json:"transport_types"` }
ClusterNetworkTypes type.
func NewClusterNetworkTypes ¶
func NewClusterNetworkTypes() *ClusterNetworkTypes
NewClusterNetworkTypes returns a ClusterNetworkTypes.
type ClusterNodeCount ¶
type ClusterNodeCount struct { CoordinatingOnly int `json:"coordinating_only"` Data int `json:"data"` DataCold int `json:"data_cold"` DataContent int `json:"data_content"` DataFrozen *int `json:"data_frozen,omitempty"` DataHot int `json:"data_hot"` DataWarm int `json:"data_warm"` Ingest int `json:"ingest"` Master int `json:"master"` Ml int `json:"ml"` RemoteClusterClient int `json:"remote_cluster_client"` Total int `json:"total"` Transform int `json:"transform"` VotingOnly int `json:"voting_only"` }
ClusterNodeCount type.
func NewClusterNodeCount ¶
func NewClusterNodeCount() *ClusterNodeCount
NewClusterNodeCount returns a ClusterNodeCount.
type ClusterNodes ¶
type ClusterNodes struct { // Count Contains counts for nodes selected by the request’s node filters. Count ClusterNodeCount `json:"count"` // DiscoveryTypes Contains statistics about the discovery types used by selected nodes. DiscoveryTypes map[string]int `json:"discovery_types"` // Fs Contains statistics about file stores by selected nodes. Fs ClusterFileSystem `json:"fs"` IndexingPressure ClusterIndexingPressure `json:"indexing_pressure"` Ingest ClusterIngest `json:"ingest"` // Jvm Contains statistics about the Java Virtual Machines (JVMs) used by selected // nodes. Jvm ClusterJvm `json:"jvm"` // NetworkTypes Contains statistics about the transport and HTTP networks used by selected // nodes. NetworkTypes ClusterNetworkTypes `json:"network_types"` // Os Contains statistics about the operating systems used by selected nodes. Os ClusterOperatingSystem `json:"os"` // PackagingTypes Contains statistics about Elasticsearch distributions installed on selected // nodes. PackagingTypes []NodePackagingType `json:"packaging_types"` // Plugins Contains statistics about installed plugins and modules by selected nodes. Plugins []PluginStats `json:"plugins"` // Process Contains statistics about processes used by selected nodes. Process ClusterProcess `json:"process"` // Versions Array of Elasticsearch versions used on selected nodes. Versions []string `json:"versions"` }
ClusterNodes type.
func NewClusterNodes ¶
func NewClusterNodes() *ClusterNodes
NewClusterNodes returns a ClusterNodes.
type ClusterOperatingSystem ¶
type ClusterOperatingSystem struct { AllocatedProcessors int `json:"allocated_processors"` Architectures []ClusterOperatingSystemArchitecture `json:"architectures,omitempty"` AvailableProcessors int `json:"available_processors"` Mem OperatingSystemMemoryInfo `json:"mem"` Names []ClusterOperatingSystemName `json:"names"` PrettyNames []ClusterOperatingSystemPrettyName `json:"pretty_names"` }
ClusterOperatingSystem type.
func NewClusterOperatingSystem ¶
func NewClusterOperatingSystem() *ClusterOperatingSystem
NewClusterOperatingSystem returns a ClusterOperatingSystem.
type ClusterOperatingSystemArchitecture ¶
type ClusterOperatingSystemArchitecture struct { Arch string `json:"arch"` Count int `json:"count"` }
ClusterOperatingSystemArchitecture type.
func NewClusterOperatingSystemArchitecture ¶
func NewClusterOperatingSystemArchitecture() *ClusterOperatingSystemArchitecture
NewClusterOperatingSystemArchitecture returns a ClusterOperatingSystemArchitecture.
type ClusterOperatingSystemName ¶
ClusterOperatingSystemName type.
func NewClusterOperatingSystemName ¶
func NewClusterOperatingSystemName() *ClusterOperatingSystemName
NewClusterOperatingSystemName returns a ClusterOperatingSystemName.
type ClusterOperatingSystemPrettyName ¶
type ClusterOperatingSystemPrettyName struct { Count int `json:"count"` PrettyName string `json:"pretty_name"` }
ClusterOperatingSystemPrettyName type.
func NewClusterOperatingSystemPrettyName ¶
func NewClusterOperatingSystemPrettyName() *ClusterOperatingSystemPrettyName
NewClusterOperatingSystemPrettyName returns a ClusterOperatingSystemPrettyName.
type ClusterPressureMemory ¶
type ClusterPressureMemory struct { Current IndexingPressureMemorySummary `json:"current"` LimitInBytes int64 `json:"limit_in_bytes"` Total IndexingPressureMemorySummary `json:"total"` }
ClusterPressureMemory type.
func NewClusterPressureMemory ¶
func NewClusterPressureMemory() *ClusterPressureMemory
NewClusterPressureMemory returns a ClusterPressureMemory.
type ClusterProcess ¶
type ClusterProcess struct { Cpu ClusterProcessCpu `json:"cpu"` OpenFileDescriptors ClusterProcessOpenFileDescriptors `json:"open_file_descriptors"` }
ClusterProcess type.
func NewClusterProcess ¶
func NewClusterProcess() *ClusterProcess
NewClusterProcess returns a ClusterProcess.
type ClusterProcessCpu ¶
type ClusterProcessCpu struct {
Percent int `json:"percent"`
}
ClusterProcessCpu type.
func NewClusterProcessCpu ¶
func NewClusterProcessCpu() *ClusterProcessCpu
NewClusterProcessCpu returns a ClusterProcessCpu.
type ClusterProcessOpenFileDescriptors ¶
type ClusterProcessOpenFileDescriptors struct { Avg int64 `json:"avg"` Max int64 `json:"max"` Min int64 `json:"min"` }
ClusterProcessOpenFileDescriptors type.
func NewClusterProcessOpenFileDescriptors ¶
func NewClusterProcessOpenFileDescriptors() *ClusterProcessOpenFileDescriptors
NewClusterProcessOpenFileDescriptors returns a ClusterProcessOpenFileDescriptors.
type ClusterProcessor ¶
type ClusterProcessor struct { Count int64 `json:"count"` Current int64 `json:"current"` Failed int64 `json:"failed"` Time Duration `json:"time,omitempty"` TimeInMillis int64 `json:"time_in_millis"` }
ClusterProcessor type.
func NewClusterProcessor ¶
func NewClusterProcessor() *ClusterProcessor
NewClusterProcessor returns a ClusterProcessor.
type ClusterRemoteInfo ¶
type ClusterRemoteInfo interface{}
ClusterRemoteInfo holds the union for the following types:
ClusterRemoteSniffInfo ClusterRemoteProxyInfo
type ClusterRemoteProxyInfo ¶
type ClusterRemoteProxyInfo struct { Connected bool `json:"connected"` InitialConnectTimeout Duration `json:"initial_connect_timeout"` MaxProxySocketConnections int `json:"max_proxy_socket_connections"` Mode string `json:"mode,omitempty"` NumProxySocketsConnected int `json:"num_proxy_sockets_connected"` ProxyAddress string `json:"proxy_address"` ServerName string `json:"server_name"` }
ClusterRemoteProxyInfo type.
func NewClusterRemoteProxyInfo ¶
func NewClusterRemoteProxyInfo() *ClusterRemoteProxyInfo
NewClusterRemoteProxyInfo returns a ClusterRemoteProxyInfo.
type ClusterRemoteSniffInfo ¶
type ClusterRemoteSniffInfo struct { Connected bool `json:"connected"` InitialConnectTimeout Duration `json:"initial_connect_timeout"` MaxConnectionsPerCluster int `json:"max_connections_per_cluster"` Mode string `json:"mode,omitempty"` NumNodesConnected int64 `json:"num_nodes_connected"` Seeds []string `json:"seeds"` }
ClusterRemoteSniffInfo type.
func NewClusterRemoteSniffInfo ¶
func NewClusterRemoteSniffInfo() *ClusterRemoteSniffInfo
NewClusterRemoteSniffInfo returns a ClusterRemoteSniffInfo.
type ClusterRuntimeFieldTypes ¶
type ClusterRuntimeFieldTypes struct { CharsMax int `json:"chars_max"` CharsTotal int `json:"chars_total"` Count int `json:"count"` DocMax int `json:"doc_max"` DocTotal int `json:"doc_total"` IndexCount int `json:"index_count"` Lang []string `json:"lang"` LinesMax int `json:"lines_max"` LinesTotal int `json:"lines_total"` Name string `json:"name"` ScriptlessCount int `json:"scriptless_count"` ShadowedCount int `json:"shadowed_count"` SourceMax int `json:"source_max"` SourceTotal int `json:"source_total"` }
ClusterRuntimeFieldTypes type.
func NewClusterRuntimeFieldTypes ¶
func NewClusterRuntimeFieldTypes() *ClusterRuntimeFieldTypes
NewClusterRuntimeFieldTypes returns a ClusterRuntimeFieldTypes.
type ClusterShardMetrics ¶
type ClusterShardMetrics struct { Avg Float64 `json:"avg"` Max Float64 `json:"max"` Min Float64 `json:"min"` }
ClusterShardMetrics type.
func NewClusterShardMetrics ¶
func NewClusterShardMetrics() *ClusterShardMetrics
NewClusterShardMetrics returns a ClusterShardMetrics.
type ClusterStateQueue ¶
type ClusterStateQueue struct { Committed *int64 `json:"committed,omitempty"` Pending *int64 `json:"pending,omitempty"` Total *int64 `json:"total,omitempty"` }
ClusterStateQueue type.
func NewClusterStateQueue ¶
func NewClusterStateQueue() *ClusterStateQueue
NewClusterStateQueue returns a ClusterStateQueue.
type ClusterStateUpdate ¶
type ClusterStateUpdate struct { CommitTime Duration `json:"commit_time,omitempty"` CommitTimeMillis *int64 `json:"commit_time_millis,omitempty"` CompletionTime Duration `json:"completion_time,omitempty"` CompletionTimeMillis *int64 `json:"completion_time_millis,omitempty"` ComputationTime Duration `json:"computation_time,omitempty"` ComputationTimeMillis *int64 `json:"computation_time_millis,omitempty"` ContextConstructionTime Duration `json:"context_construction_time,omitempty"` ContextConstructionTimeMillis *int64 `json:"context_construction_time_millis,omitempty"` Count int64 `json:"count"` MasterApplyTime Duration `json:"master_apply_time,omitempty"` MasterApplyTimeMillis *int64 `json:"master_apply_time_millis,omitempty"` NotificationTime Duration `json:"notification_time,omitempty"` NotificationTimeMillis *int64 `json:"notification_time_millis,omitempty"` PublicationTime Duration `json:"publication_time,omitempty"` PublicationTimeMillis *int64 `json:"publication_time_millis,omitempty"` }
ClusterStateUpdate type.
func NewClusterStateUpdate ¶
func NewClusterStateUpdate() *ClusterStateUpdate
NewClusterStateUpdate returns a ClusterStateUpdate.
type ClusterStatistics ¶
type ClusterStatistics struct { Skipped int `json:"skipped"` Successful int `json:"successful"` Total int `json:"total"` }
ClusterStatistics type.
func NewClusterStatistics ¶
func NewClusterStatistics() *ClusterStatistics
NewClusterStatistics returns a ClusterStatistics.
type Collector ¶
type Collector struct { Children []Collector `json:"children,omitempty"` Name string `json:"name"` Reason string `json:"reason"` TimeInNanos int64 `json:"time_in_nanos"` }
Collector type.
type CombinedFieldsQuery ¶
type CombinedFieldsQuery struct { AutoGenerateSynonymsPhraseQuery *bool `json:"auto_generate_synonyms_phrase_query,omitempty"` Boost *float32 `json:"boost,omitempty"` Fields []string `json:"fields"` MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` Operator *combinedfieldsoperator.CombinedFieldsOperator `json:"operator,omitempty"` Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` ZeroTermsQuery *combinedfieldszeroterms.CombinedFieldsZeroTerms `json:"zero_terms_query,omitempty"` }
CombinedFieldsQuery type.
func NewCombinedFieldsQuery ¶
func NewCombinedFieldsQuery() *CombinedFieldsQuery
NewCombinedFieldsQuery returns a CombinedFieldsQuery.
type Command ¶
type Command struct { // AllocateEmptyPrimary Allocate an empty primary shard to a node. Accepts the index and shard for // index name and shard number, and node to allocate the shard to. Using this // command leads to a complete loss of all data that was indexed into this // shard, if it was previously started. If a node which has a copy of the data // rejoins the cluster later on, that data will be deleted. To ensure that these // implications are well-understood, this command requires the flag // accept_data_loss to be explicitly set to true. AllocateEmptyPrimary *CommandAllocatePrimaryAction `json:"allocate_empty_primary,omitempty"` // AllocateReplica Allocate an unassigned replica shard to a node. Accepts index and shard for // index name and shard number, and node to allocate the shard to. Takes // allocation deciders into account. AllocateReplica *CommandAllocateReplicaAction `json:"allocate_replica,omitempty"` // AllocateStalePrimary Allocate a primary shard to a node that holds a stale copy. Accepts the index // and shard for index name and shard number, and node to allocate the shard to. // Using this command may lead to data loss for the provided shard id. If a node // which has the good copy of the data rejoins the cluster later on, that data // will be deleted or overwritten with the data of the stale copy that was // forcefully allocated with this command. To ensure that these implications are // well-understood, this command requires the flag accept_data_loss to be // explicitly set to true. AllocateStalePrimary *CommandAllocatePrimaryAction `json:"allocate_stale_primary,omitempty"` // Cancel Cancel allocation of a shard (or recovery). Accepts index and shard for index // name and shard number, and node for the node to cancel the shard allocation // on. This can be used to force resynchronization of existing replicas from the // primary shard by cancelling them and allowing them to be reinitialized // through the standard recovery process. By default only replica shard // allocations can be cancelled. If it is necessary to cancel the allocation of // a primary shard then the allow_primary flag must also be included in the // request. Cancel *CommandCancelAction `json:"cancel,omitempty"` // Move Move a started shard from one node to another node. Accepts index and shard // for index name and shard number, from_node for the node to move the shard // from, and to_node for the node to move the shard to. Move *CommandMoveAction `json:"move,omitempty"` }
Command type.
type CommandAllocatePrimaryAction ¶
type CommandAllocatePrimaryAction struct { // AcceptDataLoss If a node which has a copy of the data rejoins the cluster later on, that // data will be deleted. To ensure that these implications are well-understood, // this command requires the flag accept_data_loss to be explicitly set to true AcceptDataLoss bool `json:"accept_data_loss"` Index string `json:"index"` Node string `json:"node"` Shard int `json:"shard"` }
CommandAllocatePrimaryAction type.
func NewCommandAllocatePrimaryAction ¶
func NewCommandAllocatePrimaryAction() *CommandAllocatePrimaryAction
NewCommandAllocatePrimaryAction returns a CommandAllocatePrimaryAction.
type CommandAllocateReplicaAction ¶
type CommandAllocateReplicaAction struct { Index string `json:"index"` Node string `json:"node"` Shard int `json:"shard"` }
CommandAllocateReplicaAction type.
func NewCommandAllocateReplicaAction ¶
func NewCommandAllocateReplicaAction() *CommandAllocateReplicaAction
NewCommandAllocateReplicaAction returns a CommandAllocateReplicaAction.
type CommandCancelAction ¶
type CommandCancelAction struct { AllowPrimary *bool `json:"allow_primary,omitempty"` Index string `json:"index"` Node string `json:"node"` Shard int `json:"shard"` }
CommandCancelAction type.
func NewCommandCancelAction ¶
func NewCommandCancelAction() *CommandCancelAction
NewCommandCancelAction returns a CommandCancelAction.
type CommandMoveAction ¶
type CommandMoveAction struct { // FromNode The node to move the shard from FromNode string `json:"from_node"` Index string `json:"index"` Shard int `json:"shard"` // ToNode The node to move the shard to ToNode string `json:"to_node"` }
CommandMoveAction type.
func NewCommandMoveAction ¶
func NewCommandMoveAction() *CommandMoveAction
NewCommandMoveAction returns a CommandMoveAction.
type CommonGramsTokenFilter ¶
type CommonGramsTokenFilter struct { CommonWords []string `json:"common_words,omitempty"` CommonWordsPath *string `json:"common_words_path,omitempty"` IgnoreCase *bool `json:"ignore_case,omitempty"` QueryMode *bool `json:"query_mode,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
CommonGramsTokenFilter type.
func NewCommonGramsTokenFilter ¶
func NewCommonGramsTokenFilter() *CommonGramsTokenFilter
NewCommonGramsTokenFilter returns a CommonGramsTokenFilter.
type CommonTermsQuery ¶
type CommonTermsQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` CutoffFrequency *Float64 `json:"cutoff_frequency,omitempty"` HighFreqOperator *operator.Operator `json:"high_freq_operator,omitempty"` LowFreqOperator *operator.Operator `json:"low_freq_operator,omitempty"` MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` }
CommonTermsQuery type.
func NewCommonTermsQuery ¶
func NewCommonTermsQuery() *CommonTermsQuery
NewCommonTermsQuery returns a CommonTermsQuery.
type CompactNodeInfo ¶
type CompactNodeInfo struct {
Name string `json:"name"`
}
CompactNodeInfo type.
func NewCompactNodeInfo ¶
func NewCompactNodeInfo() *CompactNodeInfo
NewCompactNodeInfo returns a CompactNodeInfo.
type CompletionContext ¶
type CompletionContext struct { Boost *Float64 `json:"boost,omitempty"` Context Context `json:"context"` Neighbours []GeoHashPrecision `json:"neighbours,omitempty"` Precision GeoHashPrecision `json:"precision,omitempty"` Prefix *bool `json:"prefix,omitempty"` }
CompletionContext type.
func NewCompletionContext ¶
func NewCompletionContext() *CompletionContext
NewCompletionContext returns a CompletionContext.
func (*CompletionContext) UnmarshalJSON ¶
func (s *CompletionContext) UnmarshalJSON(data []byte) error
type CompletionProperty ¶
type CompletionProperty struct { Analyzer *string `json:"analyzer,omitempty"` Contexts []SuggestContext `json:"contexts,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` MaxInputLength *int `json:"max_input_length,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` PreservePositionIncrements *bool `json:"preserve_position_increments,omitempty"` PreserveSeparators *bool `json:"preserve_separators,omitempty"` Properties map[string]Property `json:"properties,omitempty"` SearchAnalyzer *string `json:"search_analyzer,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
CompletionProperty type.
func NewCompletionProperty ¶
func NewCompletionProperty() *CompletionProperty
NewCompletionProperty returns a CompletionProperty.
func (*CompletionProperty) UnmarshalJSON ¶
func (s *CompletionProperty) UnmarshalJSON(data []byte) error
type CompletionStats ¶
type CompletionStats struct { Fields map[string]FieldSizeUsage `json:"fields,omitempty"` Size ByteSize `json:"size,omitempty"` SizeInBytes int64 `json:"size_in_bytes"` }
CompletionStats type.
func NewCompletionStats ¶
func NewCompletionStats() *CompletionStats
NewCompletionStats returns a CompletionStats.
type CompletionSuggest ¶
type CompletionSuggest struct { Length int `json:"length"` Offset int `json:"offset"` Options []CompletionSuggestOption `json:"options"` Text string `json:"text"` }
CompletionSuggest type.
func NewCompletionSuggest ¶
func NewCompletionSuggest() *CompletionSuggest
NewCompletionSuggest returns a CompletionSuggest.
type CompletionSuggestOption ¶
type CompletionSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Contexts map[string][]Context `json:"contexts,omitempty"` Fields map[string]json.RawMessage `json:"fields,omitempty"` Id_ *string `json:"_id,omitempty"` Index_ *string `json:"_index,omitempty"` Routing_ *string `json:"_routing,omitempty"` Score *Float64 `json:"score,omitempty"` Score_ *Float64 `json:"_score,omitempty"` Source_ json.RawMessage `json:"_source,omitempty"` Text string `json:"text"` }
CompletionSuggestOption type.
func NewCompletionSuggestOption ¶
func NewCompletionSuggestOption() *CompletionSuggestOption
NewCompletionSuggestOption returns a CompletionSuggestOption.
type CompletionSuggester ¶
type CompletionSuggester struct { Analyzer *string `json:"analyzer,omitempty"` Contexts map[string][]CompletionContext `json:"contexts,omitempty"` Field string `json:"field"` Fuzzy *SuggestFuzziness `json:"fuzzy,omitempty"` Prefix *string `json:"prefix,omitempty"` Regex *string `json:"regex,omitempty"` Size *int `json:"size,omitempty"` SkipDuplicates *bool `json:"skip_duplicates,omitempty"` }
CompletionSuggester type.
func NewCompletionSuggester ¶
func NewCompletionSuggester() *CompletionSuggester
NewCompletionSuggester returns a CompletionSuggester.
type ComponentTemplateNode ¶
type ComponentTemplateNode struct { Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` Template ComponentTemplateSummary `json:"template"` Version *int64 `json:"version,omitempty"` }
ComponentTemplateNode type.
func NewComponentTemplateNode ¶
func NewComponentTemplateNode() *ComponentTemplateNode
NewComponentTemplateNode returns a ComponentTemplateNode.
type ComponentTemplateSummary ¶
type ComponentTemplateSummary struct { Aliases map[string]AliasDefinition `json:"aliases,omitempty"` Mappings *TypeMapping `json:"mappings,omitempty"` Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` Settings map[string]IndexSettings `json:"settings,omitempty"` Version *int64 `json:"version,omitempty"` }
ComponentTemplateSummary type.
func NewComponentTemplateSummary ¶
func NewComponentTemplateSummary() *ComponentTemplateSummary
NewComponentTemplateSummary returns a ComponentTemplateSummary.
type CompositeAggregate ¶
type CompositeAggregate struct { AfterKey map[string]FieldValue `json:"after_key,omitempty"` Buckets BucketsCompositeBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
CompositeAggregate type.
func NewCompositeAggregate ¶
func NewCompositeAggregate() *CompositeAggregate
NewCompositeAggregate returns a CompositeAggregate.
func (*CompositeAggregate) UnmarshalJSON ¶
func (s *CompositeAggregate) UnmarshalJSON(data []byte) error
type CompositeAggregateKey ¶
type CompositeAggregateKey map[string]FieldValue
CompositeAggregateKey type alias.
type CompositeAggregation ¶
type CompositeAggregation struct { After map[string]FieldValue `json:"after,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Size *int `json:"size,omitempty"` Sources []map[string]CompositeAggregationSource `json:"sources,omitempty"` }
CompositeAggregation type.
func NewCompositeAggregation ¶
func NewCompositeAggregation() *CompositeAggregation
NewCompositeAggregation returns a CompositeAggregation.
type CompositeAggregationSource ¶
type CompositeAggregationSource struct { DateHistogram *DateHistogramAggregation `json:"date_histogram,omitempty"` GeotileGrid *GeoTileGridAggregation `json:"geotile_grid,omitempty"` Histogram *HistogramAggregation `json:"histogram,omitempty"` Terms *TermsAggregation `json:"terms,omitempty"` }
CompositeAggregationSource type.
func NewCompositeAggregationSource ¶
func NewCompositeAggregationSource() *CompositeAggregationSource
NewCompositeAggregationSource returns a CompositeAggregationSource.
type CompositeBucket ¶
type CompositeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key map[string]FieldValue `json:"key"` }
CompositeBucket type.
func NewCompositeBucket ¶
func NewCompositeBucket() *CompositeBucket
NewCompositeBucket returns a CompositeBucket.
func (CompositeBucket) MarshalJSON ¶
func (s CompositeBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*CompositeBucket) UnmarshalJSON ¶
func (s *CompositeBucket) UnmarshalJSON(data []byte) error
type ConditionTokenFilter ¶
type ConditionTokenFilter struct { Filter []string `json:"filter"` Script Script `json:"script"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
ConditionTokenFilter type.
func NewConditionTokenFilter ¶
func NewConditionTokenFilter() *ConditionTokenFilter
NewConditionTokenFilter returns a ConditionTokenFilter.
type Configuration ¶
type Configuration struct { // FeatureStates A list of feature states to be included in this snapshot. A list of features // available for inclusion in the snapshot and their descriptions can be // retrieved using the get features API. // Each feature state includes one or more system indices containing data // necessary for the function of that feature. Providing an empty array will // include no feature states in the snapshot, regardless of the value of // include_global_state. By default, all available feature states will be // included in the snapshot if include_global_state is true, or no feature // states if include_global_state is false. FeatureStates []string `json:"feature_states,omitempty"` // IgnoreUnavailable If false, the snapshot fails if any data stream or index is // missing or closed. If true, the snapshot ignores missing or closed data // streams and indices. IgnoreUnavailable *bool `json:"ignore_unavailable,omitempty"` // IncludeGlobalState If true, the current global state is included in the snapshot. IncludeGlobalState *bool `json:"include_global_state,omitempty"` // Indices A comma-separated list of data streams and indices to include in the // snapshot. Multi-index syntax is supported. // By default, a snapshot includes all data streams and indices in the cluster. // If this argument is provided, the snapshot only includes the specified data // streams and indices. Indices []string `json:"indices,omitempty"` // Metadata Attaches arbitrary metadata to the snapshot, such as a record of who took the // snapshot, why it was taken, or any other useful data. Metadata must be less // than 1024 bytes. Metadata map[string]json.RawMessage `json:"metadata,omitempty"` // Partial If false, the entire snapshot will fail if one or more indices included in // the snapshot do not have all primary shards available. Partial *bool `json:"partial,omitempty"` }
Configuration type.
func NewConfiguration ¶
func NewConfiguration() *Configuration
NewConfiguration returns a Configuration.
type Configurations ¶
type Configurations struct { Forcemerge *ForceMergeConfiguration `json:"forcemerge,omitempty"` Rollover *RolloverConditions `json:"rollover,omitempty"` Shrink *ShrinkConfiguration `json:"shrink,omitempty"` }
Configurations type.
func NewConfigurations ¶
func NewConfigurations() *Configurations
NewConfigurations returns a Configurations.
type ConfusionMatrixItem ¶
type ConfusionMatrixItem struct { ActualClass string `json:"actual_class"` ActualClassDocCount int `json:"actual_class_doc_count"` OtherPredictedClassDocCount int `json:"other_predicted_class_doc_count"` PredictedClasses []ConfusionMatrixPrediction `json:"predicted_classes"` }
ConfusionMatrixItem type.
func NewConfusionMatrixItem ¶
func NewConfusionMatrixItem() *ConfusionMatrixItem
NewConfusionMatrixItem returns a ConfusionMatrixItem.
type ConfusionMatrixPrediction ¶
type ConfusionMatrixPrediction struct { Count int `json:"count"` PredictedClass string `json:"predicted_class"` }
ConfusionMatrixPrediction type.
func NewConfusionMatrixPrediction ¶
func NewConfusionMatrixPrediction() *ConfusionMatrixPrediction
NewConfusionMatrixPrediction returns a ConfusionMatrixPrediction.
type ConfusionMatrixThreshold ¶
type ConfusionMatrixThreshold struct { // FalseNegative False Negative FalseNegative int `json:"fn"` // FalsePositive False Positive FalsePositive int `json:"fp"` // TrueNegative True Negative TrueNegative int `json:"tn"` // TruePositive True Positive TruePositive int `json:"tp"` }
ConfusionMatrixThreshold type.
func NewConfusionMatrixThreshold ¶
func NewConfusionMatrixThreshold() *ConfusionMatrixThreshold
NewConfusionMatrixThreshold returns a ConfusionMatrixThreshold.
type Connection ¶
type Connection struct { DocCount int64 `json:"doc_count"` Source int64 `json:"source"` Target int64 `json:"target"` Weight Float64 `json:"weight"` }
Connection type.
type ConstantKeywordProperty ¶
type ConstantKeywordProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Type string `json:"type,omitempty"` Value json.RawMessage `json:"value,omitempty"` }
ConstantKeywordProperty type.
func NewConstantKeywordProperty ¶
func NewConstantKeywordProperty() *ConstantKeywordProperty
NewConstantKeywordProperty returns a ConstantKeywordProperty.
func (*ConstantKeywordProperty) UnmarshalJSON ¶
func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error
type ConstantScoreQuery ¶
type ConstantScoreQuery struct { Boost *float32 `json:"boost,omitempty"` Filter *Query `json:"filter,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
ConstantScoreQuery type.
func NewConstantScoreQuery ¶
func NewConstantScoreQuery() *ConstantScoreQuery
NewConstantScoreQuery returns a ConstantScoreQuery.
type Context ¶
type Context interface{}
Context holds the union for the following types:
string GeoLocation
type ContextMethod ¶
type ContextMethod struct { Name string `json:"name"` Params []ContextMethodParam `json:"params"` ReturnType string `json:"return_type"` }
ContextMethod type.
func NewContextMethod ¶
func NewContextMethod() *ContextMethod
NewContextMethod returns a ContextMethod.
type ContextMethodParam ¶
ContextMethodParam type.
func NewContextMethodParam ¶
func NewContextMethodParam() *ContextMethodParam
NewContextMethodParam returns a ContextMethodParam.
type ConvertProcessor ¶
type ConvertProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` Type converttype.ConvertType `json:"type"` }
ConvertProcessor type.
func NewConvertProcessor ¶
func NewConvertProcessor() *ConvertProcessor
NewConvertProcessor returns a ConvertProcessor.
type CoordinatorStats ¶
type CoordinatorStats struct { ExecutedSearchesTotal int64 `json:"executed_searches_total"` NodeId string `json:"node_id"` QueueSize int `json:"queue_size"` RemoteRequestsCurrent int `json:"remote_requests_current"` RemoteRequestsTotal int64 `json:"remote_requests_total"` }
CoordinatorStats type.
func NewCoordinatorStats ¶
func NewCoordinatorStats() *CoordinatorStats
NewCoordinatorStats returns a CoordinatorStats.
type CoordsGeoBounds ¶
type CoordsGeoBounds struct { Bottom Float64 `json:"bottom"` Left Float64 `json:"left"` Right Float64 `json:"right"` Top Float64 `json:"top"` }
CoordsGeoBounds type.
func NewCoordsGeoBounds ¶
func NewCoordsGeoBounds() *CoordsGeoBounds
NewCoordsGeoBounds returns a CoordsGeoBounds.
type CoreKnnQuery ¶
type CoreKnnQuery struct { // Field The name of the vector field to search against Field string `json:"field"` // K The final number of nearest neighbors to return as top hits K int64 `json:"k"` // NumCandidates The number of nearest neighbor candidates to consider per shard NumCandidates int64 `json:"num_candidates"` // QueryVector The query vector QueryVector []float32 `json:"query_vector"` }
CoreKnnQuery type.
func NewCoreKnnQuery ¶
func NewCoreKnnQuery() *CoreKnnQuery
NewCoreKnnQuery returns a CoreKnnQuery.
type CountRecord ¶
type CountRecord struct { // Count the document count Count *string `json:"count,omitempty"` // Epoch seconds since 1970-01-01 00:00:00 Epoch StringifiedEpochTimeUnitSeconds `json:"epoch,omitempty"` // Timestamp time in HH:MM:SS Timestamp *string `json:"timestamp,omitempty"` }
CountRecord type.
type Cpu ¶
type Cpu struct { LoadAverage map[string]Float64 `json:"load_average,omitempty"` Percent *int `json:"percent,omitempty"` Sys Duration `json:"sys,omitempty"` SysInMillis *int64 `json:"sys_in_millis,omitempty"` Total Duration `json:"total,omitempty"` TotalInMillis *int64 `json:"total_in_millis,omitempty"` User Duration `json:"user,omitempty"` UserInMillis *int64 `json:"user_in_millis,omitempty"` }
Cpu type.
type CpuAcct ¶
type CpuAcct struct { ControlGroup *string `json:"control_group,omitempty"` UsageNanos *int64 `json:"usage_nanos,omitempty"` }
CpuAcct type.
type CreatedStatus ¶
type CreatedStatus struct {
Created bool `json:"created"`
}
CreatedStatus type.
func NewCreatedStatus ¶
func NewCreatedStatus() *CreatedStatus
NewCreatedStatus returns a CreatedStatus.
type CsvProcessor ¶
type CsvProcessor struct { Description *string `json:"description,omitempty"` EmptyValue json.RawMessage `json:"empty_value,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Quote *string `json:"quote,omitempty"` Separator *string `json:"separator,omitempty"` Tag *string `json:"tag,omitempty"` TargetFields []string `json:"target_fields"` Trim *bool `json:"trim,omitempty"` }
CsvProcessor type.
func NewCsvProcessor ¶
func NewCsvProcessor() *CsvProcessor
NewCsvProcessor returns a CsvProcessor.
type CumulativeCardinalityAggregate ¶
type CumulativeCardinalityAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Value int64 `json:"value"` ValueAsString *string `json:"value_as_string,omitempty"` }
CumulativeCardinalityAggregate type.
func NewCumulativeCardinalityAggregate ¶
func NewCumulativeCardinalityAggregate() *CumulativeCardinalityAggregate
NewCumulativeCardinalityAggregate returns a CumulativeCardinalityAggregate.
type CumulativeCardinalityAggregation ¶
type CumulativeCardinalityAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
CumulativeCardinalityAggregation type.
func NewCumulativeCardinalityAggregation ¶
func NewCumulativeCardinalityAggregation() *CumulativeCardinalityAggregation
NewCumulativeCardinalityAggregation returns a CumulativeCardinalityAggregation.
func (*CumulativeCardinalityAggregation) UnmarshalJSON ¶
func (s *CumulativeCardinalityAggregation) UnmarshalJSON(data []byte) error
type CumulativeSumAggregation ¶
type CumulativeSumAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
CumulativeSumAggregation type.
func NewCumulativeSumAggregation ¶
func NewCumulativeSumAggregation() *CumulativeSumAggregation
NewCumulativeSumAggregation returns a CumulativeSumAggregation.
func (*CumulativeSumAggregation) UnmarshalJSON ¶
func (s *CumulativeSumAggregation) UnmarshalJSON(data []byte) error
type CurrentNode ¶
type CurrentNode struct { Attributes map[string]string `json:"attributes"` Id string `json:"id"` Name string `json:"name"` TransportAddress string `json:"transport_address"` WeightRanking int `json:"weight_ranking"` }
CurrentNode type.
type CustomAnalyzer ¶
type CustomAnalyzer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` PositionIncrementGap *int `json:"position_increment_gap,omitempty"` PositionOffsetGap *int `json:"position_offset_gap,omitempty"` Tokenizer string `json:"tokenizer"` Type string `json:"type,omitempty"` }
CustomAnalyzer type.
func NewCustomAnalyzer ¶
func NewCustomAnalyzer() *CustomAnalyzer
NewCustomAnalyzer returns a CustomAnalyzer.
type CustomCategorizeTextAnalyzer ¶
type CustomCategorizeTextAnalyzer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` Tokenizer *string `json:"tokenizer,omitempty"` }
CustomCategorizeTextAnalyzer type.
func NewCustomCategorizeTextAnalyzer ¶
func NewCustomCategorizeTextAnalyzer() *CustomCategorizeTextAnalyzer
NewCustomCategorizeTextAnalyzer returns a CustomCategorizeTextAnalyzer.
type CustomNormalizer ¶
type CustomNormalizer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` Type string `json:"type,omitempty"` }
CustomNormalizer type.
func NewCustomNormalizer ¶
func NewCustomNormalizer() *CustomNormalizer
NewCustomNormalizer returns a CustomNormalizer.
type DailySchedule ¶
type DailySchedule struct {
At []ScheduleTimeOfDay `json:"at"`
}
DailySchedule type.
func NewDailySchedule ¶
func NewDailySchedule() *DailySchedule
NewDailySchedule returns a DailySchedule.
type DanglingIndex ¶
type DanglingIndex struct { CreationDateMillis int64 `json:"creation_date_millis"` IndexName string `json:"index_name"` IndexUuid string `json:"index_uuid"` NodeIds []string `json:"node_ids"` }
DanglingIndex type.
func NewDanglingIndex ¶
func NewDanglingIndex() *DanglingIndex
NewDanglingIndex returns a DanglingIndex.
type DataCounts ¶
type DataCounts struct { BucketCount int64 `json:"bucket_count"` EarliestRecordTimestamp *int64 `json:"earliest_record_timestamp,omitempty"` EmptyBucketCount int64 `json:"empty_bucket_count"` InputBytes int64 `json:"input_bytes"` InputFieldCount int64 `json:"input_field_count"` InputRecordCount int64 `json:"input_record_count"` InvalidDateCount int64 `json:"invalid_date_count"` JobId string `json:"job_id"` LastDataTime *int64 `json:"last_data_time,omitempty"` LatestBucketTimestamp *int64 `json:"latest_bucket_timestamp,omitempty"` LatestEmptyBucketTimestamp *int64 `json:"latest_empty_bucket_timestamp,omitempty"` LatestRecordTimestamp *int64 `json:"latest_record_timestamp,omitempty"` LatestSparseBucketTimestamp *int64 `json:"latest_sparse_bucket_timestamp,omitempty"` LogTime *int64 `json:"log_time,omitempty"` MissingFieldCount int64 `json:"missing_field_count"` OutOfOrderTimestampCount int64 `json:"out_of_order_timestamp_count"` ProcessedFieldCount int64 `json:"processed_field_count"` ProcessedRecordCount int64 `json:"processed_record_count"` SparseBucketCount int64 `json:"sparse_bucket_count"` }
DataCounts type.
type DataDescription ¶
type DataDescription struct { FieldDelimiter *string `json:"field_delimiter,omitempty"` // Format Only JSON format is supported at this time. Format *string `json:"format,omitempty"` // TimeField The name of the field that contains the timestamp. TimeField *string `json:"time_field,omitempty"` // TimeFormat The time format, which can be `epoch`, `epoch_ms`, or a custom pattern. The // value `epoch` refers to UNIX or Epoch time (the number of seconds since 1 Jan // 1970). The value `epoch_ms` indicates that time is measured in milliseconds // since the epoch. The `epoch` and `epoch_ms` time formats accept either // integer or real values. Custom patterns must conform to the Java // DateTimeFormatter class. When you use date-time formatting patterns, it is // recommended that you provide the full date, time and time zone. For example: // `yyyy-MM-dd'T'HH:mm:ssX`. If the pattern that you specify is not sufficient // to produce a complete timestamp, job creation fails. TimeFormat *string `json:"time_format,omitempty"` }
DataDescription type.
func NewDataDescription ¶
func NewDataDescription() *DataDescription
NewDataDescription returns a DataDescription.
type DataEmailAttachment ¶
type DataEmailAttachment struct {
Format *dataattachmentformat.DataAttachmentFormat `json:"format,omitempty"`
}
DataEmailAttachment type.
func NewDataEmailAttachment ¶
func NewDataEmailAttachment() *DataEmailAttachment
NewDataEmailAttachment returns a DataEmailAttachment.
type DataFrameAnalyticsRecord ¶
type DataFrameAnalyticsRecord struct { // AssignmentExplanation why the job is or is not assigned to a node AssignmentExplanation *string `json:"assignment_explanation,omitempty"` // CreateTime job creation time CreateTime *string `json:"create_time,omitempty"` // Description description Description *string `json:"description,omitempty"` // DestIndex destination index DestIndex *string `json:"dest_index,omitempty"` // FailureReason failure reason FailureReason *string `json:"failure_reason,omitempty"` // Id the id Id *string `json:"id,omitempty"` // ModelMemoryLimit model memory limit ModelMemoryLimit *string `json:"model_memory_limit,omitempty"` // NodeAddress network address of the assigned node NodeAddress *string `json:"node.address,omitempty"` // NodeEphemeralId ephemeral id of the assigned node NodeEphemeralId *string `json:"node.ephemeral_id,omitempty"` // NodeId id of the assigned node NodeId *string `json:"node.id,omitempty"` // NodeName name of the assigned node NodeName *string `json:"node.name,omitempty"` // Progress progress Progress *string `json:"progress,omitempty"` // SourceIndex source index SourceIndex *string `json:"source_index,omitempty"` // State job state State *string `json:"state,omitempty"` // Type analysis type Type *string `json:"type,omitempty"` // Version the version of Elasticsearch when the analytics was created Version *string `json:"version,omitempty"` }
DataFrameAnalyticsRecord type.
func NewDataFrameAnalyticsRecord ¶
func NewDataFrameAnalyticsRecord() *DataFrameAnalyticsRecord
NewDataFrameAnalyticsRecord returns a DataFrameAnalyticsRecord.
type DataPathStats ¶
type DataPathStats struct { Available *string `json:"available,omitempty"` AvailableInBytes *int64 `json:"available_in_bytes,omitempty"` DiskQueue *string `json:"disk_queue,omitempty"` DiskReadSize *string `json:"disk_read_size,omitempty"` DiskReadSizeInBytes *int64 `json:"disk_read_size_in_bytes,omitempty"` DiskReads *int64 `json:"disk_reads,omitempty"` DiskWriteSize *string `json:"disk_write_size,omitempty"` DiskWriteSizeInBytes *int64 `json:"disk_write_size_in_bytes,omitempty"` DiskWrites *int64 `json:"disk_writes,omitempty"` Free *string `json:"free,omitempty"` FreeInBytes *int64 `json:"free_in_bytes,omitempty"` Mount *string `json:"mount,omitempty"` Path *string `json:"path,omitempty"` Total *string `json:"total,omitempty"` TotalInBytes *int64 `json:"total_in_bytes,omitempty"` Type *string `json:"type,omitempty"` }
DataPathStats type.
func NewDataPathStats ¶
func NewDataPathStats() *DataPathStats
NewDataPathStats returns a DataPathStats.
type DataStream ¶
type DataStream struct { AllowCustomRouting *bool `json:"allow_custom_routing,omitempty"` Generation int `json:"generation"` Hidden bool `json:"hidden"` IlmPolicy *string `json:"ilm_policy,omitempty"` Indices []DataStreamIndex `json:"indices"` Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` Name string `json:"name"` Replicated *bool `json:"replicated,omitempty"` Status healthstatus.HealthStatus `json:"status"` System *bool `json:"system,omitempty"` Template string `json:"template"` TimestampField DataStreamTimestampField `json:"timestamp_field"` }
DataStream type.
type DataStreamIndex ¶
type DataStreamIndex struct { IndexName string `json:"index_name"` IndexUuid string `json:"index_uuid"` }
DataStreamIndex type.
func NewDataStreamIndex ¶
func NewDataStreamIndex() *DataStreamIndex
NewDataStreamIndex returns a DataStreamIndex.
type DataStreamTimestamp ¶
type DataStreamTimestamp struct {
Enabled bool `json:"enabled"`
}
DataStreamTimestamp type.
func NewDataStreamTimestamp ¶
func NewDataStreamTimestamp() *DataStreamTimestamp
NewDataStreamTimestamp returns a DataStreamTimestamp.
type DataStreamTimestampField ¶
type DataStreamTimestampField struct {
Name string `json:"name"`
}
DataStreamTimestampField type.
func NewDataStreamTimestampField ¶
func NewDataStreamTimestampField() *DataStreamTimestampField
NewDataStreamTimestampField returns a DataStreamTimestampField.
type DataStreamVisibility ¶
type DataStreamVisibility struct {
Hidden *bool `json:"hidden,omitempty"`
}
DataStreamVisibility type.
func NewDataStreamVisibility ¶
func NewDataStreamVisibility() *DataStreamVisibility
NewDataStreamVisibility returns a DataStreamVisibility.
type DataStreams ¶
type DataStreams struct { Available bool `json:"available"` DataStreams int64 `json:"data_streams"` Enabled bool `json:"enabled"` IndicesCount int64 `json:"indices_count"` }
DataStreams type.
type DataStreamsStatsItem ¶
type DataStreamsStatsItem struct { BackingIndices int `json:"backing_indices"` DataStream string `json:"data_stream"` MaximumTimestamp int64 `json:"maximum_timestamp"` StoreSize ByteSize `json:"store_size,omitempty"` StoreSizeBytes int `json:"store_size_bytes"` }
DataStreamsStatsItem type.
func NewDataStreamsStatsItem ¶
func NewDataStreamsStatsItem() *DataStreamsStatsItem
NewDataStreamsStatsItem returns a DataStreamsStatsItem.
type DataTierPhaseStatistics ¶
type DataTierPhaseStatistics struct { DocCount int64 `json:"doc_count"` IndexCount int64 `json:"index_count"` NodeCount int64 `json:"node_count"` PrimaryShardCount int64 `json:"primary_shard_count"` PrimaryShardSizeAvgBytes int64 `json:"primary_shard_size_avg_bytes"` PrimaryShardSizeMadBytes int64 `json:"primary_shard_size_mad_bytes"` PrimaryShardSizeMedianBytes int64 `json:"primary_shard_size_median_bytes"` PrimarySizeBytes int64 `json:"primary_size_bytes"` TotalShardCount int64 `json:"total_shard_count"` TotalSizeBytes int64 `json:"total_size_bytes"` }
DataTierPhaseStatistics type.
func NewDataTierPhaseStatistics ¶
func NewDataTierPhaseStatistics() *DataTierPhaseStatistics
NewDataTierPhaseStatistics returns a DataTierPhaseStatistics.
type DataTiers ¶
type DataTiers struct { Available bool `json:"available"` DataCold DataTierPhaseStatistics `json:"data_cold"` DataContent DataTierPhaseStatistics `json:"data_content"` DataFrozen *DataTierPhaseStatistics `json:"data_frozen,omitempty"` DataHot DataTierPhaseStatistics `json:"data_hot"` DataWarm DataTierPhaseStatistics `json:"data_warm"` Enabled bool `json:"enabled"` }
DataTiers type.
type DatafeedAuthorization ¶
type DatafeedAuthorization struct { // ApiKey If an API key was used for the most recent update to the datafeed, its name // and identifier are listed in the response. ApiKey *ApiKeyAuthorization `json:"api_key,omitempty"` // Roles If a user ID was used for the most recent update to the datafeed, its roles // at the time of the update are listed in the response. Roles []string `json:"roles,omitempty"` // ServiceAccount If a service account was used for the most recent update to the datafeed, the // account name is listed in the response. ServiceAccount *string `json:"service_account,omitempty"` }
DatafeedAuthorization type.
func NewDatafeedAuthorization ¶
func NewDatafeedAuthorization() *DatafeedAuthorization
NewDatafeedAuthorization returns a DatafeedAuthorization.
type DatafeedConfig ¶
type DatafeedConfig struct { // Aggregations If set, the datafeed performs aggregation searches. Support for aggregations // is limited and should be used only with low cardinality data. Aggregations map[string]Aggregations `json:"aggregations,omitempty"` // ChunkingConfig Datafeeds might be required to search over long time periods, for several // months or years. This search is split into time chunks in order to ensure the // load on Elasticsearch is managed. Chunking configuration controls how the // size of these time chunks are calculated and is an advanced configuration // option. ChunkingConfig *ChunkingConfig `json:"chunking_config,omitempty"` // DatafeedId A numerical character string that uniquely identifies the datafeed. This // identifier can contain lowercase alphanumeric characters (a-z and 0-9), // hyphens, and underscores. It must start and end with alphanumeric characters. // The default value is the job identifier. DatafeedId *string `json:"datafeed_id,omitempty"` // DelayedDataCheckConfig Specifies whether the datafeed checks for missing data and the size of the // window. The datafeed can optionally search over indices that have already // been read in an effort to determine whether any data has subsequently been // added to the index. If missing data is found, it is a good indication that // the `query_delay` option is set too low and the data is being indexed after // the datafeed has passed that moment in time. This check runs only on // real-time datafeeds. DelayedDataCheckConfig *DelayedDataCheckConfig `json:"delayed_data_check_config,omitempty"` // Frequency The interval at which scheduled queries are made while the datafeed runs in // real time. The default value is either the bucket span for short bucket // spans, or, for longer bucket spans, a sensible fraction of the bucket span. // For example: `150s`. 
When `frequency` is shorter than the bucket span, // interim results for the last (partial) bucket are written then eventually // overwritten by the full bucket results. If the datafeed uses aggregations, // this value must be divisible by the interval of the date histogram // aggregation. Frequency Duration `json:"frequency,omitempty"` // Indices An array of index names. Wildcards are supported. If any indices are in // remote clusters, the machine learning nodes must have the // `remote_cluster_client` role. Indices []string `json:"indices,omitempty"` // IndicesOptions Specifies index expansion options that are used during search. IndicesOptions *IndicesOptions `json:"indices_options,omitempty"` JobId *string `json:"job_id,omitempty"` // MaxEmptySearches If a real-time datafeed has never seen any data (including during any initial // training period) then it will automatically stop itself and close its // associated job after this many real-time searches that return no documents. // In other words, it will stop after `frequency` times `max_empty_searches` of // real-time operation. If not set then a datafeed with no end time that sees no // data will remain started until it is explicitly stopped. MaxEmptySearches *int `json:"max_empty_searches,omitempty"` // Query The Elasticsearch query domain-specific language (DSL). This value // corresponds to the query object in an Elasticsearch search POST body. All the // options that are supported by Elasticsearch can be used, as this object is // passed verbatim to Elasticsearch. Query *Query `json:"query,omitempty"` // QueryDelay The number of seconds behind real time that data is queried. For example, if // data from 10:04 a.m. might not be searchable in Elasticsearch until 10:06 // a.m., set this property to 120 seconds. The default value is randomly // selected between `60s` and `120s`. This randomness improves the query // performance when there are multiple jobs running on the same node. 
QueryDelay Duration `json:"query_delay,omitempty"` // RuntimeMappings Specifies runtime fields for the datafeed search. RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` // ScriptFields Specifies scripts that evaluate custom expressions and returns script fields // to the datafeed. The detector configuration objects in a job can contain // functions that use these script fields. ScriptFields map[string]ScriptField `json:"script_fields,omitempty"` // ScrollSize The size parameter that is used in Elasticsearch searches when the datafeed // does not use aggregations. The maximum value is the value of // `index.max_result_window`, which is 10,000 by default. ScrollSize *int `json:"scroll_size,omitempty"` }
DatafeedConfig type.
func NewDatafeedConfig ¶
func NewDatafeedConfig() *DatafeedConfig
NewDatafeedConfig returns a DatafeedConfig.
type DatafeedRunningState ¶
type DatafeedRunningState struct { RealTimeConfigured bool `json:"real_time_configured"` RealTimeRunning bool `json:"real_time_running"` SearchInterval *RunningStateSearchInterval `json:"search_interval,omitempty"` }
DatafeedRunningState type.
func NewDatafeedRunningState ¶
func NewDatafeedRunningState() *DatafeedRunningState
NewDatafeedRunningState returns a DatafeedRunningState.
type DatafeedStats ¶
type DatafeedStats struct { AssignmentExplanation *string `json:"assignment_explanation,omitempty"` DatafeedId string `json:"datafeed_id"` Node *DiscoveryNode `json:"node,omitempty"` RunningState *DatafeedRunningState `json:"running_state,omitempty"` State datafeedstate.DatafeedState `json:"state"` TimingStats DatafeedTimingStats `json:"timing_stats"` }
DatafeedStats type.
func NewDatafeedStats ¶
func NewDatafeedStats() *DatafeedStats
NewDatafeedStats returns a DatafeedStats.
type DatafeedTimingStats ¶
type DatafeedTimingStats struct { AverageSearchTimePerBucketMs Float64 `json:"average_search_time_per_bucket_ms,omitempty"` BucketCount int64 `json:"bucket_count"` ExponentialAverageSearchTimePerHourMs Float64 `json:"exponential_average_search_time_per_hour_ms"` JobId string `json:"job_id"` SearchCount int64 `json:"search_count"` TotalSearchTimeMs Float64 `json:"total_search_time_ms"` }
DatafeedTimingStats type.
func NewDatafeedTimingStats ¶
func NewDatafeedTimingStats() *DatafeedTimingStats
NewDatafeedTimingStats returns a DatafeedTimingStats.
type DatafeedsRecord ¶
type DatafeedsRecord struct { // AssignmentExplanation why the datafeed is or is not assigned to a node AssignmentExplanation *string `json:"assignment_explanation,omitempty"` // BucketsCount bucket count BucketsCount *string `json:"buckets.count,omitempty"` // Id the datafeed_id Id *string `json:"id,omitempty"` // NodeAddress network address of the assigned node NodeAddress *string `json:"node.address,omitempty"` // NodeEphemeralId ephemeral id of the assigned node NodeEphemeralId *string `json:"node.ephemeral_id,omitempty"` // NodeId id of the assigned node NodeId *string `json:"node.id,omitempty"` // NodeName name of the assigned node NodeName *string `json:"node.name,omitempty"` // SearchBucketAvg the average search time per bucket (millisecond) SearchBucketAvg *string `json:"search.bucket_avg,omitempty"` // SearchCount number of searches ran by the datafeed SearchCount *string `json:"search.count,omitempty"` // SearchExpAvgHour the exponential average search time per hour (millisecond) SearchExpAvgHour *string `json:"search.exp_avg_hour,omitempty"` // SearchTime the total search time SearchTime *string `json:"search.time,omitempty"` // State the datafeed state State *datafeedstate.DatafeedState `json:"state,omitempty"` }
DatafeedsRecord type.
func NewDatafeedsRecord ¶
func NewDatafeedsRecord() *DatafeedsRecord
NewDatafeedsRecord returns a DatafeedsRecord.
type DataframeAnalysis ¶
type DataframeAnalysis struct { // Alpha Advanced configuration option. Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss // decreases most quickly. This parameter affects loss calculations by acting as // a multiplier of the tree depth. Higher alpha values result in shallower trees // and faster training times. By default, this value is calculated during // hyperparameter optimization. It must be greater than or equal to zero. Alpha *Float64 `json:"alpha,omitempty"` // DependentVariable Defines which field of the document is to be predicted. It must match one of // the fields in the index being used to train. If this field is missing from a // document, then that document will not be used for training, but a prediction // with the trained model will be generated for it. It is also known as // continuous target variable. // For classification analysis, the data type of the field must be numeric // (`integer`, `short`, `long`, `byte`), categorical (`ip` or `keyword`), or // `boolean`. There must be no more than 30 different values in this field. // For regression analysis, the data type of the field must be numeric. DependentVariable string `json:"dependent_variable"` // DownsampleFactor Advanced configuration option. Controls the fraction of data that is used to // compute the derivatives of the loss function for tree training. A small value // results in the use of a small fraction of the data. If this value is set to // be less than 1, accuracy typically improves. However, too small a value may // result in poor convergence for the ensemble and so require more trees. By // default, this value is calculated during hyperparameter optimization. It must // be greater than zero and less than or equal to 1. DownsampleFactor *Float64 `json:"downsample_factor,omitempty"` // EarlyStoppingEnabled Advanced configuration option. 
Specifies whether the training process should // finish if it is not finding any better performing models. If disabled, the // training process can take significantly longer and the chance of finding a // better performing model is unremarkable. EarlyStoppingEnabled *bool `json:"early_stopping_enabled,omitempty"` // Eta Advanced configuration option. The shrinkage applied to the weights. Smaller // values result in larger forests which have a better generalization error. // However, larger forests cause slower training. By default, this value is // calculated during hyperparameter optimization. It must be a value between // 0.001 and 1. Eta *Float64 `json:"eta,omitempty"` // EtaGrowthRatePerTree Advanced configuration option. Specifies the rate at which `eta` increases // for each new tree that is added to the forest. For example, a rate of 1.05 // increases `eta` by 5% for each extra tree. By default, this value is // calculated during hyperparameter optimization. It must be between 0.5 and 2. EtaGrowthRatePerTree *Float64 `json:"eta_growth_rate_per_tree,omitempty"` // FeatureBagFraction Advanced configuration option. Defines the fraction of features that will be // used when selecting a random bag for each candidate split. By default, this // value is calculated during hyperparameter optimization. FeatureBagFraction *Float64 `json:"feature_bag_fraction,omitempty"` // FeatureProcessors Advanced configuration option. A collection of feature preprocessors that // modify one or more included fields. The analysis uses the resulting one or // more features instead of the original document field. However, these features // are ephemeral; they are not stored in the destination index. Multiple // `feature_processors` entries can refer to the same document fields. Automatic // categorical feature encoding still occurs for the fields that are unprocessed // by a custom processor or that have categorical values. 
Use this property only // if you want to override the automatic feature encoding of the specified // fields. FeatureProcessors []DataframeAnalysisFeatureProcessor `json:"feature_processors,omitempty"` // Gamma Advanced configuration option. Regularization parameter to prevent // overfitting on the training data set. Multiplies a linear penalty associated // with the size of individual trees in the forest. A high gamma value causes // training to prefer small trees. A small gamma value results in larger // individual trees and slower training. By default, this value is calculated // during hyperparameter optimization. It must be a nonnegative value. Gamma *Float64 `json:"gamma,omitempty"` // Lambda Advanced configuration option. Regularization parameter to prevent // overfitting on the training data set. Multiplies an L2 regularization term // which applies to leaf weights of the individual trees in the forest. A high // lambda value causes training to favor small leaf weights. This behavior makes // the prediction function smoother at the expense of potentially not being able // to capture relevant relationships between the features and the dependent // variable. A small lambda value results in large individual trees and slower // training. By default, this value is calculated during hyperparameter // optimization. It must be a nonnegative value. Lambda *Float64 `json:"lambda,omitempty"` // MaxOptimizationRoundsPerHyperparameter Advanced configuration option. A multiplier responsible for determining the // maximum number of hyperparameter optimization steps in the Bayesian // optimization procedure. The maximum number of steps is determined based on // the number of undefined hyperparameters times the maximum optimization rounds // per hyperparameter. By default, this value is calculated during // hyperparameter optimization. 
MaxOptimizationRoundsPerHyperparameter *int `json:"max_optimization_rounds_per_hyperparameter,omitempty"` // MaxTrees Advanced configuration option. Defines the maximum number of decision trees // in the forest. The maximum value is 2000. By default, this value is // calculated during hyperparameter optimization. MaxTrees *int `json:"max_trees,omitempty"` // NumTopFeatureImportanceValues Advanced configuration option. Specifies the maximum number of feature // importance values per document to return. By default, no feature importance // calculation occurs. NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` // PredictionFieldName Defines the name of the prediction field in the results. Defaults to // `<dependent_variable>_prediction`. PredictionFieldName *string `json:"prediction_field_name,omitempty"` // RandomizeSeed Defines the seed for the random generator that is used to pick training data. // By default, it is randomly generated. Set it to a specific value to use the // same training data each time you start a job (assuming other related // parameters such as `source` and `analyzed_fields` are the same). RandomizeSeed *Float64 `json:"randomize_seed,omitempty"` // SoftTreeDepthLimit Advanced configuration option. Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss // decreases most quickly. This soft limit combines with the // `soft_tree_depth_tolerance` to penalize trees that exceed the specified // depth; the regularized loss increases quickly beyond this depth. By default, // this value is calculated during hyperparameter optimization. It must be // greater than or equal to 0. SoftTreeDepthLimit *int `json:"soft_tree_depth_limit,omitempty"` // SoftTreeDepthTolerance Advanced configuration option. This option controls how quickly the // regularized loss increases when the tree depth exceeds // `soft_tree_depth_limit`. 
By default, this value is calculated during // hyperparameter optimization. It must be greater than or equal to 0.01. SoftTreeDepthTolerance *Float64 `json:"soft_tree_depth_tolerance,omitempty"` // TrainingPercent Defines what percentage of the eligible documents that will be used for // training. Documents that are ignored by the analysis (for example those that // contain arrays with more than one value) won’t be included in the calculation // for used percentage. TrainingPercent Percentage `json:"training_percent,omitempty"` }
DataframeAnalysis type.
func NewDataframeAnalysis ¶
func NewDataframeAnalysis() *DataframeAnalysis
NewDataframeAnalysis returns a DataframeAnalysis.
type DataframeAnalysisAnalyzedFields ¶
type DataframeAnalysisAnalyzedFields struct { // Excludes An array of strings that defines the fields that will be excluded from the // analysis. You do not need to add fields with unsupported data types to // excludes, these fields are excluded from the analysis automatically. Excludes []string `json:"excludes"` // Includes An array of strings that defines the fields that will be included in the // analysis. Includes []string `json:"includes"` }
DataframeAnalysisAnalyzedFields type.
func NewDataframeAnalysisAnalyzedFields ¶
func NewDataframeAnalysisAnalyzedFields() *DataframeAnalysisAnalyzedFields
NewDataframeAnalysisAnalyzedFields returns a DataframeAnalysisAnalyzedFields.
type DataframeAnalysisClassification ¶
type DataframeAnalysisClassification struct { // Alpha Advanced configuration option. Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss // decreases most quickly. This parameter affects loss calculations by acting as // a multiplier of the tree depth. Higher alpha values result in shallower trees // and faster training times. By default, this value is calculated during // hyperparameter optimization. It must be greater than or equal to zero. Alpha *Float64 `json:"alpha,omitempty"` ClassAssignmentObjective *string `json:"class_assignment_objective,omitempty"` // DependentVariable Defines which field of the document is to be predicted. It must match one of // the fields in the index being used to train. If this field is missing from a // document, then that document will not be used for training, but a prediction // with the trained model will be generated for it. It is also known as // continuous target variable. // For classification analysis, the data type of the field must be numeric // (`integer`, `short`, `long`, `byte`), categorical (`ip` or `keyword`), or // `boolean`. There must be no more than 30 different values in this field. // For regression analysis, the data type of the field must be numeric. DependentVariable string `json:"dependent_variable"` // DownsampleFactor Advanced configuration option. Controls the fraction of data that is used to // compute the derivatives of the loss function for tree training. A small value // results in the use of a small fraction of the data. If this value is set to // be less than 1, accuracy typically improves. However, too small a value may // result in poor convergence for the ensemble and so require more trees. By // default, this value is calculated during hyperparameter optimization. It must // be greater than zero and less than or equal to 1. 
DownsampleFactor *Float64 `json:"downsample_factor,omitempty"` // EarlyStoppingEnabled Advanced configuration option. Specifies whether the training process should // finish if it is not finding any better performing models. If disabled, the // training process can take significantly longer and the chance of finding a // better performing model is unremarkable. EarlyStoppingEnabled *bool `json:"early_stopping_enabled,omitempty"` // Eta Advanced configuration option. The shrinkage applied to the weights. Smaller // values result in larger forests which have a better generalization error. // However, larger forests cause slower training. By default, this value is // calculated during hyperparameter optimization. It must be a value between // 0.001 and 1. Eta *Float64 `json:"eta,omitempty"` // EtaGrowthRatePerTree Advanced configuration option. Specifies the rate at which `eta` increases // for each new tree that is added to the forest. For example, a rate of 1.05 // increases `eta` by 5% for each extra tree. By default, this value is // calculated during hyperparameter optimization. It must be between 0.5 and 2. EtaGrowthRatePerTree *Float64 `json:"eta_growth_rate_per_tree,omitempty"` // FeatureBagFraction Advanced configuration option. Defines the fraction of features that will be // used when selecting a random bag for each candidate split. By default, this // value is calculated during hyperparameter optimization. FeatureBagFraction *Float64 `json:"feature_bag_fraction,omitempty"` // FeatureProcessors Advanced configuration option. A collection of feature preprocessors that // modify one or more included fields. The analysis uses the resulting one or // more features instead of the original document field. However, these features // are ephemeral; they are not stored in the destination index. Multiple // `feature_processors` entries can refer to the same document fields. 
Automatic // categorical feature encoding still occurs for the fields that are unprocessed // by a custom processor or that have categorical values. Use this property only // if you want to override the automatic feature encoding of the specified // fields. FeatureProcessors []DataframeAnalysisFeatureProcessor `json:"feature_processors,omitempty"` // Gamma Advanced configuration option. Regularization parameter to prevent // overfitting on the training data set. Multiplies a linear penalty associated // with the size of individual trees in the forest. A high gamma value causes // training to prefer small trees. A small gamma value results in larger // individual trees and slower training. By default, this value is calculated // during hyperparameter optimization. It must be a nonnegative value. Gamma *Float64 `json:"gamma,omitempty"` // Lambda Advanced configuration option. Regularization parameter to prevent // overfitting on the training data set. Multiplies an L2 regularization term // which applies to leaf weights of the individual trees in the forest. A high // lambda value causes training to favor small leaf weights. This behavior makes // the prediction function smoother at the expense of potentially not being able // to capture relevant relationships between the features and the dependent // variable. A small lambda value results in large individual trees and slower // training. By default, this value is calculated during hyperparameter // optimization. It must be a nonnegative value. Lambda *Float64 `json:"lambda,omitempty"` // MaxOptimizationRoundsPerHyperparameter Advanced configuration option. A multiplier responsible for determining the // maximum number of hyperparameter optimization steps in the Bayesian // optimization procedure. The maximum number of steps is determined based on // the number of undefined hyperparameters times the maximum optimization rounds // per hyperparameter. 
By default, this value is calculated during // hyperparameter optimization. MaxOptimizationRoundsPerHyperparameter *int `json:"max_optimization_rounds_per_hyperparameter,omitempty"` // MaxTrees Advanced configuration option. Defines the maximum number of decision trees // in the forest. The maximum value is 2000. By default, this value is // calculated during hyperparameter optimization. MaxTrees *int `json:"max_trees,omitempty"` // NumTopClasses Defines the number of categories for which the predicted probabilities are // reported. It must be non-negative or -1. If it is -1 or greater than the // total number of categories, probabilities are reported for all categories; if // you have a large number of categories, there could be a significant effect on // the size of your destination index. NOTE: To use the AUC ROC evaluation // method, `num_top_classes` must be set to -1 or a value greater than or equal // to the total number of categories. NumTopClasses *int `json:"num_top_classes,omitempty"` // NumTopFeatureImportanceValues Advanced configuration option. Specifies the maximum number of feature // importance values per document to return. By default, no feature importance // calculation occurs. NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` // PredictionFieldName Defines the name of the prediction field in the results. Defaults to // `<dependent_variable>_prediction`. PredictionFieldName *string `json:"prediction_field_name,omitempty"` // RandomizeSeed Defines the seed for the random generator that is used to pick training data. // By default, it is randomly generated. Set it to a specific value to use the // same training data each time you start a job (assuming other related // parameters such as `source` and `analyzed_fields` are the same). RandomizeSeed *Float64 `json:"randomize_seed,omitempty"` // SoftTreeDepthLimit Advanced configuration option. 
Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss // decreases most quickly. This soft limit combines with the // `soft_tree_depth_tolerance` to penalize trees that exceed the specified // depth; the regularized loss increases quickly beyond this depth. By default, // this value is calculated during hyperparameter optimization. It must be // greater than or equal to 0. SoftTreeDepthLimit *int `json:"soft_tree_depth_limit,omitempty"` // SoftTreeDepthTolerance Advanced configuration option. This option controls how quickly the // regularized loss increases when the tree depth exceeds // `soft_tree_depth_limit`. By default, this value is calculated during // hyperparameter optimization. It must be greater than or equal to 0.01. SoftTreeDepthTolerance *Float64 `json:"soft_tree_depth_tolerance,omitempty"` // TrainingPercent Defines what percentage of the eligible documents that will be used for // training. Documents that are ignored by the analysis (for example those that // contain arrays with more than one value) won’t be included in the calculation // for used percentage. TrainingPercent Percentage `json:"training_percent,omitempty"` }
DataframeAnalysisClassification type.
func NewDataframeAnalysisClassification ¶
func NewDataframeAnalysisClassification() *DataframeAnalysisClassification
NewDataframeAnalysisClassification returns a DataframeAnalysisClassification.
type DataframeAnalysisContainer ¶
type DataframeAnalysisContainer struct { // Classification The configuration information necessary to perform classification. Classification *DataframeAnalysisClassification `json:"classification,omitempty"` // OutlierDetection The configuration information necessary to perform outlier detection. NOTE: // Advanced parameters are for fine-tuning outlier detection analysis. They are set // automatically by hyperparameter optimization to give the minimum validation // error. It is highly recommended to use the default values unless you fully // understand the function of these parameters. OutlierDetection *DataframeAnalysisOutlierDetection `json:"outlier_detection,omitempty"` // Regression The configuration information necessary to perform regression. NOTE: Advanced // parameters are for fine-tuning regression analysis. They are set // automatically by hyperparameter optimization to give the minimum validation // error. It is highly recommended to use the default values unless you fully // understand the function of these parameters. Regression *DataframeAnalysisRegression `json:"regression,omitempty"` }
DataframeAnalysisContainer type.
func NewDataframeAnalysisContainer ¶
func NewDataframeAnalysisContainer() *DataframeAnalysisContainer
NewDataframeAnalysisContainer returns a DataframeAnalysisContainer.
type DataframeAnalysisFeatureProcessor ¶
type DataframeAnalysisFeatureProcessor struct { // FrequencyEncoding The configuration information necessary to perform frequency encoding. FrequencyEncoding *DataframeAnalysisFeatureProcessorFrequencyEncoding `json:"frequency_encoding,omitempty"` // MultiEncoding The configuration information necessary to perform multi encoding. It allows // multiple processors to be chained together. This way the output of a // processor can then be passed to another as an input. MultiEncoding *DataframeAnalysisFeatureProcessorMultiEncoding `json:"multi_encoding,omitempty"` // NGramEncoding The configuration information necessary to perform n-gram encoding. Features // created by this encoder have the following name format: // <feature_prefix>.<ngram><string position>. For example, if the feature_prefix // is f, the feature name for the second unigram in a string is f.11. NGramEncoding *DataframeAnalysisFeatureProcessorNGramEncoding `json:"n_gram_encoding,omitempty"` // OneHotEncoding The configuration information necessary to perform one hot encoding. OneHotEncoding *DataframeAnalysisFeatureProcessorOneHotEncoding `json:"one_hot_encoding,omitempty"` // TargetMeanEncoding The configuration information necessary to perform target mean encoding. TargetMeanEncoding *DataframeAnalysisFeatureProcessorTargetMeanEncoding `json:"target_mean_encoding,omitempty"` }
DataframeAnalysisFeatureProcessor type.
func NewDataframeAnalysisFeatureProcessor ¶
func NewDataframeAnalysisFeatureProcessor() *DataframeAnalysisFeatureProcessor
NewDataframeAnalysisFeatureProcessor returns a DataframeAnalysisFeatureProcessor.
type DataframeAnalysisFeatureProcessorFrequencyEncoding ¶
type DataframeAnalysisFeatureProcessorFrequencyEncoding struct { // FeatureName The resulting feature name. FeatureName string `json:"feature_name"` Field string `json:"field"` // FrequencyMap The resulting frequency map for the field value. If the field value is // missing from the frequency_map, the resulting value is 0. FrequencyMap map[string]Float64 `json:"frequency_map"` }
DataframeAnalysisFeatureProcessorFrequencyEncoding type.
func NewDataframeAnalysisFeatureProcessorFrequencyEncoding ¶
func NewDataframeAnalysisFeatureProcessorFrequencyEncoding() *DataframeAnalysisFeatureProcessorFrequencyEncoding
NewDataframeAnalysisFeatureProcessorFrequencyEncoding returns a DataframeAnalysisFeatureProcessorFrequencyEncoding.
type DataframeAnalysisFeatureProcessorMultiEncoding ¶
type DataframeAnalysisFeatureProcessorMultiEncoding struct { // Processors The ordered array of custom processors to execute. It must contain more than one processor. Processors []int `json:"processors"` }
DataframeAnalysisFeatureProcessorMultiEncoding type.
func NewDataframeAnalysisFeatureProcessorMultiEncoding ¶
func NewDataframeAnalysisFeatureProcessorMultiEncoding() *DataframeAnalysisFeatureProcessorMultiEncoding
NewDataframeAnalysisFeatureProcessorMultiEncoding returns a DataframeAnalysisFeatureProcessorMultiEncoding.
type DataframeAnalysisFeatureProcessorNGramEncoding ¶
type DataframeAnalysisFeatureProcessorNGramEncoding struct { Custom *bool `json:"custom,omitempty"` // FeaturePrefix The feature name prefix. Defaults to ngram_<start>_<length>. FeaturePrefix *string `json:"feature_prefix,omitempty"` // Field The name of the text field to encode. Field string `json:"field"` // Length Specifies the length of the n-gram substring. Defaults to 50. Must be greater // than 0. Length *int `json:"length,omitempty"` // NGrams Specifies which n-grams to gather. It’s an array of integer values where the // minimum value is 1, and a maximum value is 5. NGrams []int `json:"n_grams"` // Start Specifies the zero-indexed start of the n-gram substring. Negative values are // allowed for encoding n-grams of string suffixes. Defaults to 0. Start *int `json:"start,omitempty"` }
DataframeAnalysisFeatureProcessorNGramEncoding type.
func NewDataframeAnalysisFeatureProcessorNGramEncoding ¶
func NewDataframeAnalysisFeatureProcessorNGramEncoding() *DataframeAnalysisFeatureProcessorNGramEncoding
NewDataframeAnalysisFeatureProcessorNGramEncoding returns a DataframeAnalysisFeatureProcessorNGramEncoding.
type DataframeAnalysisFeatureProcessorOneHotEncoding ¶
type DataframeAnalysisFeatureProcessorOneHotEncoding struct { // Field The name of the field to encode. Field string `json:"field"` // HotMap The one hot map mapping the field value with the column name. HotMap string `json:"hot_map"` }
DataframeAnalysisFeatureProcessorOneHotEncoding type.
func NewDataframeAnalysisFeatureProcessorOneHotEncoding ¶
func NewDataframeAnalysisFeatureProcessorOneHotEncoding() *DataframeAnalysisFeatureProcessorOneHotEncoding
NewDataframeAnalysisFeatureProcessorOneHotEncoding returns a DataframeAnalysisFeatureProcessorOneHotEncoding.
type DataframeAnalysisFeatureProcessorTargetMeanEncoding ¶
type DataframeAnalysisFeatureProcessorTargetMeanEncoding struct { // DefaultValue The default value if field value is not found in the target_map. DefaultValue int `json:"default_value"` // FeatureName The resulting feature name. FeatureName string `json:"feature_name"` // Field The name of the field to encode. Field string `json:"field"` // TargetMap The field value to target mean transition map. TargetMap map[string]json.RawMessage `json:"target_map"` }
DataframeAnalysisFeatureProcessorTargetMeanEncoding type.
func NewDataframeAnalysisFeatureProcessorTargetMeanEncoding ¶
func NewDataframeAnalysisFeatureProcessorTargetMeanEncoding() *DataframeAnalysisFeatureProcessorTargetMeanEncoding
NewDataframeAnalysisFeatureProcessorTargetMeanEncoding returns a DataframeAnalysisFeatureProcessorTargetMeanEncoding.
type DataframeAnalysisOutlierDetection ¶
type DataframeAnalysisOutlierDetection struct { // ComputeFeatureInfluence Specifies whether the feature influence calculation is enabled. ComputeFeatureInfluence *bool `json:"compute_feature_influence,omitempty"` // FeatureInfluenceThreshold The minimum outlier score that a document needs to have in order to calculate // its feature influence score. Value range: 0-1. FeatureInfluenceThreshold *Float64 `json:"feature_influence_threshold,omitempty"` // Method The method that outlier detection uses. Available methods are `lof`, `ldof`, // `distance_kth_nn`, `distance_knn`, and `ensemble`. The default value is // ensemble, which means that outlier detection uses an ensemble of different // methods and normalises and combines their individual outlier scores to obtain // the overall outlier score. Method *string `json:"method,omitempty"` // NNeighbors Defines the value for how many nearest neighbors each method of outlier // detection uses to calculate its outlier score. When the value is not set, // different values are used for different ensemble members. This default // behavior helps improve the diversity in the ensemble; only override it if you // are confident that the value you choose is appropriate for the data set. NNeighbors *int `json:"n_neighbors,omitempty"` // OutlierFraction The proportion of the data set that is assumed to be outlying prior to // outlier detection. For example, 0.05 means it is assumed that 5% of values // are real outliers and 95% are inliers. OutlierFraction *Float64 `json:"outlier_fraction,omitempty"` // StandardizationEnabled If true, the following operation is performed on the columns before computing // outlier scores: `(x_i - mean(x_i)) / sd(x_i)`. StandardizationEnabled *bool `json:"standardization_enabled,omitempty"` }
DataframeAnalysisOutlierDetection type.
func NewDataframeAnalysisOutlierDetection ¶
func NewDataframeAnalysisOutlierDetection() *DataframeAnalysisOutlierDetection
NewDataframeAnalysisOutlierDetection returns a DataframeAnalysisOutlierDetection.
type DataframeAnalysisRegression ¶
type DataframeAnalysisRegression struct { // Alpha Advanced configuration option. Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss // decreases most quickly. This parameter affects loss calculations by acting as // a multiplier of the tree depth. Higher alpha values result in shallower trees // and faster training times. By default, this value is calculated during // hyperparameter optimization. It must be greater than or equal to zero. Alpha *Float64 `json:"alpha,omitempty"` // DependentVariable Defines which field of the document is to be predicted. It must match one of // the fields in the index being used to train. If this field is missing from a // document, then that document will not be used for training, but a prediction // with the trained model will be generated for it. It is also known as // continuous target variable. // For classification analysis, the data type of the field must be numeric // (`integer`, `short`, `long`, `byte`), categorical (`ip` or `keyword`), or // `boolean`. There must be no more than 30 different values in this field. // For regression analysis, the data type of the field must be numeric. DependentVariable string `json:"dependent_variable"` // DownsampleFactor Advanced configuration option. Controls the fraction of data that is used to // compute the derivatives of the loss function for tree training. A small value // results in the use of a small fraction of the data. If this value is set to // be less than 1, accuracy typically improves. However, too small a value may // result in poor convergence for the ensemble and so require more trees. By // default, this value is calculated during hyperparameter optimization. It must // be greater than zero and less than or equal to 1. DownsampleFactor *Float64 `json:"downsample_factor,omitempty"` // EarlyStoppingEnabled Advanced configuration option. 
Specifies whether the training process should // finish if it is not finding any better performing models. If disabled, the // training process can take significantly longer and the chance of finding a // better performing model is unremarkable. EarlyStoppingEnabled *bool `json:"early_stopping_enabled,omitempty"` // Eta Advanced configuration option. The shrinkage applied to the weights. Smaller // values result in larger forests which have a better generalization error. // However, larger forests cause slower training. By default, this value is // calculated during hyperparameter optimization. It must be a value between // 0.001 and 1. Eta *Float64 `json:"eta,omitempty"` // EtaGrowthRatePerTree Advanced configuration option. Specifies the rate at which `eta` increases // for each new tree that is added to the forest. For example, a rate of 1.05 // increases `eta` by 5% for each extra tree. By default, this value is // calculated during hyperparameter optimization. It must be between 0.5 and 2. EtaGrowthRatePerTree *Float64 `json:"eta_growth_rate_per_tree,omitempty"` // FeatureBagFraction Advanced configuration option. Defines the fraction of features that will be // used when selecting a random bag for each candidate split. By default, this // value is calculated during hyperparameter optimization. FeatureBagFraction *Float64 `json:"feature_bag_fraction,omitempty"` // FeatureProcessors Advanced configuration option. A collection of feature preprocessors that // modify one or more included fields. The analysis uses the resulting one or // more features instead of the original document field. However, these features // are ephemeral; they are not stored in the destination index. Multiple // `feature_processors` entries can refer to the same document fields. Automatic // categorical feature encoding still occurs for the fields that are unprocessed // by a custom processor or that have categorical values. 
Use this property only // if you want to override the automatic feature encoding of the specified // fields. FeatureProcessors []DataframeAnalysisFeatureProcessor `json:"feature_processors,omitempty"` // Gamma Advanced configuration option. Regularization parameter to prevent // overfitting on the training data set. Multiplies a linear penalty associated // with the size of individual trees in the forest. A high gamma value causes // training to prefer small trees. A small gamma value results in larger // individual trees and slower training. By default, this value is calculated // during hyperparameter optimization. It must be a nonnegative value. Gamma *Float64 `json:"gamma,omitempty"` // Lambda Advanced configuration option. Regularization parameter to prevent // overfitting on the training data set. Multiplies an L2 regularization term // which applies to leaf weights of the individual trees in the forest. A high // lambda value causes training to favor small leaf weights. This behavior makes // the prediction function smoother at the expense of potentially not being able // to capture relevant relationships between the features and the dependent // variable. A small lambda value results in large individual trees and slower // training. By default, this value is calculated during hyperparameter // optimization. It must be a nonnegative value. Lambda *Float64 `json:"lambda,omitempty"` // LossFunction The loss function used during regression. Available options are `mse` (mean // squared error), `msle` (mean squared logarithmic error), `huber` // (Pseudo-Huber loss). LossFunction *string `json:"loss_function,omitempty"` // LossFunctionParameter A positive number that is used as a parameter to the `loss_function`. LossFunctionParameter *Float64 `json:"loss_function_parameter,omitempty"` // MaxOptimizationRoundsPerHyperparameter Advanced configuration option. 
A multiplier responsible for determining the // maximum number of hyperparameter optimization steps in the Bayesian // optimization procedure. The maximum number of steps is determined based on // the number of undefined hyperparameters times the maximum optimization rounds // per hyperparameter. By default, this value is calculated during // hyperparameter optimization. MaxOptimizationRoundsPerHyperparameter *int `json:"max_optimization_rounds_per_hyperparameter,omitempty"` // MaxTrees Advanced configuration option. Defines the maximum number of decision trees // in the forest. The maximum value is 2000. By default, this value is // calculated during hyperparameter optimization. MaxTrees *int `json:"max_trees,omitempty"` // NumTopFeatureImportanceValues Advanced configuration option. Specifies the maximum number of feature // importance values per document to return. By default, no feature importance // calculation occurs. NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` // PredictionFieldName Defines the name of the prediction field in the results. Defaults to // `<dependent_variable>_prediction`. PredictionFieldName *string `json:"prediction_field_name,omitempty"` // RandomizeSeed Defines the seed for the random generator that is used to pick training data. // By default, it is randomly generated. Set it to a specific value to use the // same training data each time you start a job (assuming other related // parameters such as `source` and `analyzed_fields` are the same). RandomizeSeed *Float64 `json:"randomize_seed,omitempty"` // SoftTreeDepthLimit Advanced configuration option. Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss // decreases most quickly. This soft limit combines with the // `soft_tree_depth_tolerance` to penalize trees that exceed the specified // depth; the regularized loss increases quickly beyond this depth. 
By default, // this value is calculated during hyperparameter optimization. It must be // greater than or equal to 0. SoftTreeDepthLimit *int `json:"soft_tree_depth_limit,omitempty"` // SoftTreeDepthTolerance Advanced configuration option. This option controls how quickly the // regularized loss increases when the tree depth exceeds // `soft_tree_depth_limit`. By default, this value is calculated during // hyperparameter optimization. It must be greater than or equal to 0.01. SoftTreeDepthTolerance *Float64 `json:"soft_tree_depth_tolerance,omitempty"` // TrainingPercent Defines what percentage of the eligible documents that will be used for // training. Documents that are ignored by the analysis (for example those that // contain arrays with more than one value) won’t be included in the calculation // for used percentage. TrainingPercent Percentage `json:"training_percent,omitempty"` }
DataframeAnalysisRegression type.
func NewDataframeAnalysisRegression ¶
func NewDataframeAnalysisRegression() *DataframeAnalysisRegression
NewDataframeAnalysisRegression returns a DataframeAnalysisRegression.
type DataframeAnalytics ¶
type DataframeAnalytics struct { // AnalysisStats An object containing information about the analysis job. AnalysisStats *DataframeAnalyticsStatsContainer `json:"analysis_stats,omitempty"` // AssignmentExplanation For running jobs only, contains messages relating to the selection of a node // to run the job. AssignmentExplanation *string `json:"assignment_explanation,omitempty"` // DataCounts An object that provides counts for the quantity of documents skipped, used in // training, or available for testing. DataCounts DataframeAnalyticsStatsDataCounts `json:"data_counts"` // Id The unique identifier of the data frame analytics job. Id string `json:"id"` // MemoryUsage An object describing memory usage of the analytics. It is present only after // the job is started and memory usage is reported. MemoryUsage DataframeAnalyticsStatsMemoryUsage `json:"memory_usage"` // Node Contains properties for the node that runs the job. This information is // available only for running jobs. Node *NodeAttributes `json:"node,omitempty"` // Progress The progress report of the data frame analytics job by phase. Progress []DataframeAnalyticsStatsProgress `json:"progress"` // State The status of the data frame analytics job, which can be one of the following // values: failed, started, starting, stopping, stopped. State dataframestate.DataframeState `json:"state"` }
DataframeAnalytics type.
func NewDataframeAnalytics ¶
func NewDataframeAnalytics() *DataframeAnalytics
NewDataframeAnalytics returns a DataframeAnalytics.
type DataframeAnalyticsAuthorization ¶
type DataframeAnalyticsAuthorization struct { // ApiKey If an API key was used for the most recent update to the job, its name and // identifier are listed in the response. ApiKey *ApiKeyAuthorization `json:"api_key,omitempty"` // Roles If a user ID was used for the most recent update to the job, its roles at the // time of the update are listed in the response. Roles []string `json:"roles,omitempty"` // ServiceAccount If a service account was used for the most recent update to the job, the // account name is listed in the response. ServiceAccount *string `json:"service_account,omitempty"` }
DataframeAnalyticsAuthorization type.
func NewDataframeAnalyticsAuthorization ¶
func NewDataframeAnalyticsAuthorization() *DataframeAnalyticsAuthorization
NewDataframeAnalyticsAuthorization returns a DataframeAnalyticsAuthorization.
type DataframeAnalyticsDestination ¶
type DataframeAnalyticsDestination struct { // Index Defines the destination index to store the results of the data frame // analytics job. Index string `json:"index"` // ResultsField Defines the name of the field in which to store the results of the analysis. // Defaults to `ml`. ResultsField *string `json:"results_field,omitempty"` }
DataframeAnalyticsDestination type.
func NewDataframeAnalyticsDestination ¶
func NewDataframeAnalyticsDestination() *DataframeAnalyticsDestination
NewDataframeAnalyticsDestination returns a DataframeAnalyticsDestination.
type DataframeAnalyticsFieldSelection ¶
type DataframeAnalyticsFieldSelection struct { // FeatureType The feature type of this field for the analysis. May be categorical or // numerical. FeatureType *string `json:"feature_type,omitempty"` // IsIncluded Whether the field is selected to be included in the analysis. IsIncluded bool `json:"is_included"` // IsRequired Whether the field is required. IsRequired bool `json:"is_required"` // MappingTypes The mapping types of the field. MappingTypes []string `json:"mapping_types"` // Name The field name. Name string `json:"name"` // Reason The reason a field is not selected to be included in the analysis. Reason *string `json:"reason,omitempty"` }
DataframeAnalyticsFieldSelection type.
func NewDataframeAnalyticsFieldSelection ¶
func NewDataframeAnalyticsFieldSelection() *DataframeAnalyticsFieldSelection
NewDataframeAnalyticsFieldSelection returns a DataframeAnalyticsFieldSelection.
type DataframeAnalyticsMemoryEstimation ¶
type DataframeAnalyticsMemoryEstimation struct { // ExpectedMemoryWithDisk Estimated memory usage under the assumption that overflowing to disk is // allowed during data frame analytics. expected_memory_with_disk is usually // smaller than expected_memory_without_disk as using disk allows limiting the // main memory needed to perform data frame analytics. ExpectedMemoryWithDisk string `json:"expected_memory_with_disk"` // ExpectedMemoryWithoutDisk Estimated memory usage under the assumption that the whole data frame // analytics should happen in memory (i.e. without overflowing to disk). ExpectedMemoryWithoutDisk string `json:"expected_memory_without_disk"` }
DataframeAnalyticsMemoryEstimation type.
func NewDataframeAnalyticsMemoryEstimation ¶
func NewDataframeAnalyticsMemoryEstimation() *DataframeAnalyticsMemoryEstimation
NewDataframeAnalyticsMemoryEstimation returns a DataframeAnalyticsMemoryEstimation.
type DataframeAnalyticsSource ¶
type DataframeAnalyticsSource struct { // Index Index or indices on which to perform the analysis. It can be a single index // or index pattern as well as an array of indices or patterns. NOTE: If your // source indices contain documents with the same IDs, only the document that is // indexed last appears in the destination index. Index []string `json:"index"` // Query The Elasticsearch query domain-specific language (DSL). This value // corresponds to the query object in an Elasticsearch search POST body. All the // options that are supported by Elasticsearch can be used, as this object is // passed verbatim to Elasticsearch. By default, this property has the following // value: {"match_all": {}}. Query *Query `json:"query,omitempty"` // RuntimeMappings Definitions of runtime fields that will become part of the mapping of the // destination index. RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` // Source_ Specify `includes` and/or `excludes` patterns to select which fields will be // present in the destination. Fields that are excluded cannot be included in // the analysis. Source_ *DataframeAnalysisAnalyzedFields `json:"_source,omitempty"` }
DataframeAnalyticsSource type.
func NewDataframeAnalyticsSource ¶
func NewDataframeAnalyticsSource() *DataframeAnalyticsSource
NewDataframeAnalyticsSource returns a DataframeAnalyticsSource.
type DataframeAnalyticsStatsContainer ¶
type DataframeAnalyticsStatsContainer struct { // ClassificationStats An object containing information about the classification analysis job. ClassificationStats *DataframeAnalyticsStatsHyperparameters `json:"classification_stats,omitempty"` // OutlierDetectionStats An object containing information about the outlier detection job. OutlierDetectionStats *DataframeAnalyticsStatsOutlierDetection `json:"outlier_detection_stats,omitempty"` // RegressionStats An object containing information about the regression analysis. RegressionStats *DataframeAnalyticsStatsHyperparameters `json:"regression_stats,omitempty"` }
DataframeAnalyticsStatsContainer type.
func NewDataframeAnalyticsStatsContainer ¶
func NewDataframeAnalyticsStatsContainer() *DataframeAnalyticsStatsContainer
NewDataframeAnalyticsStatsContainer returns a DataframeAnalyticsStatsContainer.
type DataframeAnalyticsStatsDataCounts ¶
type DataframeAnalyticsStatsDataCounts struct { // SkippedDocsCount The number of documents that are skipped during the analysis because they // contained values that are not supported by the analysis. For example, outlier // detection does not support missing fields so it skips documents with missing // fields. Likewise, all types of analysis skip documents that contain arrays // with more than one element. SkippedDocsCount int `json:"skipped_docs_count"` // TestDocsCount The number of documents that are not used for training the model and can be // used for testing. TestDocsCount int `json:"test_docs_count"` // TrainingDocsCount The number of documents that are used for training the model. TrainingDocsCount int `json:"training_docs_count"` }
DataframeAnalyticsStatsDataCounts type.
func NewDataframeAnalyticsStatsDataCounts ¶
func NewDataframeAnalyticsStatsDataCounts() *DataframeAnalyticsStatsDataCounts
NewDataframeAnalyticsStatsDataCounts returns a DataframeAnalyticsStatsDataCounts.
type DataframeAnalyticsStatsHyperparameters ¶
type DataframeAnalyticsStatsHyperparameters struct { Hyperparameters Hyperparameters `json:"hyperparameters"` // Iteration The number of iterations on the analysis. Iteration int `json:"iteration"` Timestamp int64 `json:"timestamp"` TimingStats TimingStats `json:"timing_stats"` ValidationLoss ValidationLoss `json:"validation_loss"` }
DataframeAnalyticsStatsHyperparameters type.
func NewDataframeAnalyticsStatsHyperparameters ¶
func NewDataframeAnalyticsStatsHyperparameters() *DataframeAnalyticsStatsHyperparameters
NewDataframeAnalyticsStatsHyperparameters returns a DataframeAnalyticsStatsHyperparameters.
type DataframeAnalyticsStatsMemoryUsage ¶
type DataframeAnalyticsStatsMemoryUsage struct { // MemoryReestimateBytes This value is present when the status is hard_limit and it is a new estimate // of how much memory the job needs. MemoryReestimateBytes *int64 `json:"memory_reestimate_bytes,omitempty"` // PeakUsageBytes The number of bytes used at the highest peak of memory usage. PeakUsageBytes int64 `json:"peak_usage_bytes"` // Status The memory usage status. Status string `json:"status"` // Timestamp The timestamp when memory usage was calculated. Timestamp *int64 `json:"timestamp,omitempty"` }
DataframeAnalyticsStatsMemoryUsage type.
func NewDataframeAnalyticsStatsMemoryUsage ¶
func NewDataframeAnalyticsStatsMemoryUsage() *DataframeAnalyticsStatsMemoryUsage
NewDataframeAnalyticsStatsMemoryUsage returns a DataframeAnalyticsStatsMemoryUsage.
type DataframeAnalyticsStatsOutlierDetection ¶
type DataframeAnalyticsStatsOutlierDetection struct { Parameters OutlierDetectionParameters `json:"parameters"` Timestamp int64 `json:"timestamp"` TimingStats TimingStats `json:"timing_stats"` }
DataframeAnalyticsStatsOutlierDetection type.
func NewDataframeAnalyticsStatsOutlierDetection ¶
func NewDataframeAnalyticsStatsOutlierDetection() *DataframeAnalyticsStatsOutlierDetection
NewDataframeAnalyticsStatsOutlierDetection returns a DataframeAnalyticsStatsOutlierDetection.
type DataframeAnalyticsStatsProgress ¶
type DataframeAnalyticsStatsProgress struct { // Phase Defines the phase of the data frame analytics job. Phase string `json:"phase"` // ProgressPercent The progress that the data frame analytics job has made expressed in // percentage. ProgressPercent int `json:"progress_percent"` }
DataframeAnalyticsStatsProgress type.
func NewDataframeAnalyticsStatsProgress ¶
func NewDataframeAnalyticsStatsProgress() *DataframeAnalyticsStatsProgress
NewDataframeAnalyticsStatsProgress returns a DataframeAnalyticsStatsProgress.
type DataframeAnalyticsSummary ¶
type DataframeAnalyticsSummary struct { AllowLazyStart *bool `json:"allow_lazy_start,omitempty"` Analysis DataframeAnalysisContainer `json:"analysis"` AnalyzedFields *DataframeAnalysisAnalyzedFields `json:"analyzed_fields,omitempty"` // Authorization The security privileges that the job uses to run its queries. If Elastic // Stack security features were disabled at the time of the most recent update // to the job, this property is omitted. Authorization *DataframeAnalyticsAuthorization `json:"authorization,omitempty"` CreateTime *int64 `json:"create_time,omitempty"` Description *string `json:"description,omitempty"` Dest DataframeAnalyticsDestination `json:"dest"` Id string `json:"id"` MaxNumThreads *int `json:"max_num_threads,omitempty"` ModelMemoryLimit *string `json:"model_memory_limit,omitempty"` Source DataframeAnalyticsSource `json:"source"` Version *string `json:"version,omitempty"` }
DataframeAnalyticsSummary type.
func NewDataframeAnalyticsSummary ¶
func NewDataframeAnalyticsSummary() *DataframeAnalyticsSummary
NewDataframeAnalyticsSummary returns a DataframeAnalyticsSummary.
type DataframeClassificationSummary ¶
type DataframeClassificationSummary struct { Accuracy *DataframeClassificationSummaryAccuracy `json:"accuracy,omitempty"` AucRoc *DataframeEvaluationSummaryAucRoc `json:"auc_roc,omitempty"` MulticlassConfusionMatrix *DataframeClassificationSummaryMulticlassConfusionMatrix `json:"multiclass_confusion_matrix,omitempty"` Precision *DataframeClassificationSummaryPrecision `json:"precision,omitempty"` Recall *DataframeClassificationSummaryRecall `json:"recall,omitempty"` }
DataframeClassificationSummary type.
func NewDataframeClassificationSummary ¶
func NewDataframeClassificationSummary() *DataframeClassificationSummary
NewDataframeClassificationSummary returns a DataframeClassificationSummary.
type DataframeClassificationSummaryAccuracy ¶
type DataframeClassificationSummaryAccuracy struct { Classes []DataframeEvaluationClass `json:"classes"` OverallAccuracy Float64 `json:"overall_accuracy"` }
DataframeClassificationSummaryAccuracy type.
func NewDataframeClassificationSummaryAccuracy ¶
func NewDataframeClassificationSummaryAccuracy() *DataframeClassificationSummaryAccuracy
NewDataframeClassificationSummaryAccuracy returns a DataframeClassificationSummaryAccuracy.
type DataframeClassificationSummaryMulticlassConfusionMatrix ¶
type DataframeClassificationSummaryMulticlassConfusionMatrix struct { ConfusionMatrix []ConfusionMatrixItem `json:"confusion_matrix"` OtherActualClassCount int `json:"other_actual_class_count"` }
DataframeClassificationSummaryMulticlassConfusionMatrix type.
func NewDataframeClassificationSummaryMulticlassConfusionMatrix ¶
func NewDataframeClassificationSummaryMulticlassConfusionMatrix() *DataframeClassificationSummaryMulticlassConfusionMatrix
NewDataframeClassificationSummaryMulticlassConfusionMatrix returns a DataframeClassificationSummaryMulticlassConfusionMatrix.
type DataframeClassificationSummaryPrecision ¶
type DataframeClassificationSummaryPrecision struct { AvgPrecision Float64 `json:"avg_precision"` Classes []DataframeEvaluationClass `json:"classes"` }
DataframeClassificationSummaryPrecision type.
func NewDataframeClassificationSummaryPrecision ¶
func NewDataframeClassificationSummaryPrecision() *DataframeClassificationSummaryPrecision
NewDataframeClassificationSummaryPrecision returns a DataframeClassificationSummaryPrecision.
type DataframeClassificationSummaryRecall ¶
type DataframeClassificationSummaryRecall struct { AvgRecall Float64 `json:"avg_recall"` Classes []DataframeEvaluationClass `json:"classes"` }
DataframeClassificationSummaryRecall type.
func NewDataframeClassificationSummaryRecall ¶
func NewDataframeClassificationSummaryRecall() *DataframeClassificationSummaryRecall
NewDataframeClassificationSummaryRecall returns a DataframeClassificationSummaryRecall.
type DataframeEvaluationClass ¶
type DataframeEvaluationClass struct { ClassName string `json:"class_name"` Value Float64 `json:"value"` }
DataframeEvaluationClass type.
func NewDataframeEvaluationClass ¶
func NewDataframeEvaluationClass() *DataframeEvaluationClass
NewDataframeEvaluationClass returns a DataframeEvaluationClass.
type DataframeEvaluationClassification ¶
type DataframeEvaluationClassification struct { // ActualField The field of the index which contains the ground truth. The data type of this // field can be boolean or integer. If the data type is integer, the value has // to be either 0 (false) or 1 (true). ActualField string `json:"actual_field"` // Metrics Specifies the metrics that are used for the evaluation. Metrics *DataframeEvaluationClassificationMetrics `json:"metrics,omitempty"` // PredictedField The field in the index which contains the predicted value, in other words the // results of the classification analysis. PredictedField *string `json:"predicted_field,omitempty"` // TopClassesField The field of the index which is an array of documents of the form { // "class_name": XXX, "class_probability": YYY }. This field must be defined as // nested in the mappings. TopClassesField *string `json:"top_classes_field,omitempty"` }
DataframeEvaluationClassification type.
func NewDataframeEvaluationClassification ¶
func NewDataframeEvaluationClassification() *DataframeEvaluationClassification
NewDataframeEvaluationClassification returns a DataframeEvaluationClassification.
type DataframeEvaluationClassificationMetrics ¶
type DataframeEvaluationClassificationMetrics struct { // Accuracy Accuracy of predictions (per-class and overall). Accuracy map[string]json.RawMessage `json:"accuracy,omitempty"` // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. It is calculated for a specific class // (provided as "class_name") treated as positive. AucRoc *DataframeEvaluationClassificationMetricsAucRoc `json:"auc_roc,omitempty"` // MulticlassConfusionMatrix Multiclass confusion matrix. MulticlassConfusionMatrix map[string]json.RawMessage `json:"multiclass_confusion_matrix,omitempty"` // Precision Precision of predictions (per-class and average). Precision map[string]json.RawMessage `json:"precision,omitempty"` // Recall Recall of predictions (per-class and average). Recall map[string]json.RawMessage `json:"recall,omitempty"` }
DataframeEvaluationClassificationMetrics type.
func NewDataframeEvaluationClassificationMetrics ¶
func NewDataframeEvaluationClassificationMetrics() *DataframeEvaluationClassificationMetrics
NewDataframeEvaluationClassificationMetrics returns a DataframeEvaluationClassificationMetrics.
type DataframeEvaluationClassificationMetricsAucRoc ¶
type DataframeEvaluationClassificationMetricsAucRoc struct { // ClassName Name of the only class that is treated as positive during AUC ROC // calculation. Other classes are treated as negative ("one-vs-all" strategy). // All the evaluated documents must have class_name in the list of their top // classes. ClassName *string `json:"class_name,omitempty"` // IncludeCurve Whether or not the curve should be returned in addition to the score. Default // value is false. IncludeCurve *bool `json:"include_curve,omitempty"` }
DataframeEvaluationClassificationMetricsAucRoc type.
func NewDataframeEvaluationClassificationMetricsAucRoc ¶
func NewDataframeEvaluationClassificationMetricsAucRoc() *DataframeEvaluationClassificationMetricsAucRoc
NewDataframeEvaluationClassificationMetricsAucRoc returns a DataframeEvaluationClassificationMetricsAucRoc.
type DataframeEvaluationContainer ¶
type DataframeEvaluationContainer struct { // Classification Classification evaluation evaluates the results of a classification analysis // which outputs a prediction that identifies to which of the classes each // document belongs. Classification *DataframeEvaluationClassification `json:"classification,omitempty"` // OutlierDetection Outlier detection evaluates the results of an outlier detection analysis // which outputs the probability that each document is an outlier. OutlierDetection *DataframeEvaluationOutlierDetection `json:"outlier_detection,omitempty"` // Regression Regression evaluation evaluates the results of a regression analysis which // outputs a prediction of values. Regression *DataframeEvaluationRegression `json:"regression,omitempty"` }
DataframeEvaluationContainer type.
func NewDataframeEvaluationContainer ¶
func NewDataframeEvaluationContainer() *DataframeEvaluationContainer
NewDataframeEvaluationContainer returns a DataframeEvaluationContainer.
type DataframeEvaluationMetrics ¶
type DataframeEvaluationMetrics struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. It is calculated for a specific class // (provided as "class_name") treated as positive. AucRoc *DataframeEvaluationClassificationMetricsAucRoc `json:"auc_roc,omitempty"` // Precision Precision of predictions (per-class and average). Precision map[string]json.RawMessage `json:"precision,omitempty"` // Recall Recall of predictions (per-class and average). Recall map[string]json.RawMessage `json:"recall,omitempty"` }
DataframeEvaluationMetrics type.
func NewDataframeEvaluationMetrics ¶
func NewDataframeEvaluationMetrics() *DataframeEvaluationMetrics
NewDataframeEvaluationMetrics returns a DataframeEvaluationMetrics.
type DataframeEvaluationOutlierDetection ¶
type DataframeEvaluationOutlierDetection struct { // ActualField The field of the index which contains the ground truth. The data type of this // field can be boolean or integer. If the data type is integer, the value has // to be either 0 (false) or 1 (true). ActualField string `json:"actual_field"` // Metrics Specifies the metrics that are used for the evaluation. Metrics *DataframeEvaluationOutlierDetectionMetrics `json:"metrics,omitempty"` // PredictedProbabilityField The field of the index that defines the probability of whether the item // belongs to the class in question or not. It’s the field that contains the // results of the analysis. PredictedProbabilityField string `json:"predicted_probability_field"` }
DataframeEvaluationOutlierDetection type.
func NewDataframeEvaluationOutlierDetection ¶
func NewDataframeEvaluationOutlierDetection() *DataframeEvaluationOutlierDetection
NewDataframeEvaluationOutlierDetection returns a DataframeEvaluationOutlierDetection.
type DataframeEvaluationOutlierDetectionMetrics ¶
type DataframeEvaluationOutlierDetectionMetrics struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. It is calculated for a specific class // (provided as "class_name") treated as positive. AucRoc *DataframeEvaluationClassificationMetricsAucRoc `json:"auc_roc,omitempty"` // ConfusionMatrix Confusion matrix calculated at the defined outlier score thresholds. ConfusionMatrix map[string]json.RawMessage `json:"confusion_matrix,omitempty"` // Precision Precision of predictions (per-class and average). Precision map[string]json.RawMessage `json:"precision,omitempty"` // Recall Recall of predictions (per-class and average). Recall map[string]json.RawMessage `json:"recall,omitempty"` }
DataframeEvaluationOutlierDetectionMetrics type.
func NewDataframeEvaluationOutlierDetectionMetrics ¶
func NewDataframeEvaluationOutlierDetectionMetrics() *DataframeEvaluationOutlierDetectionMetrics
NewDataframeEvaluationOutlierDetectionMetrics returns a DataframeEvaluationOutlierDetectionMetrics.
type DataframeEvaluationRegression ¶
type DataframeEvaluationRegression struct { // ActualField The field of the index which contains the ground truth. The data type of this // field must be numerical. ActualField string `json:"actual_field"` // Metrics Specifies the metrics that are used for the evaluation. For more information // on mse, msle, and huber, consult the Jupyter notebook on regression loss // functions. Metrics *DataframeEvaluationRegressionMetrics `json:"metrics,omitempty"` // PredictedField The field in the index that contains the predicted value, in other words the // results of the regression analysis. PredictedField string `json:"predicted_field"` }
DataframeEvaluationRegression type.
func NewDataframeEvaluationRegression ¶
func NewDataframeEvaluationRegression() *DataframeEvaluationRegression
NewDataframeEvaluationRegression returns a DataframeEvaluationRegression.
type DataframeEvaluationRegressionMetrics ¶
type DataframeEvaluationRegressionMetrics struct { // Huber Pseudo Huber loss function. Huber *DataframeEvaluationRegressionMetricsHuber `json:"huber,omitempty"` // Mse Average squared difference between the predicted values and the actual // (ground truth) value. For more information, read this wiki article. Mse map[string]json.RawMessage `json:"mse,omitempty"` // Msle Average squared difference between the logarithm of the predicted values and // the logarithm of the actual (ground truth) value. Msle *DataframeEvaluationRegressionMetricsMsle `json:"msle,omitempty"` // RSquared Proportion of the variance in the dependent variable that is predictable from // the independent variables. RSquared map[string]json.RawMessage `json:"r_squared,omitempty"` }
DataframeEvaluationRegressionMetrics type.
func NewDataframeEvaluationRegressionMetrics ¶
func NewDataframeEvaluationRegressionMetrics() *DataframeEvaluationRegressionMetrics
NewDataframeEvaluationRegressionMetrics returns a DataframeEvaluationRegressionMetrics.
type DataframeEvaluationRegressionMetricsHuber ¶
type DataframeEvaluationRegressionMetricsHuber struct { // Delta Approximates 1/2 (prediction - actual)² for values much less than delta and // approximates a straight line with slope delta for values much larger than // delta. Defaults to 1. Delta needs to be greater than 0. Delta *Float64 `json:"delta,omitempty"` }
DataframeEvaluationRegressionMetricsHuber type.
func NewDataframeEvaluationRegressionMetricsHuber ¶
func NewDataframeEvaluationRegressionMetricsHuber() *DataframeEvaluationRegressionMetricsHuber
NewDataframeEvaluationRegressionMetricsHuber returns a DataframeEvaluationRegressionMetricsHuber.
type DataframeEvaluationRegressionMetricsMsle ¶
type DataframeEvaluationRegressionMetricsMsle struct { // Offset Defines the transition point at which you switch from minimizing quadratic // error to minimizing quadratic log error. Defaults to 1. Offset *Float64 `json:"offset,omitempty"` }
DataframeEvaluationRegressionMetricsMsle type.
func NewDataframeEvaluationRegressionMetricsMsle ¶
func NewDataframeEvaluationRegressionMetricsMsle() *DataframeEvaluationRegressionMetricsMsle
NewDataframeEvaluationRegressionMetricsMsle returns a DataframeEvaluationRegressionMetricsMsle.
type DataframeEvaluationSummaryAucRoc ¶
type DataframeEvaluationSummaryAucRoc struct { Curve []DataframeEvaluationSummaryAucRocCurveItem `json:"curve,omitempty"` Value Float64 `json:"value"` }
DataframeEvaluationSummaryAucRoc type.
func NewDataframeEvaluationSummaryAucRoc ¶
func NewDataframeEvaluationSummaryAucRoc() *DataframeEvaluationSummaryAucRoc
NewDataframeEvaluationSummaryAucRoc returns a DataframeEvaluationSummaryAucRoc.
type DataframeEvaluationSummaryAucRocCurveItem ¶
type DataframeEvaluationSummaryAucRocCurveItem struct { Fpr Float64 `json:"fpr"` Threshold Float64 `json:"threshold"` Tpr Float64 `json:"tpr"` }
DataframeEvaluationSummaryAucRocCurveItem type.
func NewDataframeEvaluationSummaryAucRocCurveItem ¶
func NewDataframeEvaluationSummaryAucRocCurveItem() *DataframeEvaluationSummaryAucRocCurveItem
NewDataframeEvaluationSummaryAucRocCurveItem returns a DataframeEvaluationSummaryAucRocCurveItem.
type DataframeEvaluationValue ¶
type DataframeEvaluationValue struct {
Value Float64 `json:"value"`
}
DataframeEvaluationValue type.
func NewDataframeEvaluationValue ¶
func NewDataframeEvaluationValue() *DataframeEvaluationValue
NewDataframeEvaluationValue returns a DataframeEvaluationValue.
type DataframeOutlierDetectionSummary ¶
type DataframeOutlierDetectionSummary struct { AucRoc *DataframeEvaluationSummaryAucRoc `json:"auc_roc,omitempty"` ConfusionMatrix map[string]ConfusionMatrixThreshold `json:"confusion_matrix,omitempty"` Precision map[string]Float64 `json:"precision,omitempty"` Recall map[string]Float64 `json:"recall,omitempty"` }
DataframeOutlierDetectionSummary type.
func NewDataframeOutlierDetectionSummary ¶
func NewDataframeOutlierDetectionSummary() *DataframeOutlierDetectionSummary
NewDataframeOutlierDetectionSummary returns a DataframeOutlierDetectionSummary.
type DataframePreviewConfig ¶
type DataframePreviewConfig struct { Analysis DataframeAnalysisContainer `json:"analysis"` AnalyzedFields *DataframeAnalysisAnalyzedFields `json:"analyzed_fields,omitempty"` MaxNumThreads *int `json:"max_num_threads,omitempty"` ModelMemoryLimit *string `json:"model_memory_limit,omitempty"` Source DataframeAnalyticsSource `json:"source"` }
DataframePreviewConfig type.
func NewDataframePreviewConfig ¶
func NewDataframePreviewConfig() *DataframePreviewConfig
NewDataframePreviewConfig returns a DataframePreviewConfig.
type DataframeRegressionSummary ¶
type DataframeRegressionSummary struct { Huber *DataframeEvaluationValue `json:"huber,omitempty"` Mse *DataframeEvaluationValue `json:"mse,omitempty"` Msle *DataframeEvaluationValue `json:"msle,omitempty"` RSquared *DataframeEvaluationValue `json:"r_squared,omitempty"` }
DataframeRegressionSummary type.
func NewDataframeRegressionSummary ¶
func NewDataframeRegressionSummary() *DataframeRegressionSummary
NewDataframeRegressionSummary returns a DataframeRegressionSummary.
type DateDecayFunction ¶
type DateDecayFunction struct { DateDecayFunction map[string]DecayPlacementDateMathDuration `json:"-"` MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` }
DateDecayFunction type.
func NewDateDecayFunction ¶
func NewDateDecayFunction() *DateDecayFunction
NewDateDecayFunction returns a DateDecayFunction.
func (DateDecayFunction) MarshalJSON ¶
func (s DateDecayFunction) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type DateDistanceFeatureQuery ¶
type DateDistanceFeatureQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` Origin string `json:"origin"` Pivot Duration `json:"pivot"` QueryName_ *string `json:"_name,omitempty"` }
DateDistanceFeatureQuery type.
func NewDateDistanceFeatureQuery ¶
func NewDateDistanceFeatureQuery() *DateDistanceFeatureQuery
NewDateDistanceFeatureQuery returns a DateDistanceFeatureQuery.
type DateHistogramAggregate ¶
type DateHistogramAggregate struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
DateHistogramAggregate type.
func NewDateHistogramAggregate ¶
func NewDateHistogramAggregate() *DateHistogramAggregate
NewDateHistogramAggregate returns a DateHistogramAggregate.
func (*DateHistogramAggregate) UnmarshalJSON ¶
func (s *DateHistogramAggregate) UnmarshalJSON(data []byte) error
type DateHistogramAggregation ¶
type DateHistogramAggregation struct { CalendarInterval *calendarinterval.CalendarInterval `json:"calendar_interval,omitempty"` ExtendedBounds *ExtendedBoundsFieldDateMath `json:"extended_bounds,omitempty"` Field *string `json:"field,omitempty"` FixedInterval Duration `json:"fixed_interval,omitempty"` Format *string `json:"format,omitempty"` HardBounds *ExtendedBoundsFieldDateMath `json:"hard_bounds,omitempty"` Interval Duration `json:"interval,omitempty"` Keyed *bool `json:"keyed,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` MinDocCount *int `json:"min_doc_count,omitempty"` Missing DateTime `json:"missing,omitempty"` Name *string `json:"name,omitempty"` Offset Duration `json:"offset,omitempty"` Order AggregateOrder `json:"order,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` Script Script `json:"script,omitempty"` TimeZone *string `json:"time_zone,omitempty"` }
DateHistogramAggregation type.
func NewDateHistogramAggregation ¶
func NewDateHistogramAggregation() *DateHistogramAggregation
NewDateHistogramAggregation returns a DateHistogramAggregation.
func (*DateHistogramAggregation) UnmarshalJSON ¶
func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error
type DateHistogramBucket ¶
type DateHistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key int64 `json:"key"` KeyAsString *string `json:"key_as_string,omitempty"` }
DateHistogramBucket type.
func NewDateHistogramBucket ¶
func NewDateHistogramBucket() *DateHistogramBucket
NewDateHistogramBucket returns a DateHistogramBucket.
func (DateHistogramBucket) MarshalJSON ¶
func (s DateHistogramBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*DateHistogramBucket) UnmarshalJSON ¶
func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error
type DateHistogramGrouping ¶
type DateHistogramGrouping struct { CalendarInterval Duration `json:"calendar_interval,omitempty"` Delay Duration `json:"delay,omitempty"` Field string `json:"field"` FixedInterval Duration `json:"fixed_interval,omitempty"` Format *string `json:"format,omitempty"` Interval Duration `json:"interval,omitempty"` TimeZone *string `json:"time_zone,omitempty"` }
DateHistogramGrouping type.
func NewDateHistogramGrouping ¶
func NewDateHistogramGrouping() *DateHistogramGrouping
NewDateHistogramGrouping returns a DateHistogramGrouping.
type DateIndexNameProcessor ¶
type DateIndexNameProcessor struct { DateFormats []string `json:"date_formats"` // DateRounding How to round the date when formatting the date into the index name. Valid // values are: // `y` (year), `M` (month), `w` (week), `d` (day), `h` (hour), `m` (minute) and // `s` (second). // Supports template snippets. DateRounding string `json:"date_rounding"` Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IndexNameFormat *string `json:"index_name_format,omitempty"` IndexNamePrefix *string `json:"index_name_prefix,omitempty"` Locale *string `json:"locale,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` Timezone *string `json:"timezone,omitempty"` }
DateIndexNameProcessor type.
func NewDateIndexNameProcessor ¶
func NewDateIndexNameProcessor() *DateIndexNameProcessor
NewDateIndexNameProcessor returns a DateIndexNameProcessor.
type DateNanosProperty ¶
type DateNanosProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` Format *string `json:"format,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue DateTime `json:"null_value,omitempty"` PrecisionStep *int `json:"precision_step,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
DateNanosProperty type.
func NewDateNanosProperty ¶
func NewDateNanosProperty() *DateNanosProperty
NewDateNanosProperty returns a DateNanosProperty.
func (*DateNanosProperty) UnmarshalJSON ¶
func (s *DateNanosProperty) UnmarshalJSON(data []byte) error
type DateProcessor ¶
type DateProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` Formats []string `json:"formats"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` Locale *string `json:"locale,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` Timezone *string `json:"timezone,omitempty"` }
DateProcessor type.
func NewDateProcessor ¶
func NewDateProcessor() *DateProcessor
NewDateProcessor returns a DateProcessor.
type DateProperty ¶
type DateProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fielddata *NumericFielddata `json:"fielddata,omitempty"` Fields map[string]Property `json:"fields,omitempty"` Format *string `json:"format,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` Locale *string `json:"locale,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue DateTime `json:"null_value,omitempty"` PrecisionStep *int `json:"precision_step,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
DateProperty type.
func NewDateProperty ¶
func NewDateProperty() *DateProperty
NewDateProperty returns a DateProperty.
func (*DateProperty) UnmarshalJSON ¶
func (s *DateProperty) UnmarshalJSON(data []byte) error
type DateRangeAggregate ¶
type DateRangeAggregate struct { Buckets BucketsRangeBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
DateRangeAggregate type.
func NewDateRangeAggregate ¶
func NewDateRangeAggregate() *DateRangeAggregate
NewDateRangeAggregate returns a DateRangeAggregate.
func (*DateRangeAggregate) UnmarshalJSON ¶
func (s *DateRangeAggregate) UnmarshalJSON(data []byte) error
type DateRangeAggregation ¶
type DateRangeAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Keyed *bool `json:"keyed,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Missing Missing `json:"missing,omitempty"` Name *string `json:"name,omitempty"` Ranges []DateRangeExpression `json:"ranges,omitempty"` TimeZone *string `json:"time_zone,omitempty"` }
DateRangeAggregation type.
func NewDateRangeAggregation ¶
func NewDateRangeAggregation() *DateRangeAggregation
NewDateRangeAggregation returns a DateRangeAggregation.
type DateRangeExpression ¶
type DateRangeExpression struct { From FieldDateMath `json:"from,omitempty"` Key *string `json:"key,omitempty"` To FieldDateMath `json:"to,omitempty"` }
DateRangeExpression type.
func NewDateRangeExpression ¶
func NewDateRangeExpression() *DateRangeExpression
NewDateRangeExpression returns a DateRangeExpression.
type DateRangeProperty ¶
type DateRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` Format *string `json:"format,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
DateRangeProperty type.
func NewDateRangeProperty ¶
func NewDateRangeProperty() *DateRangeProperty
NewDateRangeProperty returns a DateRangeProperty.
func (*DateRangeProperty) UnmarshalJSON ¶
func (s *DateRangeProperty) UnmarshalJSON(data []byte) error
type DateRangeQuery ¶
type DateRangeQuery struct { Boost *float32 `json:"boost,omitempty"` Format *string `json:"format,omitempty"` From string `json:"from,omitempty"` Gt *string `json:"gt,omitempty"` Gte *string `json:"gte,omitempty"` Lt *string `json:"lt,omitempty"` Lte *string `json:"lte,omitempty"` QueryName_ *string `json:"_name,omitempty"` Relation *rangerelation.RangeRelation `json:"relation,omitempty"` TimeZone *string `json:"time_zone,omitempty"` To string `json:"to,omitempty"` }
DateRangeQuery type.
func NewDateRangeQuery ¶
func NewDateRangeQuery() *DateRangeQuery
NewDateRangeQuery returns a DateRangeQuery.
type DateTime ¶
type DateTime interface{}
DateTime holds the union for the following types:
string int64
type DecayFunction ¶
type DecayFunction interface{}
DecayFunction holds the union for the following types:
DateDecayFunction NumericDecayFunction GeoDecayFunction
type DecayPlacementDateMathDuration ¶
type DecayPlacementDateMathDuration struct { Decay *Float64 `json:"decay,omitempty"` Offset Duration `json:"offset,omitempty"` Origin *string `json:"origin,omitempty"` Scale Duration `json:"scale,omitempty"` }
DecayPlacementDateMathDuration type.
func NewDecayPlacementDateMathDuration ¶
func NewDecayPlacementDateMathDuration() *DecayPlacementDateMathDuration
NewDecayPlacementDateMathDuration returns a DecayPlacementDateMathDuration.
type DecayPlacementGeoLocationDistance ¶
type DecayPlacementGeoLocationDistance struct { Decay *Float64 `json:"decay,omitempty"` Offset *string `json:"offset,omitempty"` Origin GeoLocation `json:"origin,omitempty"` Scale *string `json:"scale,omitempty"` }
DecayPlacementGeoLocationDistance type.
func NewDecayPlacementGeoLocationDistance ¶
func NewDecayPlacementGeoLocationDistance() *DecayPlacementGeoLocationDistance
NewDecayPlacementGeoLocationDistance returns a DecayPlacementGeoLocationDistance.
type DecayPlacementdoubledouble ¶
type DecayPlacementdoubledouble struct { Decay *Float64 `json:"decay,omitempty"` Offset *Float64 `json:"offset,omitempty"` Origin *Float64 `json:"origin,omitempty"` Scale *Float64 `json:"scale,omitempty"` }
DecayPlacementdoubledouble type.
func NewDecayPlacementdoubledouble ¶
func NewDecayPlacementdoubledouble() *DecayPlacementdoubledouble
NewDecayPlacementdoubledouble returns a DecayPlacementdoubledouble.
type Defaults ¶
type Defaults struct { AnomalyDetectors AnomalyDetectors `json:"anomaly_detectors"` Datafeeds Datafeeds `json:"datafeeds"` }
Defaults type.
type Definition ¶
type Definition struct { // Preprocessors Collection of preprocessors Preprocessors []Preprocessor `json:"preprocessors,omitempty"` // TrainedModel The definition of the trained model. TrainedModel TrainedModel `json:"trained_model"` }
Definition type.
type DelayedDataCheckConfig ¶
type DelayedDataCheckConfig struct { // CheckWindow The window of time that is searched for late data. This window of time ends // with the latest finalized bucket. // It defaults to null, which causes an appropriate `check_window` to be // calculated when the real-time datafeed runs. // In particular, the default `check_window` span calculation is based on the // maximum of `2h` or `8 * bucket_span`. CheckWindow Duration `json:"check_window,omitempty"` // Enabled Specifies whether the datafeed periodically checks for delayed data. Enabled bool `json:"enabled"` }
DelayedDataCheckConfig type.
func NewDelayedDataCheckConfig ¶
func NewDelayedDataCheckConfig() *DelayedDataCheckConfig
NewDelayedDataCheckConfig returns a DelayedDataCheckConfig.
type DelimitedPayloadTokenFilter ¶
type DelimitedPayloadTokenFilter struct { Delimiter *string `json:"delimiter,omitempty"` Encoding *delimitedpayloadencoding.DelimitedPayloadEncoding `json:"encoding,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
DelimitedPayloadTokenFilter type.
func NewDelimitedPayloadTokenFilter ¶
func NewDelimitedPayloadTokenFilter() *DelimitedPayloadTokenFilter
NewDelimitedPayloadTokenFilter returns a DelimitedPayloadTokenFilter.
type DenseVectorIndexOptions ¶
type DenseVectorIndexOptions struct { EfConstruction int `json:"ef_construction"` M int `json:"m"` Type string `json:"type"` }
DenseVectorIndexOptions type.
func NewDenseVectorIndexOptions ¶
func NewDenseVectorIndexOptions() *DenseVectorIndexOptions
NewDenseVectorIndexOptions returns a DenseVectorIndexOptions.
type DenseVectorProperty ¶
type DenseVectorProperty struct { Dims int `json:"dims"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` IndexOptions *DenseVectorIndexOptions `json:"index_options,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Type string `json:"type,omitempty"` }
DenseVectorProperty type.
func NewDenseVectorProperty ¶
func NewDenseVectorProperty() *DenseVectorProperty
NewDenseVectorProperty returns a DenseVectorProperty.
func (*DenseVectorProperty) UnmarshalJSON ¶
func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error
type Deprecation ¶
type Deprecation struct { Details string `json:"details"` // Level The level property describes the significance of the issue. Level deprecationlevel.DeprecationLevel `json:"level"` Message string `json:"message"` Url string `json:"url"` }
Deprecation type.
type DeprecationIndexing ¶
type DeprecationIndexing struct {
Enabled string `json:"enabled"`
}
DeprecationIndexing type.
func NewDeprecationIndexing ¶
func NewDeprecationIndexing() *DeprecationIndexing
NewDeprecationIndexing returns a DeprecationIndexing.
type DerivativeAggregate ¶
type DerivativeAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` NormalizedValue *Float64 `json:"normalized_value,omitempty"` NormalizedValueAsString *string `json:"normalized_value_as_string,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
DerivativeAggregate type.
func NewDerivativeAggregate ¶
func NewDerivativeAggregate() *DerivativeAggregate
NewDerivativeAggregate returns a DerivativeAggregate.
type DerivativeAggregation ¶
type DerivativeAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
DerivativeAggregation type.
func NewDerivativeAggregation ¶
func NewDerivativeAggregation() *DerivativeAggregation
NewDerivativeAggregation returns a DerivativeAggregation.
func (*DerivativeAggregation) UnmarshalJSON ¶
func (s *DerivativeAggregation) UnmarshalJSON(data []byte) error
type DetectionRule ¶
type DetectionRule struct { // Actions The set of actions to be triggered when the rule applies. If more than one // action is specified the effects of all actions are combined. Actions []ruleaction.RuleAction `json:"actions,omitempty"` // Conditions An array of numeric conditions when the rule applies. A rule must either have // a non-empty scope or at least one condition. Multiple conditions are combined // together with a logical AND. Conditions []RuleCondition `json:"conditions,omitempty"` // Scope A scope of series where the rule applies. A rule must either have a non-empty // scope or at least one condition. By default, the scope includes all series. // Scoping is allowed for any of the fields that are also specified in // `by_field_name`, `over_field_name`, or `partition_field_name`. Scope map[string]FilterRef `json:"scope,omitempty"` }
DetectionRule type.
func NewDetectionRule ¶
func NewDetectionRule() *DetectionRule
NewDetectionRule returns a DetectionRule.
type Detector ¶
type Detector struct { // ByFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to their own history. It is used for // finding unusual values in the context of the split. ByFieldName *string `json:"by_field_name,omitempty"` // CustomRules Custom rules enable you to customize the way detectors operate. For example, // a rule may dictate conditions under which results should be skipped. Kibana // refers to custom rules as job rules. CustomRules []DetectionRule `json:"custom_rules,omitempty"` // DetectorDescription A description of the detector. DetectorDescription *string `json:"detector_description,omitempty"` // DetectorIndex A unique identifier for the detector. This identifier is based on the order // of the detectors in the `analysis_config`, starting at zero. If you specify a // value for this property, it is ignored. DetectorIndex *int `json:"detector_index,omitempty"` // ExcludeFrequent If set, frequent entities are excluded from influencing the anomaly results. // Entities can be considered frequent over time or frequent in a population. If // you are working with both over and by fields, you can set `exclude_frequent` // to `all` for both fields, or to `by` or `over` for those specific fields. ExcludeFrequent *excludefrequent.ExcludeFrequent `json:"exclude_frequent,omitempty"` // FieldName The field that the detector uses in the function. If you use an event rate // function such as count or rare, do not specify this field. The `field_name` // cannot contain double quotes or backslashes. FieldName *string `json:"field_name,omitempty"` // Function The analysis function that is used. For example, `count`, `rare`, `mean`, // `min`, `max`, or `sum`. Function *string `json:"function,omitempty"` // OverFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to the history of all splits. 
It is used // for finding unusual values in the population of all splits. OverFieldName *string `json:"over_field_name,omitempty"` // PartitionFieldName The field used to segment the analysis. When you use this property, you have // completely independent baselines for each value of this field. PartitionFieldName *string `json:"partition_field_name,omitempty"` // UseNull Defines whether a new series is used as the null series when there is no // value for the by or partition fields. UseNull *bool `json:"use_null,omitempty"` }
Detector type.
type DetectorRead ¶
type DetectorRead struct { // ByFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to their own history. It is used for // finding unusual values in the context of the split. ByFieldName *string `json:"by_field_name,omitempty"` // CustomRules Custom rules enable you to customize the way detectors operate. For example, // a rule may dictate conditions under which results should be skipped. Kibana // refers to custom rules as job rules. CustomRules []DetectionRule `json:"custom_rules,omitempty"` // DetectorDescription A description of the detector. DetectorDescription *string `json:"detector_description,omitempty"` // DetectorIndex A unique identifier for the detector. This identifier is based on the order // of the detectors in the `analysis_config`, starting at zero. If you specify a // value for this property, it is ignored. DetectorIndex *int `json:"detector_index,omitempty"` // ExcludeFrequent If set, frequent entities are excluded from influencing the anomaly results. // Entities can be considered frequent over time or frequent in a population. If // you are working with both over and by fields, you can set `exclude_frequent` // to `all` for both fields, or to `by` or `over` for those specific fields. ExcludeFrequent *excludefrequent.ExcludeFrequent `json:"exclude_frequent,omitempty"` // FieldName The field that the detector uses in the function. If you use an event rate // function such as count or rare, do not specify this field. The `field_name` // cannot contain double quotes or backslashes. FieldName *string `json:"field_name,omitempty"` // Function The analysis function that is used. For example, `count`, `rare`, `mean`, // `min`, `max`, or `sum`. Function string `json:"function"` // OverFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to the history of all splits. 
It is used // for finding unusual values in the population of all splits. OverFieldName *string `json:"over_field_name,omitempty"` // PartitionFieldName The field used to segment the analysis. When you use this property, you have // completely independent baselines for each value of this field. PartitionFieldName *string `json:"partition_field_name,omitempty"` // UseNull Defines whether a new series is used as the null series when there is no // value for the by or partition fields. UseNull *bool `json:"use_null,omitempty"` }
DetectorRead type.
func NewDetectorRead ¶
func NewDetectorRead() *DetectorRead
NewDetectorRead returns a DetectorRead.
type DictionaryDecompounderTokenFilter ¶
type DictionaryDecompounderTokenFilter struct { HyphenationPatternsPath *string `json:"hyphenation_patterns_path,omitempty"` MaxSubwordSize *int `json:"max_subword_size,omitempty"` MinSubwordSize *int `json:"min_subword_size,omitempty"` MinWordSize *int `json:"min_word_size,omitempty"` OnlyLongestMatch *bool `json:"only_longest_match,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` WordList []string `json:"word_list,omitempty"` WordListPath *string `json:"word_list_path,omitempty"` }
DictionaryDecompounderTokenFilter type.
func NewDictionaryDecompounderTokenFilter ¶
func NewDictionaryDecompounderTokenFilter() *DictionaryDecompounderTokenFilter
NewDictionaryDecompounderTokenFilter returns a DictionaryDecompounderTokenFilter.
type DirectGenerator ¶
type DirectGenerator struct { Field string `json:"field"` MaxEdits *int `json:"max_edits,omitempty"` MaxInspections *float32 `json:"max_inspections,omitempty"` MaxTermFreq *float32 `json:"max_term_freq,omitempty"` MinDocFreq *float32 `json:"min_doc_freq,omitempty"` MinWordLength *int `json:"min_word_length,omitempty"` PostFilter *string `json:"post_filter,omitempty"` PreFilter *string `json:"pre_filter,omitempty"` PrefixLength *int `json:"prefix_length,omitempty"` Size *int `json:"size,omitempty"` SuggestMode *suggestmode.SuggestMode `json:"suggest_mode,omitempty"` }
DirectGenerator type.
func NewDirectGenerator ¶
func NewDirectGenerator() *DirectGenerator
NewDirectGenerator returns a DirectGenerator.
type DisMaxQuery ¶
type DisMaxQuery struct { Boost *float32 `json:"boost,omitempty"` Queries []Query `json:"queries"` QueryName_ *string `json:"_name,omitempty"` TieBreaker *Float64 `json:"tie_breaker,omitempty"` }
DisMaxQuery type.
type Discovery ¶
type Discovery struct { ClusterApplierStats *ClusterAppliedStats `json:"cluster_applier_stats,omitempty"` ClusterStateQueue *ClusterStateQueue `json:"cluster_state_queue,omitempty"` ClusterStateUpdate map[string]ClusterStateUpdate `json:"cluster_state_update,omitempty"` PublishedClusterStates *PublishedClusterStates `json:"published_cluster_states,omitempty"` SerializedClusterStates *SerializedClusterState `json:"serialized_cluster_states,omitempty"` }
Discovery type.
type DiscoveryNode ¶
type DiscoveryNode struct { Attributes map[string]string `json:"attributes"` EphemeralId string `json:"ephemeral_id"` Id string `json:"id"` Name string `json:"name"` TransportAddress string `json:"transport_address"` }
DiscoveryNode type.
func NewDiscoveryNode ¶
func NewDiscoveryNode() *DiscoveryNode
NewDiscoveryNode returns a DiscoveryNode.
type DiskUsage ¶
type DiskUsage struct { FreeBytes int64 `json:"free_bytes"` FreeDiskPercent Float64 `json:"free_disk_percent"` Path string `json:"path"` TotalBytes int64 `json:"total_bytes"` UsedBytes int64 `json:"used_bytes"` UsedDiskPercent Float64 `json:"used_disk_percent"` }
DiskUsage type.
type DissectProcessor ¶
type DissectProcessor struct { AppendSeparator *string `json:"append_separator,omitempty"` Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Pattern string `json:"pattern"` Tag *string `json:"tag,omitempty"` }
DissectProcessor type.
func NewDissectProcessor ¶
func NewDissectProcessor() *DissectProcessor
NewDissectProcessor returns a DissectProcessor.
type DistanceFeatureQuery ¶
type DistanceFeatureQuery interface{}
DistanceFeatureQuery holds the union for the following types:
GeoDistanceFeatureQuery DateDistanceFeatureQuery
type DistanceFeatureQueryBaseDateMathDuration ¶
type DistanceFeatureQueryBaseDateMathDuration struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` Origin string `json:"origin"` Pivot Duration `json:"pivot"` QueryName_ *string `json:"_name,omitempty"` }
DistanceFeatureQueryBaseDateMathDuration type.
func NewDistanceFeatureQueryBaseDateMathDuration ¶
func NewDistanceFeatureQueryBaseDateMathDuration() *DistanceFeatureQueryBaseDateMathDuration
NewDistanceFeatureQueryBaseDateMathDuration returns a DistanceFeatureQueryBaseDateMathDuration.
type DistanceFeatureQueryBaseGeoLocationDistance ¶
type DistanceFeatureQueryBaseGeoLocationDistance struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` Origin GeoLocation `json:"origin"` Pivot string `json:"pivot"` QueryName_ *string `json:"_name,omitempty"` }
DistanceFeatureQueryBaseGeoLocationDistance type.
func NewDistanceFeatureQueryBaseGeoLocationDistance ¶
func NewDistanceFeatureQueryBaseGeoLocationDistance() *DistanceFeatureQueryBaseGeoLocationDistance
NewDistanceFeatureQueryBaseGeoLocationDistance returns a DistanceFeatureQueryBaseGeoLocationDistance.
type DiversifiedSamplerAggregation ¶
type DiversifiedSamplerAggregation struct { ExecutionHint *sampleraggregationexecutionhint.SamplerAggregationExecutionHint `json:"execution_hint,omitempty"` Field *string `json:"field,omitempty"` MaxDocsPerValue *int `json:"max_docs_per_value,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Script Script `json:"script,omitempty"` ShardSize *int `json:"shard_size,omitempty"` }
DiversifiedSamplerAggregation type.
func NewDiversifiedSamplerAggregation ¶
func NewDiversifiedSamplerAggregation() *DiversifiedSamplerAggregation
NewDiversifiedSamplerAggregation returns a DiversifiedSamplerAggregation.
type Document ¶
type Document struct { Id_ *string `json:"_id,omitempty"` Index_ *string `json:"_index,omitempty"` Source_ json.RawMessage `json:"_source,omitempty"` }
Document type.
type DocumentRating ¶
type DocumentRating struct { // Id_ The document ID. Id_ string `json:"_id"` // Index_ The document’s index. For data streams, this should be the document’s backing // index. Index_ string `json:"_index"` // Rating The document’s relevance with regard to this search request. Rating int `json:"rating"` }
DocumentRating type.
func NewDocumentRating ¶
func NewDocumentRating() *DocumentRating
NewDocumentRating returns a DocumentRating.
type DocumentSimulation ¶
type DocumentSimulation struct { DocumentSimulation map[string]string `json:"-"` Id_ string `json:"_id"` Index_ string `json:"_index"` Ingest_ SimulateIngest `json:"_ingest"` Routing_ *string `json:"_routing,omitempty"` Source_ map[string]json.RawMessage `json:"_source"` VersionType_ *versiontype.VersionType `json:"_version_type,omitempty"` Version_ StringifiedVersionNumber `json:"_version,omitempty"` }
DocumentSimulation type.
func NewDocumentSimulation ¶
func NewDocumentSimulation() *DocumentSimulation
NewDocumentSimulation returns a DocumentSimulation.
func (DocumentSimulation) MarshalJSON ¶
func (s DocumentSimulation) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type DotExpanderProcessor ¶
type DotExpanderProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Path *string `json:"path,omitempty"` Tag *string `json:"tag,omitempty"` }
DotExpanderProcessor type.
func NewDotExpanderProcessor ¶
func NewDotExpanderProcessor() *DotExpanderProcessor
NewDotExpanderProcessor returns a DotExpanderProcessor.
type DoubleNumberProperty ¶
type DoubleNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *Float64 `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
DoubleNumberProperty type.
func NewDoubleNumberProperty ¶
func NewDoubleNumberProperty() *DoubleNumberProperty
NewDoubleNumberProperty returns a DoubleNumberProperty.
func (*DoubleNumberProperty) UnmarshalJSON ¶
func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error
type DoubleRangeProperty ¶
type DoubleRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
DoubleRangeProperty type.
func NewDoubleRangeProperty ¶
func NewDoubleRangeProperty() *DoubleRangeProperty
NewDoubleRangeProperty returns a DoubleRangeProperty.
func (*DoubleRangeProperty) UnmarshalJSON ¶
func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error
type DoubleTermsAggregate ¶
type DoubleTermsAggregate struct { Buckets BucketsDoubleTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
DoubleTermsAggregate type.
func NewDoubleTermsAggregate ¶
func NewDoubleTermsAggregate() *DoubleTermsAggregate
NewDoubleTermsAggregate returns a DoubleTermsAggregate.
func (*DoubleTermsAggregate) UnmarshalJSON ¶
func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error
type DoubleTermsBucket ¶
type DoubleTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` DocCountError *int64 `json:"doc_count_error,omitempty"` Key Float64 `json:"key"` KeyAsString *string `json:"key_as_string,omitempty"` }
DoubleTermsBucket type.
func NewDoubleTermsBucket ¶
func NewDoubleTermsBucket() *DoubleTermsBucket
NewDoubleTermsBucket returns a DoubleTermsBucket.
func (DoubleTermsBucket) MarshalJSON ¶
func (s DoubleTermsBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*DoubleTermsBucket) UnmarshalJSON ¶
func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error
type DownsampleConfig ¶
type DownsampleConfig struct {
FixedInterval string `json:"fixed_interval"`
}
DownsampleConfig type.
func NewDownsampleConfig ¶
func NewDownsampleConfig() *DownsampleConfig
NewDownsampleConfig returns a DownsampleConfig.
type DropProcessor ¶
type DropProcessor struct { Description *string `json:"description,omitempty"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` }
DropProcessor type.
func NewDropProcessor ¶
func NewDropProcessor() *DropProcessor
NewDropProcessor returns a DropProcessor.
type DurationValueUnitFloatMillis ¶
type DurationValueUnitFloatMillis Float64
DurationValueUnitFloatMillis type alias.
type DurationValueUnitMillis ¶
type DurationValueUnitMillis int64
DurationValueUnitMillis type alias.
type DurationValueUnitSeconds ¶
type DurationValueUnitSeconds int64
DurationValueUnitSeconds type alias.
type DutchAnalyzer ¶
type DutchAnalyzer struct { Stopwords []string `json:"stopwords,omitempty"` Type string `json:"type,omitempty"` }
DutchAnalyzer type.
func NewDutchAnalyzer ¶
func NewDutchAnalyzer() *DutchAnalyzer
NewDutchAnalyzer returns a DutchAnalyzer.
type DynamicProperty ¶
type DynamicProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` EagerGlobalOrdinals *bool `json:"eager_global_ordinals,omitempty"` Enabled *bool `json:"enabled,omitempty"` Fields map[string]Property `json:"fields,omitempty"` Format *string `json:"format,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` IndexOptions *indexoptions.IndexOptions `json:"index_options,omitempty"` IndexPhrases *bool `json:"index_phrases,omitempty"` IndexPrefixes *TextIndexPrefixes `json:"index_prefixes,omitempty"` Locale *string `json:"locale,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Norms *bool `json:"norms,omitempty"` NullValue FieldValue `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` PositionIncrementGap *int `json:"position_increment_gap,omitempty"` PrecisionStep *int `json:"precision_step,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` SearchAnalyzer *string `json:"search_analyzer,omitempty"` SearchQuoteAnalyzer *string `json:"search_quote_analyzer,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` TermVector *termvectoroption.TermVectorOption `json:"term_vector,omitempty"` TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
DynamicProperty type.
func NewDynamicProperty ¶
func NewDynamicProperty() *DynamicProperty
NewDynamicProperty returns a DynamicProperty.
func (*DynamicProperty) UnmarshalJSON ¶
func (s *DynamicProperty) UnmarshalJSON(data []byte) error
type DynamicTemplate ¶
type DynamicTemplate struct { Mapping Property `json:"mapping,omitempty"` Match *string `json:"match,omitempty"` MatchMappingType *string `json:"match_mapping_type,omitempty"` MatchPattern *matchtype.MatchType `json:"match_pattern,omitempty"` PathMatch *string `json:"path_match,omitempty"` PathUnmatch *string `json:"path_unmatch,omitempty"` Unmatch *string `json:"unmatch,omitempty"` }
DynamicTemplate type.
func NewDynamicTemplate ¶
func NewDynamicTemplate() *DynamicTemplate
NewDynamicTemplate returns a DynamicTemplate.
func (*DynamicTemplate) UnmarshalJSON ¶
func (s *DynamicTemplate) UnmarshalJSON(data []byte) error
type EdgeNGramTokenFilter ¶
type EdgeNGramTokenFilter struct { MaxGram *int `json:"max_gram,omitempty"` MinGram *int `json:"min_gram,omitempty"` PreserveOriginal *bool `json:"preserve_original,omitempty"` Side *edgengramside.EdgeNGramSide `json:"side,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
EdgeNGramTokenFilter type.
func NewEdgeNGramTokenFilter ¶
func NewEdgeNGramTokenFilter() *EdgeNGramTokenFilter
NewEdgeNGramTokenFilter returns a EdgeNGramTokenFilter.
type EdgeNGramTokenizer ¶
type EdgeNGramTokenizer struct { CustomTokenChars *string `json:"custom_token_chars,omitempty"` MaxGram int `json:"max_gram"` MinGram int `json:"min_gram"` TokenChars []tokenchar.TokenChar `json:"token_chars"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
EdgeNGramTokenizer type.
func NewEdgeNGramTokenizer ¶
func NewEdgeNGramTokenizer() *EdgeNGramTokenizer
NewEdgeNGramTokenizer returns a EdgeNGramTokenizer.
type ElasticsearchError ¶
type ElasticsearchError struct { ErrorCause ErrorCause `json:"error"` Status int `json:"status"` }
An ElasticsearchError represent the exception raised by the server and sent as json payloads.
func NewElasticsearchError ¶
func NewElasticsearchError() *ElasticsearchError
NewElasticsearchError returns a ElasticsearchError.
func (ElasticsearchError) As ¶
func (e ElasticsearchError) As(err interface{}) bool
As implements errors.As interface to allow type matching of ElasticsearchError.
func (ElasticsearchError) Error ¶
func (e ElasticsearchError) Error() string
Error implements error string serialization of the ElasticsearchError.
func (ElasticsearchError) Is ¶
func (e ElasticsearchError) Is(err error) bool
Is implements the errors.Is interface to allow value comparison within ElasticsearchError. It checks only the always-present values: Status & ErrorCause.Type.
type ElasticsearchVersionInfo ¶
type ElasticsearchVersionInfo struct { BuildDate DateTime `json:"build_date"` BuildFlavor string `json:"build_flavor"` BuildHash string `json:"build_hash"` BuildSnapshot bool `json:"build_snapshot"` BuildType string `json:"build_type"` Int string `json:"number"` LuceneVersion string `json:"lucene_version"` MinimumIndexCompatibilityVersion string `json:"minimum_index_compatibility_version"` MinimumWireCompatibilityVersion string `json:"minimum_wire_compatibility_version"` }
ElasticsearchVersionInfo type.
func NewElasticsearchVersionInfo ¶
func NewElasticsearchVersionInfo() *ElasticsearchVersionInfo
NewElasticsearchVersionInfo returns an ElasticsearchVersionInfo.
type ElisionTokenFilter ¶
type ElisionTokenFilter struct { Articles []string `json:"articles,omitempty"` ArticlesCase *bool `json:"articles_case,omitempty"` ArticlesPath *string `json:"articles_path,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
ElisionTokenFilter type.
func NewElisionTokenFilter ¶
func NewElisionTokenFilter() *ElisionTokenFilter
NewElisionTokenFilter returns an ElisionTokenFilter.
type Email ¶
type Email struct { Attachments map[string]EmailAttachmentContainer `json:"attachments,omitempty"` Bcc []string `json:"bcc,omitempty"` Body *EmailBody `json:"body,omitempty"` Cc []string `json:"cc,omitempty"` From *string `json:"from,omitempty"` Id *string `json:"id,omitempty"` Priority *emailpriority.EmailPriority `json:"priority,omitempty"` ReplyTo []string `json:"reply_to,omitempty"` SentDate DateTime `json:"sent_date,omitempty"` Subject string `json:"subject"` To []string `json:"to"` }
Email type.
type EmailAction ¶
type EmailAction struct { Attachments map[string]EmailAttachmentContainer `json:"attachments,omitempty"` Bcc []string `json:"bcc,omitempty"` Body *EmailBody `json:"body,omitempty"` Cc []string `json:"cc,omitempty"` From *string `json:"from,omitempty"` Id *string `json:"id,omitempty"` Priority *emailpriority.EmailPriority `json:"priority,omitempty"` ReplyTo []string `json:"reply_to,omitempty"` SentDate DateTime `json:"sent_date,omitempty"` Subject string `json:"subject"` To []string `json:"to"` }
EmailAction type.
type EmailAttachmentContainer ¶
type EmailAttachmentContainer struct { Data *DataEmailAttachment `json:"data,omitempty"` Http *HttpEmailAttachment `json:"http,omitempty"` Reporting *ReportingEmailAttachment `json:"reporting,omitempty"` }
EmailAttachmentContainer type.
func NewEmailAttachmentContainer ¶
func NewEmailAttachmentContainer() *EmailAttachmentContainer
NewEmailAttachmentContainer returns an EmailAttachmentContainer.
type EmailResult ¶
type EmailResult struct { Account *string `json:"account,omitempty"` Message Email `json:"message"` Reason *string `json:"reason,omitempty"` }
EmailResult type.
type EnrichPolicy ¶
type EnrichPolicy struct { ElasticsearchVersion *string `json:"elasticsearch_version,omitempty"` EnrichFields []string `json:"enrich_fields"` Indices []string `json:"indices"` MatchField string `json:"match_field"` Name *string `json:"name,omitempty"` Query *string `json:"query,omitempty"` }
EnrichPolicy type.
func NewEnrichPolicy ¶
func NewEnrichPolicy() *EnrichPolicy
NewEnrichPolicy returns an EnrichPolicy.
type EnrichProcessor ¶
type EnrichProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` MaxMatches *int `json:"max_matches,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Override *bool `json:"override,omitempty"` PolicyName string `json:"policy_name"` ShapeRelation *geoshaperelation.GeoShapeRelation `json:"shape_relation,omitempty"` Tag *string `json:"tag,omitempty"` TargetField string `json:"target_field"` }
EnrichProcessor type.
func NewEnrichProcessor ¶
func NewEnrichProcessor() *EnrichProcessor
NewEnrichProcessor returns an EnrichProcessor.
type Ensemble ¶
type Ensemble struct { AggregateOutput *AggregateOutput `json:"aggregate_output,omitempty"` ClassificationLabels []string `json:"classification_labels,omitempty"` FeatureNames []string `json:"feature_names,omitempty"` TargetType *string `json:"target_type,omitempty"` TrainedModels []TrainedModel `json:"trained_models"` }
Ensemble type.
type Eql ¶
type Eql struct { Available bool `json:"available"` Enabled bool `json:"enabled"` Features EqlFeatures `json:"features"` Queries map[string]XpackQuery `json:"queries"` }
Eql type.
type EqlFeatures ¶
type EqlFeatures struct { Event uint `json:"event"` Join uint `json:"join"` Joins EqlFeaturesJoin `json:"joins"` Keys EqlFeaturesKeys `json:"keys"` Pipes EqlFeaturesPipes `json:"pipes"` Sequence uint `json:"sequence"` Sequences EqlFeaturesSequences `json:"sequences"` }
EqlFeatures type.
type EqlFeaturesJoin ¶
type EqlFeaturesJoin struct { JoinQueriesFiveOrMore uint `json:"join_queries_five_or_more"` JoinQueriesFour uint `json:"join_queries_four"` JoinQueriesThree uint `json:"join_queries_three"` JoinQueriesTwo uint `json:"join_queries_two"` JoinUntil uint `json:"join_until"` }
EqlFeaturesJoin type.
func NewEqlFeaturesJoin ¶
func NewEqlFeaturesJoin() *EqlFeaturesJoin
NewEqlFeaturesJoin returns an EqlFeaturesJoin.
type EqlFeaturesKeys ¶
type EqlFeaturesKeys struct { JoinKeysFiveOrMore uint `json:"join_keys_five_or_more"` JoinKeysFour uint `json:"join_keys_four"` JoinKeysOne uint `json:"join_keys_one"` JoinKeysThree uint `json:"join_keys_three"` JoinKeysTwo uint `json:"join_keys_two"` }
EqlFeaturesKeys type.
func NewEqlFeaturesKeys ¶
func NewEqlFeaturesKeys() *EqlFeaturesKeys
NewEqlFeaturesKeys returns an EqlFeaturesKeys.
type EqlFeaturesPipes ¶
EqlFeaturesPipes type.
func NewEqlFeaturesPipes ¶
func NewEqlFeaturesPipes() *EqlFeaturesPipes
NewEqlFeaturesPipes returns an EqlFeaturesPipes.
type EqlFeaturesSequences ¶
type EqlFeaturesSequences struct { SequenceMaxspan uint `json:"sequence_maxspan"` SequenceQueriesFiveOrMore uint `json:"sequence_queries_five_or_more"` SequenceQueriesFour uint `json:"sequence_queries_four"` SequenceQueriesThree uint `json:"sequence_queries_three"` SequenceQueriesTwo uint `json:"sequence_queries_two"` SequenceUntil uint `json:"sequence_until"` }
EqlFeaturesSequences type.
func NewEqlFeaturesSequences ¶
func NewEqlFeaturesSequences() *EqlFeaturesSequences
NewEqlFeaturesSequences returns an EqlFeaturesSequences.
type EqlHits ¶
type EqlHits struct { // Events Contains events matching the query. Each object represents a matching event. Events []HitsEvent `json:"events,omitempty"` // Sequences Contains event sequences matching the query. Each object represents a // matching sequence. This parameter is only returned for EQL queries containing // a sequence. Sequences []HitsSequence `json:"sequences,omitempty"` // Total Metadata about the number of matching events or sequences. Total *TotalHits `json:"total,omitempty"` }
EqlHits type.
type ErrorCause ¶
type ErrorCause struct { CausedBy *ErrorCause `json:"caused_by,omitempty"` Metadata map[string]json.RawMessage `json:"-"` // Reason A human-readable explanation of the error, in English Reason *string `json:"reason,omitempty"` RootCause []ErrorCause `json:"root_cause,omitempty"` // StackTrace The server stack trace. Present only if the `error_trace=true` parameter was // sent with the request. StackTrace *string `json:"stack_trace,omitempty"` Suppressed []ErrorCause `json:"suppressed,omitempty"` // Type The type of error Type string `json:"type"` }
ErrorCause type.
func (ErrorCause) MarshalJSON ¶
func (s ErrorCause) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties.
type EwmaModelSettings ¶
type EwmaModelSettings struct {
Alpha *float32 `json:"alpha,omitempty"`
}
EwmaModelSettings type.
func NewEwmaModelSettings ¶
func NewEwmaModelSettings() *EwmaModelSettings
NewEwmaModelSettings returns an EwmaModelSettings.
type EwmaMovingAverageAggregation ¶
type EwmaMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings EwmaModelSettings `json:"settings"` Window *int `json:"window,omitempty"` }
EwmaMovingAverageAggregation type.
func NewEwmaMovingAverageAggregation ¶
func NewEwmaMovingAverageAggregation() *EwmaMovingAverageAggregation
NewEwmaMovingAverageAggregation returns an EwmaMovingAverageAggregation.
func (*EwmaMovingAverageAggregation) UnmarshalJSON ¶
func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error
type ExecuteEnrichPolicyStatus ¶
type ExecuteEnrichPolicyStatus struct {
Phase enrichpolicyphase.EnrichPolicyPhase `json:"phase"`
}
ExecuteEnrichPolicyStatus type.
func NewExecuteEnrichPolicyStatus ¶
func NewExecuteEnrichPolicyStatus() *ExecuteEnrichPolicyStatus
NewExecuteEnrichPolicyStatus returns an ExecuteEnrichPolicyStatus.
type ExecutingPolicy ¶
ExecutingPolicy type.
func NewExecutingPolicy ¶
func NewExecutingPolicy() *ExecutingPolicy
NewExecutingPolicy returns an ExecutingPolicy.
type ExecutionResult ¶
type ExecutionResult struct { Actions []ExecutionResultAction `json:"actions"` Condition ExecutionResultCondition `json:"condition"` ExecutionDuration int64 `json:"execution_duration"` ExecutionTime DateTime `json:"execution_time"` Input ExecutionResultInput `json:"input"` }
ExecutionResult type.
func NewExecutionResult ¶
func NewExecutionResult() *ExecutionResult
NewExecutionResult returns an ExecutionResult.
type ExecutionResultAction ¶
type ExecutionResultAction struct { Email *EmailResult `json:"email,omitempty"` Error *ErrorCause `json:"error,omitempty"` Id string `json:"id"` Index *IndexResult `json:"index,omitempty"` Logging *LoggingResult `json:"logging,omitempty"` Pagerduty *PagerDutyResult `json:"pagerduty,omitempty"` Reason *string `json:"reason,omitempty"` Slack *SlackResult `json:"slack,omitempty"` Status actionstatusoptions.ActionStatusOptions `json:"status"` Type actiontype.ActionType `json:"type"` Webhook *WebhookResult `json:"webhook,omitempty"` }
ExecutionResultAction type.
func NewExecutionResultAction ¶
func NewExecutionResultAction() *ExecutionResultAction
NewExecutionResultAction returns an ExecutionResultAction.
type ExecutionResultCondition ¶
type ExecutionResultCondition struct { Met bool `json:"met"` Status actionstatusoptions.ActionStatusOptions `json:"status"` Type conditiontype.ConditionType `json:"type"` }
ExecutionResultCondition type.
func NewExecutionResultCondition ¶
func NewExecutionResultCondition() *ExecutionResultCondition
NewExecutionResultCondition returns an ExecutionResultCondition.
type ExecutionResultInput ¶
type ExecutionResultInput struct { Payload map[string]json.RawMessage `json:"payload"` Status actionstatusoptions.ActionStatusOptions `json:"status"` Type inputtype.InputType `json:"type"` }
ExecutionResultInput type.
func NewExecutionResultInput ¶
func NewExecutionResultInput() *ExecutionResultInput
NewExecutionResultInput returns an ExecutionResultInput.
type ExecutionState ¶
type ExecutionState struct { Reason *string `json:"reason,omitempty"` Successful bool `json:"successful"` Timestamp DateTime `json:"timestamp"` }
ExecutionState type.
func NewExecutionState ¶
func NewExecutionState() *ExecutionState
NewExecutionState returns an ExecutionState.
type ExecutionThreadPool ¶
type ExecutionThreadPool struct { MaxSize int64 `json:"max_size"` QueueSize int64 `json:"queue_size"` }
ExecutionThreadPool type.
func NewExecutionThreadPool ¶
func NewExecutionThreadPool() *ExecutionThreadPool
NewExecutionThreadPool returns an ExecutionThreadPool.
type ExistsQuery ¶
type ExistsQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` QueryName_ *string `json:"_name,omitempty"` }
ExistsQuery type.
type ExpandWildcards ¶
type ExpandWildcards []expandwildcard.ExpandWildcard
ExpandWildcards type alias.
type ExplainAnalyzeToken ¶
type ExplainAnalyzeToken struct { Bytes string `json:"bytes"` EndOffset int64 `json:"end_offset"` ExplainAnalyzeToken map[string]json.RawMessage `json:"-"` Keyword *bool `json:"keyword,omitempty"` Position int64 `json:"position"` PositionLength int64 `json:"positionLength"` StartOffset int64 `json:"start_offset"` TermFrequency int64 `json:"termFrequency"` Token string `json:"token"` Type string `json:"type"` }
ExplainAnalyzeToken type.
func NewExplainAnalyzeToken ¶
func NewExplainAnalyzeToken() *ExplainAnalyzeToken
NewExplainAnalyzeToken returns an ExplainAnalyzeToken.
func (ExplainAnalyzeToken) MarshalJSON ¶
func (s ExplainAnalyzeToken) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties.
type Explanation ¶
type Explanation struct { Description string `json:"description"` Details []ExplanationDetail `json:"details"` Value float32 `json:"value"` }
Explanation type.
type ExplanationDetail ¶
type ExplanationDetail struct { Description string `json:"description"` Details []ExplanationDetail `json:"details,omitempty"` Value float32 `json:"value"` }
ExplanationDetail type.
func NewExplanationDetail ¶
func NewExplanationDetail() *ExplanationDetail
NewExplanationDetail returns an ExplanationDetail.
type ExploreControls ¶
type ExploreControls struct { SampleDiversity *SampleDiversity `json:"sample_diversity,omitempty"` SampleSize *int `json:"sample_size,omitempty"` Timeout Duration `json:"timeout,omitempty"` UseSignificance bool `json:"use_significance"` }
ExploreControls type.
func NewExploreControls ¶
func NewExploreControls() *ExploreControls
NewExploreControls returns an ExploreControls.
type ExtendedBoundsFieldDateMath ¶
type ExtendedBoundsFieldDateMath struct { Max FieldDateMath `json:"max"` Min FieldDateMath `json:"min"` }
ExtendedBoundsFieldDateMath type.
func NewExtendedBoundsFieldDateMath ¶
func NewExtendedBoundsFieldDateMath() *ExtendedBoundsFieldDateMath
NewExtendedBoundsFieldDateMath returns an ExtendedBoundsFieldDateMath.
type ExtendedBoundsdouble ¶
ExtendedBoundsdouble type.
func NewExtendedBoundsdouble ¶
func NewExtendedBoundsdouble() *ExtendedBoundsdouble
NewExtendedBoundsdouble returns an ExtendedBoundsdouble.
type ExtendedMemoryStats ¶
type ExtendedMemoryStats struct { AdjustedTotalInBytes *int64 `json:"adjusted_total_in_bytes,omitempty"` FreeInBytes *int64 `json:"free_in_bytes,omitempty"` FreePercent *int `json:"free_percent,omitempty"` Resident *string `json:"resident,omitempty"` ResidentInBytes *int64 `json:"resident_in_bytes,omitempty"` TotalInBytes *int64 `json:"total_in_bytes,omitempty"` TotalVirtual *string `json:"total_virtual,omitempty"` TotalVirtualInBytes *int64 `json:"total_virtual_in_bytes,omitempty"` UsedInBytes *int64 `json:"used_in_bytes,omitempty"` UsedPercent *int `json:"used_percent,omitempty"` }
ExtendedMemoryStats type.
func NewExtendedMemoryStats ¶
func NewExtendedMemoryStats() *ExtendedMemoryStats
NewExtendedMemoryStats returns an ExtendedMemoryStats.
type ExtendedStatsAggregate ¶
type ExtendedStatsAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` Max Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Min Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` StdDeviation Float64 `json:"std_deviation,omitempty"` StdDeviationAsString *string `json:"std_deviation_as_string,omitempty"` StdDeviationBounds *StandardDeviationBounds `json:"std_deviation_bounds,omitempty"` StdDeviationBoundsAsString *StandardDeviationBoundsAsString `json:"std_deviation_bounds_as_string,omitempty"` StdDeviationPopulation Float64 `json:"std_deviation_population,omitempty"` StdDeviationSampling Float64 `json:"std_deviation_sampling,omitempty"` Sum Float64 `json:"sum"` SumAsString *string `json:"sum_as_string,omitempty"` SumOfSquares Float64 `json:"sum_of_squares,omitempty"` SumOfSquaresAsString *string `json:"sum_of_squares_as_string,omitempty"` Variance Float64 `json:"variance,omitempty"` VarianceAsString *string `json:"variance_as_string,omitempty"` VariancePopulation Float64 `json:"variance_population,omitempty"` VariancePopulationAsString *string `json:"variance_population_as_string,omitempty"` VarianceSampling Float64 `json:"variance_sampling,omitempty"` VarianceSamplingAsString *string `json:"variance_sampling_as_string,omitempty"` }
ExtendedStatsAggregate type.
func NewExtendedStatsAggregate ¶
func NewExtendedStatsAggregate() *ExtendedStatsAggregate
NewExtendedStatsAggregate returns an ExtendedStatsAggregate.
type ExtendedStatsAggregation ¶
type ExtendedStatsAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` Sigma *Float64 `json:"sigma,omitempty"` }
ExtendedStatsAggregation type.
func NewExtendedStatsAggregation ¶
func NewExtendedStatsAggregation() *ExtendedStatsAggregation
NewExtendedStatsAggregation returns an ExtendedStatsAggregation.
type ExtendedStatsBucketAggregate ¶
type ExtendedStatsBucketAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` Max Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Min Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` StdDeviation Float64 `json:"std_deviation,omitempty"` StdDeviationAsString *string `json:"std_deviation_as_string,omitempty"` StdDeviationBounds *StandardDeviationBounds `json:"std_deviation_bounds,omitempty"` StdDeviationBoundsAsString *StandardDeviationBoundsAsString `json:"std_deviation_bounds_as_string,omitempty"` StdDeviationPopulation Float64 `json:"std_deviation_population,omitempty"` StdDeviationSampling Float64 `json:"std_deviation_sampling,omitempty"` Sum Float64 `json:"sum"` SumAsString *string `json:"sum_as_string,omitempty"` SumOfSquares Float64 `json:"sum_of_squares,omitempty"` SumOfSquaresAsString *string `json:"sum_of_squares_as_string,omitempty"` Variance Float64 `json:"variance,omitempty"` VarianceAsString *string `json:"variance_as_string,omitempty"` VariancePopulation Float64 `json:"variance_population,omitempty"` VariancePopulationAsString *string `json:"variance_population_as_string,omitempty"` VarianceSampling Float64 `json:"variance_sampling,omitempty"` VarianceSamplingAsString *string `json:"variance_sampling_as_string,omitempty"` }
ExtendedStatsBucketAggregate type.
func NewExtendedStatsBucketAggregate ¶
func NewExtendedStatsBucketAggregate() *ExtendedStatsBucketAggregate
NewExtendedStatsBucketAggregate returns an ExtendedStatsBucketAggregate.
type ExtendedStatsBucketAggregation ¶
type ExtendedStatsBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Sigma *Float64 `json:"sigma,omitempty"` }
ExtendedStatsBucketAggregation type.
func NewExtendedStatsBucketAggregation ¶
func NewExtendedStatsBucketAggregation() *ExtendedStatsBucketAggregation
NewExtendedStatsBucketAggregation returns an ExtendedStatsBucketAggregation.
func (*ExtendedStatsBucketAggregation) UnmarshalJSON ¶
func (s *ExtendedStatsBucketAggregation) UnmarshalJSON(data []byte) error
type FailProcessor ¶
type FailProcessor struct { Description *string `json:"description,omitempty"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` Message string `json:"message"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` }
FailProcessor type.
func NewFailProcessor ¶
func NewFailProcessor() *FailProcessor
NewFailProcessor returns a FailProcessor.
type FeatureToggle ¶
type FeatureToggle struct {
Enabled bool `json:"enabled"`
}
FeatureToggle type.
func NewFeatureToggle ¶
func NewFeatureToggle() *FeatureToggle
NewFeatureToggle returns a FeatureToggle.
type FetchProfile ¶
type FetchProfile struct { Breakdown FetchProfileBreakdown `json:"breakdown"` Children []FetchProfile `json:"children,omitempty"` Debug *FetchProfileDebug `json:"debug,omitempty"` Description string `json:"description"` TimeInNanos int64 `json:"time_in_nanos"` Type string `json:"type"` }
FetchProfile type.
func NewFetchProfile ¶
func NewFetchProfile() *FetchProfile
NewFetchProfile returns a FetchProfile.
type FetchProfileBreakdown ¶
type FetchProfileBreakdown struct { LoadSource *int `json:"load_source,omitempty"` LoadSourceCount *int `json:"load_source_count,omitempty"` LoadStoredFields *int `json:"load_stored_fields,omitempty"` LoadStoredFieldsCount *int `json:"load_stored_fields_count,omitempty"` NextReader *int `json:"next_reader,omitempty"` NextReaderCount *int `json:"next_reader_count,omitempty"` Process *int `json:"process,omitempty"` ProcessCount *int `json:"process_count,omitempty"` }
FetchProfileBreakdown type.
func NewFetchProfileBreakdown ¶
func NewFetchProfileBreakdown() *FetchProfileBreakdown
NewFetchProfileBreakdown returns a FetchProfileBreakdown.
type FetchProfileDebug ¶
type FetchProfileDebug struct { FastPath *int `json:"fast_path,omitempty"` StoredFields []string `json:"stored_fields,omitempty"` }
FetchProfileDebug type.
func NewFetchProfileDebug ¶
func NewFetchProfileDebug() *FetchProfileDebug
NewFetchProfileDebug returns a FetchProfileDebug.
type FieldAliasProperty ¶
type FieldAliasProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Path *string `json:"path,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Type string `json:"type,omitempty"` }
FieldAliasProperty type.
func NewFieldAliasProperty ¶
func NewFieldAliasProperty() *FieldAliasProperty
NewFieldAliasProperty returns a FieldAliasProperty.
func (*FieldAliasProperty) UnmarshalJSON ¶
func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error
type FieldAndFormat ¶
type FieldAndFormat struct { // Field Wildcard pattern. The request returns values for field names matching this // pattern. Field string `json:"field"` // Format Format in which the values are returned. Format *string `json:"format,omitempty"` IncludeUnmapped *bool `json:"include_unmapped,omitempty"` }
FieldAndFormat type.
func NewFieldAndFormat ¶
func NewFieldAndFormat() *FieldAndFormat
NewFieldAndFormat returns a FieldAndFormat.
type FieldCapability ¶
type FieldCapability struct { // Aggregatable Whether this field can be aggregated on all indices. Aggregatable bool `json:"aggregatable"` // Indices The list of indices where this field has the same type family, or null if all // indices have the same type family for the field. Indices []string `json:"indices,omitempty"` // Meta Merged metadata across all indices as a map of string keys to arrays of // values. A value length of 1 indicates that all indices had the same value for // this key, while a length of 2 or more indicates that not all indices had the // same value for this key. Meta map[string]json.RawMessage `json:"meta,omitempty"` // MetadataField Whether this field is registered as a metadata field. MetadataField *bool `json:"metadata_field,omitempty"` // MetricConflictsIndices The list of indices where this field is present if these indices // don’t have the same `time_series_metric` value for this field. MetricConflictsIndices []string `json:"metric_conflicts_indices,omitempty"` // NonAggregatableIndices The list of indices where this field is not aggregatable, or null if all // indices have the same definition for the field. NonAggregatableIndices []string `json:"non_aggregatable_indices,omitempty"` // NonDimensionIndices If this list is present in response then some indices have the // field marked as a dimension and other indices, the ones in this list, do not. NonDimensionIndices []string `json:"non_dimension_indices,omitempty"` // NonSearchableIndices The list of indices where this field is not searchable, or null if all // indices have the same definition for the field. NonSearchableIndices []string `json:"non_searchable_indices,omitempty"` // Searchable Whether this field is indexed for search on all indices. Searchable bool `json:"searchable"` // TimeSeriesDimension Whether this field is used as a time series dimension. 
TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric Contains metric type if this fields is used as a time series // metrics, absent if the field is not used as metric. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type"` }
FieldCapability type.
func NewFieldCapability ¶
func NewFieldCapability() *FieldCapability
NewFieldCapability returns a FieldCapability.
type FieldCollapse ¶
type FieldCollapse struct { Collapse *FieldCollapse `json:"collapse,omitempty"` Field string `json:"field"` InnerHits []InnerHits `json:"inner_hits,omitempty"` MaxConcurrentGroupSearches *int `json:"max_concurrent_group_searches,omitempty"` }
FieldCollapse type.
func NewFieldCollapse ¶
func NewFieldCollapse() *FieldCollapse
NewFieldCollapse returns a FieldCollapse.
type FieldDateMath ¶
type FieldDateMath interface{}
FieldDateMath holds the union for the following types:
string Float64
type FieldLookup ¶
type FieldLookup struct { Id string `json:"id"` Index *string `json:"index,omitempty"` Path *string `json:"path,omitempty"` Routing *string `json:"routing,omitempty"` }
FieldLookup type.
type FieldMapping ¶
type FieldMapping struct { FullName string `json:"full_name"` Mapping map[string]Property `json:"mapping"` }
FieldMapping type.
func NewFieldMapping ¶
func NewFieldMapping() *FieldMapping
NewFieldMapping returns a FieldMapping.
func (*FieldMapping) UnmarshalJSON ¶
func (s *FieldMapping) UnmarshalJSON(data []byte) error
type FieldMemoryUsage ¶
type FieldMemoryUsage struct { MemorySize ByteSize `json:"memory_size,omitempty"` MemorySizeInBytes int64 `json:"memory_size_in_bytes"` }
FieldMemoryUsage type.
func NewFieldMemoryUsage ¶
func NewFieldMemoryUsage() *FieldMemoryUsage
NewFieldMemoryUsage returns a FieldMemoryUsage.
type FieldMetric ¶
type FieldMetric struct { // Field The field to collect metrics for. This must be a numeric of some kind. Field string `json:"field"` // Metrics An array of metrics to collect for the field. At least one metric must be // configured. Metrics []metric.Metric `json:"metrics"` }
FieldMetric type.
type FieldNamesField ¶
type FieldNamesField struct {
Enabled bool `json:"enabled"`
}
FieldNamesField type.
func NewFieldNamesField ¶
func NewFieldNamesField() *FieldNamesField
NewFieldNamesField returns a FieldNamesField.
type FieldRule ¶
type FieldRule struct { Dn []string `json:"dn,omitempty"` Groups []string `json:"groups,omitempty"` Metadata json.RawMessage `json:"metadata,omitempty"` Realm *SecurityRealm `json:"realm,omitempty"` Username *string `json:"username,omitempty"` }
FieldRule type.
type FieldSecurity ¶
type FieldSecurity struct { Except []string `json:"except,omitempty"` Grant []string `json:"grant,omitempty"` }
FieldSecurity type.
func NewFieldSecurity ¶
func NewFieldSecurity() *FieldSecurity
NewFieldSecurity returns a FieldSecurity.
type FieldSizeUsage ¶
type FieldSizeUsage struct { Size ByteSize `json:"size,omitempty"` SizeInBytes int64 `json:"size_in_bytes"` }
FieldSizeUsage type.
func NewFieldSizeUsage ¶
func NewFieldSizeUsage() *FieldSizeUsage
NewFieldSizeUsage returns a FieldSizeUsage.
type FieldSort ¶
type FieldSort struct { Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Mode *sortmode.SortMode `json:"mode,omitempty"` Nested *NestedSortValue `json:"nested,omitempty"` NumericType *fieldsortnumerictype.FieldSortNumericType `json:"numeric_type,omitempty"` Order *sortorder.SortOrder `json:"order,omitempty"` UnmappedType *fieldtype.FieldType `json:"unmapped_type,omitempty"` }
FieldSort type.
type FieldStatistics ¶
type FieldStatistics struct { DocCount int `json:"doc_count"` SumDocFreq int64 `json:"sum_doc_freq"` SumTtf int64 `json:"sum_ttf"` }
FieldStatistics type.
func NewFieldStatistics ¶
func NewFieldStatistics() *FieldStatistics
NewFieldStatistics returns a FieldStatistics.
type FieldSuggester ¶
type FieldSuggester struct { Completion *CompletionSuggester `json:"completion,omitempty"` Phrase *PhraseSuggester `json:"phrase,omitempty"` Prefix *string `json:"prefix,omitempty"` Regex *string `json:"regex,omitempty"` Term *TermSuggester `json:"term,omitempty"` Text *string `json:"text,omitempty"` }
FieldSuggester type.
func NewFieldSuggester ¶
func NewFieldSuggester() *FieldSuggester
NewFieldSuggester returns a FieldSuggester.
type FieldSummary ¶
type FieldSummary struct { Any uint `json:"any"` DocValues uint `json:"doc_values"` InvertedIndex InvertedIndex `json:"inverted_index"` KnnVectors uint `json:"knn_vectors"` Norms uint `json:"norms"` Points uint `json:"points"` StoredFields uint `json:"stored_fields"` TermVectors uint `json:"term_vectors"` }
FieldSummary type.
func NewFieldSummary ¶
func NewFieldSummary() *FieldSummary
NewFieldSummary returns a FieldSummary.
type FieldTypes ¶
type FieldTypes struct { Count int `json:"count"` IndexCount int `json:"index_count"` IndexedVectorCount *int64 `json:"indexed_vector_count,omitempty"` IndexedVectorDimMax *int64 `json:"indexed_vector_dim_max,omitempty"` IndexedVectorDimMin *int64 `json:"indexed_vector_dim_min,omitempty"` Name string `json:"name"` ScriptCount *int `json:"script_count,omitempty"` }
FieldTypes type.
type FieldTypesMappings ¶
type FieldTypesMappings struct { FieldTypes []FieldTypes `json:"field_types"` RuntimeFieldTypes []ClusterRuntimeFieldTypes `json:"runtime_field_types,omitempty"` TotalDeduplicatedFieldCount *int `json:"total_deduplicated_field_count,omitempty"` TotalDeduplicatedMappingSize ByteSize `json:"total_deduplicated_mapping_size,omitempty"` TotalDeduplicatedMappingSizeInBytes *int64 `json:"total_deduplicated_mapping_size_in_bytes,omitempty"` TotalFieldCount *int `json:"total_field_count,omitempty"` }
FieldTypesMappings type.
func NewFieldTypesMappings ¶
func NewFieldTypesMappings() *FieldTypesMappings
NewFieldTypesMappings returns a FieldTypesMappings.
type FieldValue ¶
type FieldValue interface{}
FieldValue holds the union for the following types:
int64 Float64 string bool nil json.RawMessage
type FieldValueFactorScoreFunction ¶
type FieldValueFactorScoreFunction struct { Factor *Float64 `json:"factor,omitempty"` Field string `json:"field"` Missing *Float64 `json:"missing,omitempty"` Modifier *fieldvaluefactormodifier.FieldValueFactorModifier `json:"modifier,omitempty"` }
FieldValueFactorScoreFunction type.
func NewFieldValueFactorScoreFunction ¶
func NewFieldValueFactorScoreFunction() *FieldValueFactorScoreFunction
NewFieldValueFactorScoreFunction returns a FieldValueFactorScoreFunction.
type FielddataFrequencyFilter ¶
type FielddataFrequencyFilter struct { Max Float64 `json:"max"` Min Float64 `json:"min"` MinSegmentSize int `json:"min_segment_size"` }
FielddataFrequencyFilter type.
func NewFielddataFrequencyFilter ¶
func NewFielddataFrequencyFilter() *FielddataFrequencyFilter
NewFielddataFrequencyFilter returns a FielddataFrequencyFilter.
type FielddataRecord ¶
type FielddataRecord struct { // Field field name Field *string `json:"field,omitempty"` // Host host name Host *string `json:"host,omitempty"` // Id node id Id *string `json:"id,omitempty"` // Ip ip address Ip *string `json:"ip,omitempty"` // Node node name Node *string `json:"node,omitempty"` // Size field data usage Size *string `json:"size,omitempty"` }
FielddataRecord type.
func NewFielddataRecord ¶
func NewFielddataRecord() *FielddataRecord
NewFielddataRecord returns a FielddataRecord.
type FielddataStats ¶
type FielddataStats struct { Evictions *int64 `json:"evictions,omitempty"` Fields map[string]FieldMemoryUsage `json:"fields,omitempty"` MemorySize ByteSize `json:"memory_size,omitempty"` MemorySizeInBytes int64 `json:"memory_size_in_bytes"` }
FielddataStats type.
func NewFielddataStats ¶
func NewFielddataStats() *FielddataStats
NewFielddataStats returns a FielddataStats.
type FieldsUsageBody ¶
type FieldsUsageBody struct { FieldsUsageBody map[string]UsageStatsIndex `json:"-"` Shards_ ShardStatistics `json:"_shards"` }
FieldsUsageBody type.
func NewFieldsUsageBody ¶
func NewFieldsUsageBody() *FieldsUsageBody
NewFieldsUsageBody returns a FieldsUsageBody.
func (FieldsUsageBody) MarshalJSON ¶
func (s FieldsUsageBody) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type FileCountSnapshotStats ¶
type FileCountSnapshotStats struct { FileCount int `json:"file_count"` SizeInBytes int64 `json:"size_in_bytes"` }
FileCountSnapshotStats type.
func NewFileCountSnapshotStats ¶
func NewFileCountSnapshotStats() *FileCountSnapshotStats
NewFileCountSnapshotStats returns a FileCountSnapshotStats.
type FileDetails ¶
type FileDetails struct { Length int64 `json:"length"` Name string `json:"name"` Recovered int64 `json:"recovered"` }
FileDetails type.
type FileSystem ¶
type FileSystem struct { Data []DataPathStats `json:"data,omitempty"` IoStats *IoStats `json:"io_stats,omitempty"` Timestamp *int64 `json:"timestamp,omitempty"` Total *FileSystemTotal `json:"total,omitempty"` }
FileSystem type.
type FileSystemTotal ¶
type FileSystemTotal struct { Available *string `json:"available,omitempty"` AvailableInBytes *int64 `json:"available_in_bytes,omitempty"` Free *string `json:"free,omitempty"` FreeInBytes *int64 `json:"free_in_bytes,omitempty"` Total *string `json:"total,omitempty"` TotalInBytes *int64 `json:"total_in_bytes,omitempty"` }
FileSystemTotal type.
func NewFileSystemTotal ¶
func NewFileSystemTotal() *FileSystemTotal
NewFileSystemTotal returns a FileSystemTotal.
type FillMaskInferenceOptions ¶
type FillMaskInferenceOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options to update when inferring Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` }
FillMaskInferenceOptions type.
func NewFillMaskInferenceOptions ¶
func NewFillMaskInferenceOptions() *FillMaskInferenceOptions
NewFillMaskInferenceOptions returns a FillMaskInferenceOptions.
type FillMaskInferenceUpdateOptions ¶
type FillMaskInferenceUpdateOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options to update when inferring Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` }
FillMaskInferenceUpdateOptions type.
func NewFillMaskInferenceUpdateOptions ¶
func NewFillMaskInferenceUpdateOptions() *FillMaskInferenceUpdateOptions
NewFillMaskInferenceUpdateOptions returns a FillMaskInferenceUpdateOptions.
type FilterAggregate ¶
type FilterAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
FilterAggregate type.
func NewFilterAggregate ¶
func NewFilterAggregate() *FilterAggregate
NewFilterAggregate returns a FilterAggregate.
func (FilterAggregate) MarshalJSON ¶
func (s FilterAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*FilterAggregate) UnmarshalJSON ¶
func (s *FilterAggregate) UnmarshalJSON(data []byte) error
type FilterRef ¶
type FilterRef struct { // FilterId The identifier for the filter. FilterId string `json:"filter_id"` // FilterType If set to `include`, the rule applies for values in the filter. If set to // `exclude`, the rule applies for values not in the filter. FilterType *filtertype.FilterType `json:"filter_type,omitempty"` }
FilterRef type.
type FiltersAggregate ¶
type FiltersAggregate struct { Buckets BucketsFiltersBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
FiltersAggregate type.
func NewFiltersAggregate ¶
func NewFiltersAggregate() *FiltersAggregate
NewFiltersAggregate returns a FiltersAggregate.
func (*FiltersAggregate) UnmarshalJSON ¶
func (s *FiltersAggregate) UnmarshalJSON(data []byte) error
type FiltersAggregation ¶
type FiltersAggregation struct { Filters *BucketsQuery `json:"filters,omitempty"` Keyed *bool `json:"keyed,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` OtherBucket *bool `json:"other_bucket,omitempty"` OtherBucketKey *string `json:"other_bucket_key,omitempty"` }
FiltersAggregation type.
func NewFiltersAggregation ¶
func NewFiltersAggregation() *FiltersAggregation
NewFiltersAggregation returns a FiltersAggregation.
type FiltersBucket ¶
type FiltersBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` }
FiltersBucket type.
func NewFiltersBucket ¶
func NewFiltersBucket() *FiltersBucket
NewFiltersBucket returns a FiltersBucket.
func (FiltersBucket) MarshalJSON ¶
func (s FiltersBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*FiltersBucket) UnmarshalJSON ¶
func (s *FiltersBucket) UnmarshalJSON(data []byte) error
type FingerprintAnalyzer ¶
type FingerprintAnalyzer struct { MaxOutputSize int `json:"max_output_size"` PreserveOriginal bool `json:"preserve_original"` Separator string `json:"separator"` Stopwords []string `json:"stopwords,omitempty"` StopwordsPath *string `json:"stopwords_path,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
FingerprintAnalyzer type.
func NewFingerprintAnalyzer ¶
func NewFingerprintAnalyzer() *FingerprintAnalyzer
NewFingerprintAnalyzer returns a FingerprintAnalyzer.
type FingerprintTokenFilter ¶
type FingerprintTokenFilter struct { MaxOutputSize *int `json:"max_output_size,omitempty"` Separator *string `json:"separator,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
FingerprintTokenFilter type.
func NewFingerprintTokenFilter ¶
func NewFingerprintTokenFilter() *FingerprintTokenFilter
NewFingerprintTokenFilter returns a FingerprintTokenFilter.
type Flattened ¶
type Flattened struct { Available bool `json:"available"` Enabled bool `json:"enabled"` FieldCount int `json:"field_count"` }
Flattened type.
type FlattenedProperty ¶
type FlattenedProperty struct { Boost *Float64 `json:"boost,omitempty"` DepthLimit *int `json:"depth_limit,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` EagerGlobalOrdinals *bool `json:"eager_global_ordinals,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` IndexOptions *indexoptions.IndexOptions `json:"index_options,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *string `json:"null_value,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` SplitQueriesOnWhitespace *bool `json:"split_queries_on_whitespace,omitempty"` Type string `json:"type,omitempty"` }
FlattenedProperty type.
func NewFlattenedProperty ¶
func NewFlattenedProperty() *FlattenedProperty
NewFlattenedProperty returns a FlattenedProperty.
func (*FlattenedProperty) UnmarshalJSON ¶
func (s *FlattenedProperty) UnmarshalJSON(data []byte) error
type Float64 ¶
type Float64 float64
Float64 custom type for Inf & NaN handling.
func (Float64) MarshalJSON ¶
MarshalJSON implements Marshaler interface.
func (*Float64) UnmarshalJSON ¶
UnmarshalJSON implements Unmarshaler interface.
type FloatNumberProperty ¶
type FloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *float32 `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric For internal use by Elastic only. Marks the field as a time series metric. // Defaults to false. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
FloatNumberProperty type.
func NewFloatNumberProperty ¶
func NewFloatNumberProperty() *FloatNumberProperty
NewFloatNumberProperty returns a FloatNumberProperty.
func (*FloatNumberProperty) UnmarshalJSON ¶
func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error
type FloatRangeProperty ¶
type FloatRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
FloatRangeProperty type.
func NewFloatRangeProperty ¶
func NewFloatRangeProperty() *FloatRangeProperty
NewFloatRangeProperty returns a FloatRangeProperty.
func (*FloatRangeProperty) UnmarshalJSON ¶
func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error
type FlushStats ¶
type FlushStats struct { Periodic int64 `json:"periodic"` Total int64 `json:"total"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` }
FlushStats type.
type FollowIndexStats ¶
type FollowIndexStats struct { Index string `json:"index"` Shards []CcrShardStats `json:"shards"` }
FollowIndexStats type.
func NewFollowIndexStats ¶
func NewFollowIndexStats() *FollowIndexStats
NewFollowIndexStats returns a FollowIndexStats.
type FollowStats ¶
type FollowStats struct {
Indices []FollowIndexStats `json:"indices"`
}
FollowStats type.
type FollowerIndex ¶
type FollowerIndex struct { FollowerIndex string `json:"follower_index"` LeaderIndex string `json:"leader_index"` Parameters *FollowerIndexParameters `json:"parameters,omitempty"` RemoteCluster string `json:"remote_cluster"` Status followerindexstatus.FollowerIndexStatus `json:"status"` }
FollowerIndex type.
func NewFollowerIndex ¶
func NewFollowerIndex() *FollowerIndex
NewFollowerIndex returns a FollowerIndex.
type FollowerIndexParameters ¶
type FollowerIndexParameters struct { MaxOutstandingReadRequests int `json:"max_outstanding_read_requests"` MaxOutstandingWriteRequests int `json:"max_outstanding_write_requests"` MaxReadRequestOperationCount int `json:"max_read_request_operation_count"` MaxReadRequestSize string `json:"max_read_request_size"` MaxRetryDelay Duration `json:"max_retry_delay"` MaxWriteBufferCount int `json:"max_write_buffer_count"` MaxWriteBufferSize string `json:"max_write_buffer_size"` MaxWriteRequestOperationCount int `json:"max_write_request_operation_count"` MaxWriteRequestSize string `json:"max_write_request_size"` ReadPollTimeout Duration `json:"read_poll_timeout"` }
FollowerIndexParameters type.
func NewFollowerIndexParameters ¶
func NewFollowerIndexParameters() *FollowerIndexParameters
NewFollowerIndexParameters returns a FollowerIndexParameters.
type ForceMergeConfiguration ¶
type ForceMergeConfiguration struct {
MaxNumSegments int `json:"max_num_segments"`
}
ForceMergeConfiguration type.
func NewForceMergeConfiguration ¶
func NewForceMergeConfiguration() *ForceMergeConfiguration
NewForceMergeConfiguration returns a ForceMergeConfiguration.
type ForceMergeResponseBody ¶
type ForceMergeResponseBody struct { Shards_ ShardStatistics `json:"_shards"` // Task task contains a task id returned when wait_for_completion=false, // you can use the task_id to get the status of the task at _tasks/<task_id> Task *string `json:"task,omitempty"` }
ForceMergeResponseBody type.
func NewForceMergeResponseBody ¶
func NewForceMergeResponseBody() *ForceMergeResponseBody
NewForceMergeResponseBody returns a ForceMergeResponseBody.
type ForeachProcessor ¶
type ForeachProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Processor *ProcessorContainer `json:"processor,omitempty"` Tag *string `json:"tag,omitempty"` }
ForeachProcessor type.
func NewForeachProcessor ¶
func NewForeachProcessor() *ForeachProcessor
NewForeachProcessor returns a ForeachProcessor.
type FormattableMetricAggregation ¶
type FormattableMetricAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
FormattableMetricAggregation type.
func NewFormattableMetricAggregation ¶
func NewFormattableMetricAggregation() *FormattableMetricAggregation
NewFormattableMetricAggregation returns a FormattableMetricAggregation.
type FrequencyEncodingPreprocessor ¶
type FrequencyEncodingPreprocessor struct { FeatureName string `json:"feature_name"` Field string `json:"field"` FrequencyMap map[string]Float64 `json:"frequency_map"` }
FrequencyEncodingPreprocessor type.
func NewFrequencyEncodingPreprocessor ¶
func NewFrequencyEncodingPreprocessor() *FrequencyEncodingPreprocessor
NewFrequencyEncodingPreprocessor returns a FrequencyEncodingPreprocessor.
type FrozenIndices ¶
type FrozenIndices struct { Available bool `json:"available"` Enabled bool `json:"enabled"` IndicesCount int64 `json:"indices_count"` }
FrozenIndices type.
func NewFrozenIndices ¶
func NewFrozenIndices() *FrozenIndices
NewFrozenIndices returns a FrozenIndices.
type FunctionScore ¶
type FunctionScore struct { Exp DecayFunction `json:"exp,omitempty"` FieldValueFactor *FieldValueFactorScoreFunction `json:"field_value_factor,omitempty"` Filter *Query `json:"filter,omitempty"` Gauss DecayFunction `json:"gauss,omitempty"` Linear DecayFunction `json:"linear,omitempty"` RandomScore *RandomScoreFunction `json:"random_score,omitempty"` ScriptScore *ScriptScoreFunction `json:"script_score,omitempty"` Weight *Float64 `json:"weight,omitempty"` }
FunctionScore type.
func NewFunctionScore ¶
func NewFunctionScore() *FunctionScore
NewFunctionScore returns a FunctionScore.
type FunctionScoreQuery ¶
type FunctionScoreQuery struct { Boost *float32 `json:"boost,omitempty"` BoostMode *functionboostmode.FunctionBoostMode `json:"boost_mode,omitempty"` Functions []FunctionScore `json:"functions,omitempty"` MaxBoost *Float64 `json:"max_boost,omitempty"` MinScore *Float64 `json:"min_score,omitempty"` Query *Query `json:"query,omitempty"` QueryName_ *string `json:"_name,omitempty"` ScoreMode *functionscoremode.FunctionScoreMode `json:"score_mode,omitempty"` }
FunctionScoreQuery type.
func NewFunctionScoreQuery ¶
func NewFunctionScoreQuery() *FunctionScoreQuery
NewFunctionScoreQuery returns a FunctionScoreQuery.
type Fuzziness ¶
type Fuzziness interface{}
Fuzziness holds the union for the following types:
string int
type FuzzyQuery ¶
type FuzzyQuery struct { Boost *float32 `json:"boost,omitempty"` Fuzziness Fuzziness `json:"fuzziness,omitempty"` MaxExpansions *int `json:"max_expansions,omitempty"` PrefixLength *int `json:"prefix_length,omitempty"` QueryName_ *string `json:"_name,omitempty"` Rewrite *string `json:"rewrite,omitempty"` Transpositions *bool `json:"transpositions,omitempty"` Value string `json:"value"` }
FuzzyQuery type.
type GarbageCollector ¶
type GarbageCollector struct {
Collectors map[string]GarbageCollectorTotal `json:"collectors,omitempty"`
}
GarbageCollector type.
func NewGarbageCollector ¶
func NewGarbageCollector() *GarbageCollector
NewGarbageCollector returns a GarbageCollector.
type GarbageCollectorTotal ¶
type GarbageCollectorTotal struct { CollectionCount *int64 `json:"collection_count,omitempty"` CollectionTime *string `json:"collection_time,omitempty"` CollectionTimeInMillis *int64 `json:"collection_time_in_millis,omitempty"` }
GarbageCollectorTotal type.
func NewGarbageCollectorTotal ¶
func NewGarbageCollectorTotal() *GarbageCollectorTotal
NewGarbageCollectorTotal returns a GarbageCollectorTotal.
type GeoBoundingBoxQuery ¶
type GeoBoundingBoxQuery struct { Boost *float32 `json:"boost,omitempty"` GeoBoundingBoxQuery map[string]GeoBounds `json:"-"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` QueryName_ *string `json:"_name,omitempty"` Type *geoexecution.GeoExecution `json:"type,omitempty"` ValidationMethod *geovalidationmethod.GeoValidationMethod `json:"validation_method,omitempty"` }
GeoBoundingBoxQuery type.
func NewGeoBoundingBoxQuery ¶
func NewGeoBoundingBoxQuery() *GeoBoundingBoxQuery
NewGeoBoundingBoxQuery returns a GeoBoundingBoxQuery.
func (GeoBoundingBoxQuery) MarshalJSON ¶
func (s GeoBoundingBoxQuery) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type GeoBounds ¶
type GeoBounds interface{}
GeoBounds holds the union for the following types:
CoordsGeoBounds TopLeftBottomRightGeoBounds TopRightBottomLeftGeoBounds WktGeoBounds
type GeoBoundsAggregate ¶
type GeoBoundsAggregate struct { Bounds GeoBounds `json:"bounds,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
GeoBoundsAggregate type.
func NewGeoBoundsAggregate ¶
func NewGeoBoundsAggregate() *GeoBoundsAggregate
NewGeoBoundsAggregate returns a GeoBoundsAggregate.
type GeoBoundsAggregation ¶
type GeoBoundsAggregation struct { Field *string `json:"field,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` WrapLongitude *bool `json:"wrap_longitude,omitempty"` }
GeoBoundsAggregation type.
func NewGeoBoundsAggregation ¶
func NewGeoBoundsAggregation() *GeoBoundsAggregation
NewGeoBoundsAggregation returns a GeoBoundsAggregation.
type GeoCentroidAggregate ¶
type GeoCentroidAggregate struct { Count int64 `json:"count"` Location GeoLocation `json:"location,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
GeoCentroidAggregate type.
func NewGeoCentroidAggregate ¶
func NewGeoCentroidAggregate() *GeoCentroidAggregate
NewGeoCentroidAggregate returns a GeoCentroidAggregate.
type GeoCentroidAggregation ¶
type GeoCentroidAggregation struct { Count *int64 `json:"count,omitempty"` Field *string `json:"field,omitempty"` Location GeoLocation `json:"location,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
GeoCentroidAggregation type.
func NewGeoCentroidAggregation ¶
func NewGeoCentroidAggregation() *GeoCentroidAggregation
NewGeoCentroidAggregation returns a GeoCentroidAggregation.
type GeoDecayFunction ¶
type GeoDecayFunction struct { GeoDecayFunction map[string]DecayPlacementGeoLocationDistance `json:"-"` MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` }
GeoDecayFunction type.
func NewGeoDecayFunction ¶
func NewGeoDecayFunction() *GeoDecayFunction
NewGeoDecayFunction returns a GeoDecayFunction.
func (GeoDecayFunction) MarshalJSON ¶
func (s GeoDecayFunction) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type GeoDistanceAggregate ¶
type GeoDistanceAggregate struct { Buckets BucketsRangeBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
GeoDistanceAggregate type.
func NewGeoDistanceAggregate ¶
func NewGeoDistanceAggregate() *GeoDistanceAggregate
NewGeoDistanceAggregate returns a GeoDistanceAggregate.
func (*GeoDistanceAggregate) UnmarshalJSON ¶
func (s *GeoDistanceAggregate) UnmarshalJSON(data []byte) error
type GeoDistanceAggregation ¶
type GeoDistanceAggregation struct { DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` Field *string `json:"field,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Origin GeoLocation `json:"origin,omitempty"` Ranges []AggregationRange `json:"ranges,omitempty"` Unit *distanceunit.DistanceUnit `json:"unit,omitempty"` }
GeoDistanceAggregation type.
func NewGeoDistanceAggregation ¶
func NewGeoDistanceAggregation() *GeoDistanceAggregation
NewGeoDistanceAggregation returns a GeoDistanceAggregation.
type GeoDistanceFeatureQuery ¶
type GeoDistanceFeatureQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` Origin GeoLocation `json:"origin"` Pivot string `json:"pivot"` QueryName_ *string `json:"_name,omitempty"` }
GeoDistanceFeatureQuery type.
func NewGeoDistanceFeatureQuery ¶
func NewGeoDistanceFeatureQuery() *GeoDistanceFeatureQuery
NewGeoDistanceFeatureQuery returns a GeoDistanceFeatureQuery.
type GeoDistanceQuery ¶
type GeoDistanceQuery struct { Boost *float32 `json:"boost,omitempty"` Distance *string `json:"distance,omitempty"` DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` GeoDistanceQuery map[string]GeoLocation `json:"-"` QueryName_ *string `json:"_name,omitempty"` ValidationMethod *geovalidationmethod.GeoValidationMethod `json:"validation_method,omitempty"` }
GeoDistanceQuery type.
func NewGeoDistanceQuery ¶
func NewGeoDistanceQuery() *GeoDistanceQuery
NewGeoDistanceQuery returns a GeoDistanceQuery.
func (GeoDistanceQuery) MarshalJSON ¶
func (s GeoDistanceQuery) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type GeoDistanceSort ¶
type GeoDistanceSort struct { DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` GeoDistanceSort map[string][]GeoLocation `json:"-"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` Mode *sortmode.SortMode `json:"mode,omitempty"` Order *sortorder.SortOrder `json:"order,omitempty"` Unit *distanceunit.DistanceUnit `json:"unit,omitempty"` }
GeoDistanceSort type.
func NewGeoDistanceSort ¶
func NewGeoDistanceSort() *GeoDistanceSort
NewGeoDistanceSort returns a GeoDistanceSort.
func (GeoDistanceSort) MarshalJSON ¶
func (s GeoDistanceSort) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type GeoHashGridAggregate ¶
type GeoHashGridAggregate struct { Buckets BucketsGeoHashGridBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
GeoHashGridAggregate type.
func NewGeoHashGridAggregate ¶
func NewGeoHashGridAggregate() *GeoHashGridAggregate
NewGeoHashGridAggregate returns a GeoHashGridAggregate.
func (*GeoHashGridAggregate) UnmarshalJSON ¶
func (s *GeoHashGridAggregate) UnmarshalJSON(data []byte) error
type GeoHashGridAggregation ¶
type GeoHashGridAggregation struct { Bounds GeoBounds `json:"bounds,omitempty"` Field *string `json:"field,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Precision GeoHashPrecision `json:"precision,omitempty"` ShardSize *int `json:"shard_size,omitempty"` Size *int `json:"size,omitempty"` }
GeoHashGridAggregation type.
func NewGeoHashGridAggregation ¶
func NewGeoHashGridAggregation() *GeoHashGridAggregation
NewGeoHashGridAggregation returns a GeoHashGridAggregation.
type GeoHashGridBucket ¶
type GeoHashGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key string `json:"key"` }
GeoHashGridBucket type.
func NewGeoHashGridBucket ¶
func NewGeoHashGridBucket() *GeoHashGridBucket
NewGeoHashGridBucket returns a GeoHashGridBucket.
func (GeoHashGridBucket) MarshalJSON ¶
func (s GeoHashGridBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*GeoHashGridBucket) UnmarshalJSON ¶
func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error
type GeoHashLocation ¶
type GeoHashLocation struct {
Geohash string `json:"geohash"`
}
GeoHashLocation type.
func NewGeoHashLocation ¶
func NewGeoHashLocation() *GeoHashLocation
NewGeoHashLocation returns a GeoHashLocation.
type GeoHashPrecision ¶
type GeoHashPrecision interface{}
GeoHashPrecision holds the union for the following types:
int string
type GeoHexGridAggregate ¶
type GeoHexGridAggregate struct { Buckets BucketsGeoHexGridBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
GeoHexGridAggregate type.
func NewGeoHexGridAggregate ¶
func NewGeoHexGridAggregate() *GeoHexGridAggregate
NewGeoHexGridAggregate returns a GeoHexGridAggregate.
func (*GeoHexGridAggregate) UnmarshalJSON ¶
func (s *GeoHexGridAggregate) UnmarshalJSON(data []byte) error
type GeoHexGridBucket ¶
type GeoHexGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key string `json:"key"` }
GeoHexGridBucket type.
func NewGeoHexGridBucket ¶
func NewGeoHexGridBucket() *GeoHexGridBucket
NewGeoHexGridBucket returns a GeoHexGridBucket.
func (GeoHexGridBucket) MarshalJSON ¶
func (s GeoHexGridBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*GeoHexGridBucket) UnmarshalJSON ¶
func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error
type GeoIpDownloadStatistics ¶
type GeoIpDownloadStatistics struct { // DatabaseCount Current number of databases available for use. DatabaseCount int `json:"database_count"` // FailedDownloads Total number of failed database downloads. FailedDownloads int `json:"failed_downloads"` // SkippedUpdates Total number of database updates skipped. SkippedUpdates int `json:"skipped_updates"` // SuccessfulDownloads Total number of successful database downloads. SuccessfulDownloads int `json:"successful_downloads"` // TotalDownloadTime Total milliseconds spent downloading databases. TotalDownloadTime int64 `json:"total_download_time"` }
GeoIpDownloadStatistics type.
func NewGeoIpDownloadStatistics ¶
func NewGeoIpDownloadStatistics() *GeoIpDownloadStatistics
NewGeoIpDownloadStatistics returns a GeoIpDownloadStatistics.
type GeoIpNodeDatabaseName ¶
type GeoIpNodeDatabaseName struct { // Name Name of the database. Name string `json:"name"` }
GeoIpNodeDatabaseName type.
func NewGeoIpNodeDatabaseName ¶
func NewGeoIpNodeDatabaseName() *GeoIpNodeDatabaseName
NewGeoIpNodeDatabaseName returns a GeoIpNodeDatabaseName.
type GeoIpNodeDatabases ¶
type GeoIpNodeDatabases struct { // Databases Downloaded databases for the node. Databases []GeoIpNodeDatabaseName `json:"databases"` // FilesInTemp Downloaded database files, including related license files. Elasticsearch // stores these files in the node’s temporary directory: // $ES_TMPDIR/geoip-databases/<node_id>. FilesInTemp []string `json:"files_in_temp"` }
GeoIpNodeDatabases type.
func NewGeoIpNodeDatabases ¶
func NewGeoIpNodeDatabases() *GeoIpNodeDatabases
NewGeoIpNodeDatabases returns a GeoIpNodeDatabases.
type GeoIpProcessor ¶
type GeoIpProcessor struct { DatabaseFile *string `json:"database_file,omitempty"` Description *string `json:"description,omitempty"` Field string `json:"field"` FirstOnly *bool `json:"first_only,omitempty"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Properties []string `json:"properties,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
GeoIpProcessor type.
func NewGeoIpProcessor ¶
func NewGeoIpProcessor() *GeoIpProcessor
NewGeoIpProcessor returns a GeoIpProcessor.
type GeoLine ¶
type GeoLine struct { // Coordinates Array of `[lon, lat]` coordinates Coordinates [][]Float64 `json:"coordinates"` // Type Always `"LineString"` Type string `json:"type"` }
GeoLine type.
type GeoLineAggregate ¶
type GeoLineAggregate struct { Geometry GeoLine `json:"geometry"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Properties json.RawMessage `json:"properties,omitempty"` Type string `json:"type"` }
GeoLineAggregate type.
func NewGeoLineAggregate ¶
func NewGeoLineAggregate() *GeoLineAggregate
NewGeoLineAggregate returns a GeoLineAggregate.
type GeoLineAggregation ¶
type GeoLineAggregation struct { IncludeSort *bool `json:"include_sort,omitempty"` Point GeoLinePoint `json:"point"` Size *int `json:"size,omitempty"` Sort GeoLineSort `json:"sort"` SortOrder *sortorder.SortOrder `json:"sort_order,omitempty"` }
GeoLineAggregation type.
func NewGeoLineAggregation ¶
func NewGeoLineAggregation() *GeoLineAggregation
NewGeoLineAggregation returns a GeoLineAggregation.
type GeoLinePoint ¶
type GeoLinePoint struct {
Field string `json:"field"`
}
GeoLinePoint type.
func NewGeoLinePoint ¶
func NewGeoLinePoint() *GeoLinePoint
NewGeoLinePoint returns a GeoLinePoint.
type GeoLocation ¶
type GeoLocation interface{}
GeoLocation holds the union for the following types:
LatLonGeoLocation GeoHashLocation []Float64 string
type GeoPointProperty ¶
type GeoPointProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` IgnoreZValue *bool `json:"ignore_z_value,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue GeoLocation `json:"null_value,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
GeoPointProperty type.
func NewGeoPointProperty ¶
func NewGeoPointProperty() *GeoPointProperty
NewGeoPointProperty returns a GeoPointProperty.
func (*GeoPointProperty) UnmarshalJSON ¶
func (s *GeoPointProperty) UnmarshalJSON(data []byte) error
type GeoPolygonPoints ¶
type GeoPolygonPoints struct {
Points []GeoLocation `json:"points"`
}
GeoPolygonPoints type.
func NewGeoPolygonPoints ¶
func NewGeoPolygonPoints() *GeoPolygonPoints
NewGeoPolygonPoints returns a GeoPolygonPoints.
type GeoPolygonQuery ¶
type GeoPolygonQuery struct { Boost *float32 `json:"boost,omitempty"` GeoPolygonQuery map[string]GeoPolygonPoints `json:"-"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` QueryName_ *string `json:"_name,omitempty"` ValidationMethod *geovalidationmethod.GeoValidationMethod `json:"validation_method,omitempty"` }
GeoPolygonQuery type.
func NewGeoPolygonQuery ¶
func NewGeoPolygonQuery() *GeoPolygonQuery
NewGeoPolygonQuery returns a GeoPolygonQuery.
func (GeoPolygonQuery) MarshalJSON ¶
func (s GeoPolygonQuery) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type GeoResults ¶
type GeoResults struct { // ActualPoint The actual value for the bucket formatted as a `geo_point`. ActualPoint string `json:"actual_point"` // TypicalPoint The typical value for the bucket formatted as a `geo_point`. TypicalPoint string `json:"typical_point"` }
GeoResults type.
type GeoShapeFieldQuery ¶
type GeoShapeFieldQuery struct { IndexedShape *FieldLookup `json:"indexed_shape,omitempty"` Relation *geoshaperelation.GeoShapeRelation `json:"relation,omitempty"` Shape json.RawMessage `json:"shape,omitempty"` }
GeoShapeFieldQuery type.
func NewGeoShapeFieldQuery ¶
func NewGeoShapeFieldQuery() *GeoShapeFieldQuery
NewGeoShapeFieldQuery returns a GeoShapeFieldQuery.
type GeoShapeProperty ¶
type GeoShapeProperty struct { Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` IgnoreZValue *bool `json:"ignore_z_value,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Orientation *geoorientation.GeoOrientation `json:"orientation,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Strategy *geostrategy.GeoStrategy `json:"strategy,omitempty"` Type string `json:"type,omitempty"` }
GeoShapeProperty type.
func NewGeoShapeProperty ¶
func NewGeoShapeProperty() *GeoShapeProperty
NewGeoShapeProperty returns a GeoShapeProperty.
func (*GeoShapeProperty) UnmarshalJSON ¶
func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error
type GeoShapeQuery ¶
type GeoShapeQuery struct { Boost *float32 `json:"boost,omitempty"` GeoShapeQuery map[string]GeoShapeFieldQuery `json:"-"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
GeoShapeQuery type.
func NewGeoShapeQuery ¶
func NewGeoShapeQuery() *GeoShapeQuery
NewGeoShapeQuery returns a GeoShapeQuery.
func (GeoShapeQuery) MarshalJSON ¶
func (s GeoShapeQuery) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type GeoTileGridAggregate ¶
type GeoTileGridAggregate struct { Buckets BucketsGeoTileGridBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
GeoTileGridAggregate type.
func NewGeoTileGridAggregate ¶
func NewGeoTileGridAggregate() *GeoTileGridAggregate
NewGeoTileGridAggregate returns a GeoTileGridAggregate.
func (*GeoTileGridAggregate) UnmarshalJSON ¶
func (s *GeoTileGridAggregate) UnmarshalJSON(data []byte) error
type GeoTileGridAggregation ¶
type GeoTileGridAggregation struct { Bounds GeoBounds `json:"bounds,omitempty"` Field *string `json:"field,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Precision *int `json:"precision,omitempty"` ShardSize *int `json:"shard_size,omitempty"` Size *int `json:"size,omitempty"` }
GeoTileGridAggregation type.
func NewGeoTileGridAggregation ¶
func NewGeoTileGridAggregation() *GeoTileGridAggregation
NewGeoTileGridAggregation returns a GeoTileGridAggregation.
type GeoTileGridBucket ¶
type GeoTileGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key string `json:"key"` }
GeoTileGridBucket type.
func NewGeoTileGridBucket ¶
func NewGeoTileGridBucket() *GeoTileGridBucket
NewGeoTileGridBucket returns a GeoTileGridBucket.
func (GeoTileGridBucket) MarshalJSON ¶
func (s GeoTileGridBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*GeoTileGridBucket) UnmarshalJSON ¶
func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error
type GeohexGridAggregation ¶
type GeohexGridAggregation struct { // Bounds Bounding box used to filter the geo-points in each bucket. Bounds GeoBounds `json:"bounds,omitempty"` // Field Field containing indexed geo-point values. Must be explicitly // mapped as a `geo_point` field. If the field contains an array, // `geohex_grid` aggregates all array values. Field string `json:"field"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` // Precision Integer zoom of the key used to define cells or buckets // in the results. Value should be between 0-15. Precision *int `json:"precision,omitempty"` // ShardSize Number of buckets returned from each shard. ShardSize *int `json:"shard_size,omitempty"` // Size Maximum number of buckets to return. Size *int `json:"size,omitempty"` }
GeohexGridAggregation type.
func NewGeohexGridAggregation ¶
func NewGeohexGridAggregation() *GeohexGridAggregation
NewGeohexGridAggregation returns a GeohexGridAggregation.
type GetMigrationFeature ¶
type GetMigrationFeature struct { FeatureName string `json:"feature_name"` Indices []MigrationFeatureIndexInfo `json:"indices"` MigrationStatus migrationstatus.MigrationStatus `json:"migration_status"` MinimumIndexVersion string `json:"minimum_index_version"` }
GetMigrationFeature type.
func NewGetMigrationFeature ¶
func NewGetMigrationFeature() *GetMigrationFeature
NewGetMigrationFeature returns a GetMigrationFeature.
type GetResult ¶
type GetResult struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` Id_ string `json:"_id"` Index_ string `json:"_index"` PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Routing_ *string `json:"_routing,omitempty"` SeqNo_ *int64 `json:"_seq_no,omitempty"` Source_ json.RawMessage `json:"_source,omitempty"` Version_ *int64 `json:"_version,omitempty"` }
GetResult type.
type GetScriptContext ¶
type GetScriptContext struct { Methods []ContextMethod `json:"methods"` Name string `json:"name"` }
GetScriptContext type.
func NewGetScriptContext ¶
func NewGetScriptContext() *GetScriptContext
NewGetScriptContext returns a GetScriptContext.
type GetStats ¶
type GetStats struct { Current int64 `json:"current"` ExistsTime Duration `json:"exists_time,omitempty"` ExistsTimeInMillis int64 `json:"exists_time_in_millis"` ExistsTotal int64 `json:"exists_total"` MissingTime Duration `json:"missing_time,omitempty"` MissingTimeInMillis int64 `json:"missing_time_in_millis"` MissingTotal int64 `json:"missing_total"` Time Duration `json:"time,omitempty"` TimeInMillis int64 `json:"time_in_millis"` Total int64 `json:"total"` }
GetStats type.
type GetUserProfileErrors ¶
type GetUserProfileErrors struct { Count int64 `json:"count"` Details map[string]ErrorCause `json:"details"` }
GetUserProfileErrors type.
func NewGetUserProfileErrors ¶
func NewGetUserProfileErrors() *GetUserProfileErrors
NewGetUserProfileErrors returns a GetUserProfileErrors.
type GlobalAggregate ¶
type GlobalAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
GlobalAggregate type.
func NewGlobalAggregate ¶
func NewGlobalAggregate() *GlobalAggregate
NewGlobalAggregate returns a GlobalAggregate.
func (GlobalAggregate) MarshalJSON ¶
func (s GlobalAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*GlobalAggregate) UnmarshalJSON ¶
func (s *GlobalAggregate) UnmarshalJSON(data []byte) error
type GlobalAggregation ¶
type GlobalAggregation struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
GlobalAggregation type.
func NewGlobalAggregation ¶
func NewGlobalAggregation() *GlobalAggregation
NewGlobalAggregation returns a GlobalAggregation.
type GlobalPrivilege ¶
type GlobalPrivilege struct {
Application ApplicationGlobalUserPrivileges `json:"application"`
}
GlobalPrivilege type.
func NewGlobalPrivilege ¶
func NewGlobalPrivilege() *GlobalPrivilege
NewGlobalPrivilege returns a GlobalPrivilege.
type GoogleNormalizedDistanceHeuristic ¶
type GoogleNormalizedDistanceHeuristic struct {
BackgroundIsSuperset *bool `json:"background_is_superset,omitempty"`
}
GoogleNormalizedDistanceHeuristic type.
func NewGoogleNormalizedDistanceHeuristic ¶
func NewGoogleNormalizedDistanceHeuristic() *GoogleNormalizedDistanceHeuristic
NewGoogleNormalizedDistanceHeuristic returns a GoogleNormalizedDistanceHeuristic.
type GrantApiKey ¶
type GrantApiKey struct { Expiration *string `json:"expiration,omitempty"` Metadata map[string]json.RawMessage `json:"metadata,omitempty"` Name string `json:"name"` RoleDescriptors []map[string]RoleDescriptor `json:"role_descriptors,omitempty"` }
GrantApiKey type.
type GrokProcessor ¶
type GrokProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` PatternDefinitions map[string]string `json:"pattern_definitions,omitempty"` Patterns []string `json:"patterns"` Tag *string `json:"tag,omitempty"` TraceMatch *bool `json:"trace_match,omitempty"` }
GrokProcessor type.
func NewGrokProcessor ¶
func NewGrokProcessor() *GrokProcessor
NewGrokProcessor returns a GrokProcessor.
type Groupings ¶
type Groupings struct { DateHistogram *DateHistogramGrouping `json:"date_histogram,omitempty"` Histogram *HistogramGrouping `json:"histogram,omitempty"` Terms *TermsGrouping `json:"terms,omitempty"` }
Groupings type.
type GsubProcessor ¶
type GsubProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Pattern string `json:"pattern"` Replacement string `json:"replacement"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
GsubProcessor type.
func NewGsubProcessor ¶
func NewGsubProcessor() *GsubProcessor
NewGsubProcessor returns a GsubProcessor.
type HalfFloatNumberProperty ¶
type HalfFloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *float32 `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric For internal use by Elastic only. Marks the field as a time series metric. // Defaults to false. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
HalfFloatNumberProperty type.
func NewHalfFloatNumberProperty ¶
func NewHalfFloatNumberProperty() *HalfFloatNumberProperty
NewHalfFloatNumberProperty returns a HalfFloatNumberProperty.
func (*HalfFloatNumberProperty) UnmarshalJSON ¶
func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error
type HasChildQuery ¶
type HasChildQuery struct { Boost *float32 `json:"boost,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` InnerHits *InnerHits `json:"inner_hits,omitempty"` MaxChildren *int `json:"max_children,omitempty"` MinChildren *int `json:"min_children,omitempty"` Query *Query `json:"query,omitempty"` QueryName_ *string `json:"_name,omitempty"` ScoreMode *childscoremode.ChildScoreMode `json:"score_mode,omitempty"` Type string `json:"type"` }
HasChildQuery type.
func NewHasChildQuery ¶
func NewHasChildQuery() *HasChildQuery
NewHasChildQuery returns a HasChildQuery.
type HasParentQuery ¶
type HasParentQuery struct { Boost *float32 `json:"boost,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` InnerHits *InnerHits `json:"inner_hits,omitempty"` ParentType string `json:"parent_type"` Query *Query `json:"query,omitempty"` QueryName_ *string `json:"_name,omitempty"` Score *bool `json:"score,omitempty"` }
HasParentQuery type.
func NewHasParentQuery ¶
func NewHasParentQuery() *HasParentQuery
NewHasParentQuery returns a HasParentQuery.
type HasPrivilegesUserProfileErrors ¶
type HasPrivilegesUserProfileErrors struct { Count int64 `json:"count"` Details map[string]ErrorCause `json:"details"` }
HasPrivilegesUserProfileErrors type.
func NewHasPrivilegesUserProfileErrors ¶
func NewHasPrivilegesUserProfileErrors() *HasPrivilegesUserProfileErrors
NewHasPrivilegesUserProfileErrors returns a HasPrivilegesUserProfileErrors.
type HdrMethod ¶
type HdrMethod struct {
NumberOfSignificantValueDigits *int `json:"number_of_significant_value_digits,omitempty"`
}
HdrMethod type.
type HdrPercentileRanksAggregate ¶
type HdrPercentileRanksAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Values Percentiles `json:"values"` }
HdrPercentileRanksAggregate type.
func NewHdrPercentileRanksAggregate ¶
func NewHdrPercentileRanksAggregate() *HdrPercentileRanksAggregate
NewHdrPercentileRanksAggregate returns a HdrPercentileRanksAggregate.
type HdrPercentilesAggregate ¶
type HdrPercentilesAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Values Percentiles `json:"values"` }
HdrPercentilesAggregate type.
func NewHdrPercentilesAggregate ¶
func NewHdrPercentilesAggregate() *HdrPercentilesAggregate
NewHdrPercentilesAggregate returns a HdrPercentilesAggregate.
type HealthRecord ¶
type HealthRecord struct { // ActiveShardsPercent active number of shards in percent ActiveShardsPercent *string `json:"active_shards_percent,omitempty"` // Cluster cluster name Cluster *string `json:"cluster,omitempty"` // Epoch seconds since 1970-01-01 00:00:00 Epoch StringifiedEpochTimeUnitSeconds `json:"epoch,omitempty"` // Init number of initializing shards Init *string `json:"init,omitempty"` // MaxTaskWaitTime wait time of longest task pending MaxTaskWaitTime *string `json:"max_task_wait_time,omitempty"` // NodeData number of nodes that can store data NodeData *string `json:"node.data,omitempty"` // NodeTotal total number of nodes NodeTotal *string `json:"node.total,omitempty"` // PendingTasks number of pending tasks PendingTasks *string `json:"pending_tasks,omitempty"` // Pri number of primary shards Pri *string `json:"pri,omitempty"` // Relo number of relocating shards Relo *string `json:"relo,omitempty"` // Shards total number of shards Shards *string `json:"shards,omitempty"` // Status health status Status *string `json:"status,omitempty"` // Timestamp time in HH:MM:SS Timestamp *string `json:"timestamp,omitempty"` // Unassign number of unassigned shards Unassign *string `json:"unassign,omitempty"` }
HealthRecord type.
func NewHealthRecord ¶
func NewHealthRecord() *HealthRecord
NewHealthRecord returns a HealthRecord.
type HealthStatistics ¶
type HealthStatistics struct { Available bool `json:"available"` Enabled bool `json:"enabled"` Invocations Invocations `json:"invocations"` }
HealthStatistics type.
func NewHealthStatistics ¶
func NewHealthStatistics() *HealthStatistics
NewHealthStatistics returns a HealthStatistics.
type Highlight ¶
type Highlight struct { BoundaryChars *string `json:"boundary_chars,omitempty"` BoundaryMaxScan *int `json:"boundary_max_scan,omitempty"` BoundaryScanner *boundaryscanner.BoundaryScanner `json:"boundary_scanner,omitempty"` BoundaryScannerLocale *string `json:"boundary_scanner_locale,omitempty"` Encoder *highlighterencoder.HighlighterEncoder `json:"encoder,omitempty"` Fields map[string]HighlightField `json:"fields"` ForceSource *bool `json:"force_source,omitempty"` FragmentSize *int `json:"fragment_size,omitempty"` Fragmenter *highlighterfragmenter.HighlighterFragmenter `json:"fragmenter,omitempty"` HighlightFilter *bool `json:"highlight_filter,omitempty"` HighlightQuery *Query `json:"highlight_query,omitempty"` MaxAnalyzedOffset *int `json:"max_analyzed_offset,omitempty"` MaxFragmentLength *int `json:"max_fragment_length,omitempty"` NoMatchSize *int `json:"no_match_size,omitempty"` NumberOfFragments *int `json:"number_of_fragments,omitempty"` Options map[string]json.RawMessage `json:"options,omitempty"` Order *highlighterorder.HighlighterOrder `json:"order,omitempty"` PhraseLimit *int `json:"phrase_limit,omitempty"` PostTags []string `json:"post_tags,omitempty"` PreTags []string `json:"pre_tags,omitempty"` RequireFieldMatch *bool `json:"require_field_match,omitempty"` TagsSchema *highlightertagsschema.HighlighterTagsSchema `json:"tags_schema,omitempty"` Type *highlightertype.HighlighterType `json:"type,omitempty"` }
Highlight type.
type HighlightField ¶
type HighlightField struct { Analyzer Analyzer `json:"analyzer,omitempty"` BoundaryChars *string `json:"boundary_chars,omitempty"` BoundaryMaxScan *int `json:"boundary_max_scan,omitempty"` BoundaryScanner *boundaryscanner.BoundaryScanner `json:"boundary_scanner,omitempty"` BoundaryScannerLocale *string `json:"boundary_scanner_locale,omitempty"` ForceSource *bool `json:"force_source,omitempty"` FragmentOffset *int `json:"fragment_offset,omitempty"` FragmentSize *int `json:"fragment_size,omitempty"` Fragmenter *highlighterfragmenter.HighlighterFragmenter `json:"fragmenter,omitempty"` HighlightFilter *bool `json:"highlight_filter,omitempty"` HighlightQuery *Query `json:"highlight_query,omitempty"` MatchedFields []string `json:"matched_fields,omitempty"` MaxAnalyzedOffset *int `json:"max_analyzed_offset,omitempty"` MaxFragmentLength *int `json:"max_fragment_length,omitempty"` NoMatchSize *int `json:"no_match_size,omitempty"` NumberOfFragments *int `json:"number_of_fragments,omitempty"` Options map[string]json.RawMessage `json:"options,omitempty"` Order *highlighterorder.HighlighterOrder `json:"order,omitempty"` PhraseLimit *int `json:"phrase_limit,omitempty"` PostTags []string `json:"post_tags,omitempty"` PreTags []string `json:"pre_tags,omitempty"` RequireFieldMatch *bool `json:"require_field_match,omitempty"` TagsSchema *highlightertagsschema.HighlighterTagsSchema `json:"tags_schema,omitempty"` Type *highlightertype.HighlighterType `json:"type,omitempty"` }
HighlightField type.
func NewHighlightField ¶
func NewHighlightField() *HighlightField
NewHighlightField returns a HighlightField.
func (*HighlightField) UnmarshalJSON ¶
func (s *HighlightField) UnmarshalJSON(data []byte) error
type Hint ¶
type Hint struct { // Labels A single key-value pair to match against the labels section // of a profile. A profile is considered matching if it matches // at least one of the strings. Labels map[string][]string `json:"labels,omitempty"` // Uids A list of Profile UIDs to match against. Uids []string `json:"uids,omitempty"` }
Hint type.
type HistogramAggregate ¶
type HistogramAggregate struct { Buckets BucketsHistogramBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
HistogramAggregate type.
func NewHistogramAggregate ¶
func NewHistogramAggregate() *HistogramAggregate
NewHistogramAggregate returns a HistogramAggregate.
func (*HistogramAggregate) UnmarshalJSON ¶
func (s *HistogramAggregate) UnmarshalJSON(data []byte) error
type HistogramAggregation ¶
type HistogramAggregation struct { ExtendedBounds *ExtendedBoundsdouble `json:"extended_bounds,omitempty"` Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` HardBounds *ExtendedBoundsdouble `json:"hard_bounds,omitempty"` Interval *Float64 `json:"interval,omitempty"` Keyed *bool `json:"keyed,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` MinDocCount *int `json:"min_doc_count,omitempty"` Missing *Float64 `json:"missing,omitempty"` Name *string `json:"name,omitempty"` Offset *Float64 `json:"offset,omitempty"` Order AggregateOrder `json:"order,omitempty"` Script Script `json:"script,omitempty"` }
HistogramAggregation type.
func NewHistogramAggregation ¶
func NewHistogramAggregation() *HistogramAggregation
NewHistogramAggregation returns a HistogramAggregation.
func (*HistogramAggregation) UnmarshalJSON ¶
func (s *HistogramAggregation) UnmarshalJSON(data []byte) error
type HistogramBucket ¶
type HistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key Float64 `json:"key"` KeyAsString *string `json:"key_as_string,omitempty"` }
HistogramBucket type.
func NewHistogramBucket ¶
func NewHistogramBucket() *HistogramBucket
NewHistogramBucket returns a HistogramBucket.
func (HistogramBucket) MarshalJSON ¶
func (s HistogramBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*HistogramBucket) UnmarshalJSON ¶
func (s *HistogramBucket) UnmarshalJSON(data []byte) error
type HistogramGrouping ¶
HistogramGrouping type.
func NewHistogramGrouping ¶
func NewHistogramGrouping() *HistogramGrouping
NewHistogramGrouping returns a HistogramGrouping.
type HistogramProperty ¶
type HistogramProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Type string `json:"type,omitempty"` }
HistogramProperty type.
func NewHistogramProperty ¶
func NewHistogramProperty() *HistogramProperty
NewHistogramProperty returns a HistogramProperty.
func (*HistogramProperty) UnmarshalJSON ¶
func (s *HistogramProperty) UnmarshalJSON(data []byte) error
type Hit ¶
type Hit struct { Explanation_ *Explanation `json:"_explanation,omitempty"` Fields map[string]json.RawMessage `json:"fields,omitempty"` Highlight map[string][]string `json:"highlight,omitempty"` Id_ string `json:"_id"` IgnoredFieldValues map[string][]string `json:"ignored_field_values,omitempty"` Ignored_ []string `json:"_ignored,omitempty"` Index_ string `json:"_index"` InnerHits map[string]InnerHitsResult `json:"inner_hits,omitempty"` MatchedQueries []string `json:"matched_queries,omitempty"` Nested_ *NestedIdentity `json:"_nested,omitempty"` Node_ *string `json:"_node,omitempty"` PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Routing_ *string `json:"_routing,omitempty"` Score_ Float64 `json:"_score,omitempty"` SeqNo_ *int64 `json:"_seq_no,omitempty"` Shard_ *string `json:"_shard,omitempty"` Sort []FieldValue `json:"sort,omitempty"` Source_ json.RawMessage `json:"_source,omitempty"` Version_ *int64 `json:"_version,omitempty"` }
Hit type.
type HitsEvent ¶
type HitsEvent struct { Fields map[string][]json.RawMessage `json:"fields,omitempty"` // Id_ Unique identifier for the event. This ID is only unique within the index. Id_ string `json:"_id"` // Index_ Name of the index containing the event. Index_ string `json:"_index"` // Source_ Original JSON body passed for the event at index time. Source_ json.RawMessage `json:"_source,omitempty"` }
HitsEvent type.
type HitsMetadata ¶
type HitsMetadata struct { Hits []Hit `json:"hits"` MaxScore Float64 `json:"max_score,omitempty"` // Total Total hit count information, present only if `track_total_hits` wasn't // `false` in the search request. Total *TotalHits `json:"total,omitempty"` }
HitsMetadata type.
func NewHitsMetadata ¶
func NewHitsMetadata() *HitsMetadata
NewHitsMetadata returns a HitsMetadata.
type HitsSequence ¶
type HitsSequence struct { // Events Contains events matching the query. Each object represents a matching event. Events []HitsEvent `json:"events"` // JoinKeys Shared field values used to constrain matches in the sequence. These are // defined using the by keyword in the EQL query syntax. JoinKeys []json.RawMessage `json:"join_keys"` }
HitsSequence type.
func NewHitsSequence ¶
func NewHitsSequence() *HitsSequence
NewHitsSequence returns a HitsSequence.
type HoltLinearModelSettings ¶
type HoltLinearModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` Beta *float32 `json:"beta,omitempty"` }
HoltLinearModelSettings type.
func NewHoltLinearModelSettings ¶
func NewHoltLinearModelSettings() *HoltLinearModelSettings
NewHoltLinearModelSettings returns a HoltLinearModelSettings.
type HoltMovingAverageAggregation ¶
type HoltMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings HoltLinearModelSettings `json:"settings"` Window *int `json:"window,omitempty"` }
HoltMovingAverageAggregation type.
func NewHoltMovingAverageAggregation ¶
func NewHoltMovingAverageAggregation() *HoltMovingAverageAggregation
NewHoltMovingAverageAggregation returns a HoltMovingAverageAggregation.
func (*HoltMovingAverageAggregation) UnmarshalJSON ¶
func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error
type HoltWintersModelSettings ¶
type HoltWintersModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` Beta *float32 `json:"beta,omitempty"` Gamma *float32 `json:"gamma,omitempty"` Pad *bool `json:"pad,omitempty"` Period *int `json:"period,omitempty"` Type *holtwinterstype.HoltWintersType `json:"type,omitempty"` }
HoltWintersModelSettings type.
func NewHoltWintersModelSettings ¶
func NewHoltWintersModelSettings() *HoltWintersModelSettings
NewHoltWintersModelSettings returns a HoltWintersModelSettings.
type HoltWintersMovingAverageAggregation ¶
type HoltWintersMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings HoltWintersModelSettings `json:"settings"` Window *int `json:"window,omitempty"` }
HoltWintersMovingAverageAggregation type.
func NewHoltWintersMovingAverageAggregation ¶
func NewHoltWintersMovingAverageAggregation() *HoltWintersMovingAverageAggregation
NewHoltWintersMovingAverageAggregation returns a HoltWintersMovingAverageAggregation.
func (*HoltWintersMovingAverageAggregation) UnmarshalJSON ¶
func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error
type Hop ¶
type Hop struct { Connections *Hop `json:"connections,omitempty"` Query Query `json:"query"` Vertices []VertexDefinition `json:"vertices"` }
Hop type.
type HotThread ¶
type HotThread struct { Hosts []string `json:"hosts"` NodeId string `json:"node_id"` NodeName string `json:"node_name"` Threads []string `json:"threads"` }
HotThread type.
type HourAndMinute ¶
HourAndMinute type.
func NewHourAndMinute ¶
func NewHourAndMinute() *HourAndMinute
NewHourAndMinute returns a HourAndMinute.
type HourlySchedule ¶
type HourlySchedule struct {
Minute []int `json:"minute"`
}
HourlySchedule type.
func NewHourlySchedule ¶
func NewHourlySchedule() *HourlySchedule
NewHourlySchedule returns a HourlySchedule.
type HtmlStripCharFilter ¶
type HtmlStripCharFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
HtmlStripCharFilter type.
func NewHtmlStripCharFilter ¶
func NewHtmlStripCharFilter() *HtmlStripCharFilter
NewHtmlStripCharFilter returns a HtmlStripCharFilter.
type Http ¶
type Http struct { Clients []Client `json:"clients,omitempty"` CurrentOpen *int `json:"current_open,omitempty"` TotalOpened *int64 `json:"total_opened,omitempty"` }
Http type.
type HttpEmailAttachment ¶
type HttpEmailAttachment struct { ContentType *string `json:"content_type,omitempty"` Inline *bool `json:"inline,omitempty"` Request *HttpInputRequestDefinition `json:"request,omitempty"` }
HttpEmailAttachment type.
func NewHttpEmailAttachment ¶
func NewHttpEmailAttachment() *HttpEmailAttachment
NewHttpEmailAttachment returns a HttpEmailAttachment.
type HttpHeaders ¶
HttpHeaders type alias.
type HttpInput ¶
type HttpInput struct { Extract []string `json:"extract,omitempty"` Request *HttpInputRequestDefinition `json:"request,omitempty"` ResponseContentType *responsecontenttype.ResponseContentType `json:"response_content_type,omitempty"` }
HttpInput type.
type HttpInputAuthentication ¶
type HttpInputAuthentication struct {
Basic HttpInputBasicAuthentication `json:"basic"`
}
HttpInputAuthentication type.
func NewHttpInputAuthentication ¶
func NewHttpInputAuthentication() *HttpInputAuthentication
NewHttpInputAuthentication returns a HttpInputAuthentication.
type HttpInputBasicAuthentication ¶
type HttpInputBasicAuthentication struct { Password string `json:"password"` Username string `json:"username"` }
HttpInputBasicAuthentication type.
func NewHttpInputBasicAuthentication ¶
func NewHttpInputBasicAuthentication() *HttpInputBasicAuthentication
NewHttpInputBasicAuthentication returns a HttpInputBasicAuthentication.
type HttpInputProxy ¶
HttpInputProxy type.
func NewHttpInputProxy ¶
func NewHttpInputProxy() *HttpInputProxy
NewHttpInputProxy returns a HttpInputProxy.
type HttpInputRequestDefinition ¶
type HttpInputRequestDefinition struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` ConnectionTimeout Duration `json:"connection_timeout,omitempty"` Headers map[string]string `json:"headers,omitempty"` Host *string `json:"host,omitempty"` Method *httpinputmethod.HttpInputMethod `json:"method,omitempty"` Params map[string]string `json:"params,omitempty"` Path *string `json:"path,omitempty"` Port *uint `json:"port,omitempty"` Proxy *HttpInputProxy `json:"proxy,omitempty"` ReadTimeout Duration `json:"read_timeout,omitempty"` Scheme *connectionscheme.ConnectionScheme `json:"scheme,omitempty"` Url *string `json:"url,omitempty"` }
HttpInputRequestDefinition type.
func NewHttpInputRequestDefinition ¶
func NewHttpInputRequestDefinition() *HttpInputRequestDefinition
NewHttpInputRequestDefinition returns a HttpInputRequestDefinition.
type HttpInputRequestResult ¶
type HttpInputRequestResult struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` ConnectionTimeout Duration `json:"connection_timeout,omitempty"` Headers map[string]string `json:"headers,omitempty"` Host *string `json:"host,omitempty"` Method *httpinputmethod.HttpInputMethod `json:"method,omitempty"` Params map[string]string `json:"params,omitempty"` Path *string `json:"path,omitempty"` Port *uint `json:"port,omitempty"` Proxy *HttpInputProxy `json:"proxy,omitempty"` ReadTimeout Duration `json:"read_timeout,omitempty"` Scheme *connectionscheme.ConnectionScheme `json:"scheme,omitempty"` Url *string `json:"url,omitempty"` }
HttpInputRequestResult type.
func NewHttpInputRequestResult ¶
func NewHttpInputRequestResult() *HttpInputRequestResult
NewHttpInputRequestResult returns a HttpInputRequestResult.
type HttpInputResponseResult ¶
type HttpInputResponseResult struct { Body string `json:"body"` Headers map[string][]string `json:"headers"` Status int `json:"status"` }
HttpInputResponseResult type.
func NewHttpInputResponseResult ¶
func NewHttpInputResponseResult() *HttpInputResponseResult
NewHttpInputResponseResult returns a HttpInputResponseResult.
type HunspellTokenFilter ¶
type HunspellTokenFilter struct { Dedup *bool `json:"dedup,omitempty"` Dictionary *string `json:"dictionary,omitempty"` Locale string `json:"locale"` LongestOnly *bool `json:"longest_only,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
HunspellTokenFilter type.
func NewHunspellTokenFilter ¶
func NewHunspellTokenFilter() *HunspellTokenFilter
NewHunspellTokenFilter returns a HunspellTokenFilter.
type Hyperparameter ¶
type Hyperparameter struct { // AbsoluteImportance A positive number showing how much the parameter influences the variation of // the loss function. For hyperparameters with values that are not specified by // the user but tuned during hyperparameter optimization. AbsoluteImportance *Float64 `json:"absolute_importance,omitempty"` // Name Name of the hyperparameter. Name string `json:"name"` // RelativeImportance A number between 0 and 1 showing the proportion of influence on the variation // of the loss function among all tuned hyperparameters. For hyperparameters // with values that are not specified by the user but tuned during // hyperparameter optimization. RelativeImportance *Float64 `json:"relative_importance,omitempty"` // Supplied Indicates if the hyperparameter is specified by the user (true) or optimized // (false). Supplied bool `json:"supplied"` // Value The value of the hyperparameter, either optimized or specified by the user. Value Float64 `json:"value"` }
Hyperparameter type.
func NewHyperparameter ¶
func NewHyperparameter() *Hyperparameter
NewHyperparameter returns a Hyperparameter.
type Hyperparameters ¶
type Hyperparameters struct { Alpha *Float64 `json:"alpha,omitempty"` DownsampleFactor *Float64 `json:"downsample_factor,omitempty"` Eta *Float64 `json:"eta,omitempty"` EtaGrowthRatePerTree *Float64 `json:"eta_growth_rate_per_tree,omitempty"` FeatureBagFraction *Float64 `json:"feature_bag_fraction,omitempty"` Gamma *Float64 `json:"gamma,omitempty"` Lambda *Float64 `json:"lambda,omitempty"` MaxAttemptsToAddTree *int `json:"max_attempts_to_add_tree,omitempty"` MaxOptimizationRoundsPerHyperparameter *int `json:"max_optimization_rounds_per_hyperparameter,omitempty"` MaxTrees *int `json:"max_trees,omitempty"` NumFolds *int `json:"num_folds,omitempty"` NumSplitsPerFeature *int `json:"num_splits_per_feature,omitempty"` SoftTreeDepthLimit *int `json:"soft_tree_depth_limit,omitempty"` SoftTreeDepthTolerance *Float64 `json:"soft_tree_depth_tolerance,omitempty"` }
Hyperparameters type.
func NewHyperparameters ¶
func NewHyperparameters() *Hyperparameters
NewHyperparameters returns a Hyperparameters.
type HyphenationDecompounderTokenFilter ¶
type HyphenationDecompounderTokenFilter struct { HyphenationPatternsPath *string `json:"hyphenation_patterns_path,omitempty"` MaxSubwordSize *int `json:"max_subword_size,omitempty"` MinSubwordSize *int `json:"min_subword_size,omitempty"` MinWordSize *int `json:"min_word_size,omitempty"` OnlyLongestMatch *bool `json:"only_longest_match,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` WordList []string `json:"word_list,omitempty"` WordListPath *string `json:"word_list_path,omitempty"` }
HyphenationDecompounderTokenFilter type.
func NewHyphenationDecompounderTokenFilter ¶
func NewHyphenationDecompounderTokenFilter() *HyphenationDecompounderTokenFilter
NewHyphenationDecompounderTokenFilter returns a HyphenationDecompounderTokenFilter.
type IcuAnalyzer ¶
type IcuAnalyzer struct { Method icunormalizationtype.IcuNormalizationType `json:"method"` Mode icunormalizationmode.IcuNormalizationMode `json:"mode"` Type string `json:"type,omitempty"` }
IcuAnalyzer type.
type IcuCollationTokenFilter ¶
type IcuCollationTokenFilter struct { Alternate *icucollationalternate.IcuCollationAlternate `json:"alternate,omitempty"` CaseFirst *icucollationcasefirst.IcuCollationCaseFirst `json:"caseFirst,omitempty"` CaseLevel *bool `json:"caseLevel,omitempty"` Country *string `json:"country,omitempty"` Decomposition *icucollationdecomposition.IcuCollationDecomposition `json:"decomposition,omitempty"` HiraganaQuaternaryMode *bool `json:"hiraganaQuaternaryMode,omitempty"` Language *string `json:"language,omitempty"` Numeric *bool `json:"numeric,omitempty"` Rules *string `json:"rules,omitempty"` Strength *icucollationstrength.IcuCollationStrength `json:"strength,omitempty"` Type string `json:"type,omitempty"` VariableTop *string `json:"variableTop,omitempty"` Variant *string `json:"variant,omitempty"` Version *string `json:"version,omitempty"` }
IcuCollationTokenFilter type.
func NewIcuCollationTokenFilter ¶
func NewIcuCollationTokenFilter() *IcuCollationTokenFilter
NewIcuCollationTokenFilter returns an IcuCollationTokenFilter.
type IcuFoldingTokenFilter ¶
type IcuFoldingTokenFilter struct { Type string `json:"type,omitempty"` UnicodeSetFilter string `json:"unicode_set_filter"` Version *string `json:"version,omitempty"` }
IcuFoldingTokenFilter type.
func NewIcuFoldingTokenFilter ¶
func NewIcuFoldingTokenFilter() *IcuFoldingTokenFilter
NewIcuFoldingTokenFilter returns an IcuFoldingTokenFilter.
type IcuNormalizationCharFilter ¶
type IcuNormalizationCharFilter struct { Mode *icunormalizationmode.IcuNormalizationMode `json:"mode,omitempty"` Name *icunormalizationtype.IcuNormalizationType `json:"name,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
IcuNormalizationCharFilter type.
func NewIcuNormalizationCharFilter ¶
func NewIcuNormalizationCharFilter() *IcuNormalizationCharFilter
NewIcuNormalizationCharFilter returns an IcuNormalizationCharFilter.
type IcuNormalizationTokenFilter ¶
type IcuNormalizationTokenFilter struct { Name icunormalizationtype.IcuNormalizationType `json:"name"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
IcuNormalizationTokenFilter type.
func NewIcuNormalizationTokenFilter ¶
func NewIcuNormalizationTokenFilter() *IcuNormalizationTokenFilter
NewIcuNormalizationTokenFilter returns an IcuNormalizationTokenFilter.
type IcuTokenizer ¶
type IcuTokenizer struct { RuleFiles string `json:"rule_files"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
IcuTokenizer type.
func NewIcuTokenizer ¶
func NewIcuTokenizer() *IcuTokenizer
NewIcuTokenizer returns an IcuTokenizer.
type IcuTransformTokenFilter ¶
type IcuTransformTokenFilter struct { Dir *icutransformdirection.IcuTransformDirection `json:"dir,omitempty"` Id string `json:"id"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
IcuTransformTokenFilter type.
func NewIcuTransformTokenFilter ¶
func NewIcuTransformTokenFilter() *IcuTransformTokenFilter
NewIcuTransformTokenFilter returns an IcuTransformTokenFilter.
type IdsQuery ¶
type IdsQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` Values []string `json:"values,omitempty"` }
IdsQuery type.
type Ilm ¶
type Ilm struct { PolicyCount int `json:"policy_count"` PolicyStats []IlmPolicyStatistics `json:"policy_stats"` }
Ilm type.
type IlmPolicy ¶
type IlmPolicy struct { Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` Phases Phases `json:"phases"` }
IlmPolicy type.
type IlmPolicyStatistics ¶
type IlmPolicyStatistics struct { IndicesManaged int `json:"indices_managed"` Phases Phases `json:"phases"` }
IlmPolicyStatistics type.
func NewIlmPolicyStatistics ¶
func NewIlmPolicyStatistics() *IlmPolicyStatistics
NewIlmPolicyStatistics returns an IlmPolicyStatistics.
type InProgress ¶
type InProgress struct { Name string `json:"name"` StartTimeMillis int64 `json:"start_time_millis"` State string `json:"state"` Uuid string `json:"uuid"` }
InProgress type.
type IndexAction ¶
type IndexAction struct { DocId *string `json:"doc_id,omitempty"` ExecutionTimeField *string `json:"execution_time_field,omitempty"` Index string `json:"index"` OpType *optype.OpType `json:"op_type,omitempty"` Refresh *refresh.Refresh `json:"refresh,omitempty"` Timeout Duration `json:"timeout,omitempty"` }
IndexAction type.
type IndexAliases ¶
type IndexAliases struct {
Aliases map[string]AliasDefinition `json:"aliases"`
}
IndexAliases type.
func NewIndexAliases ¶
func NewIndexAliases() *IndexAliases
NewIndexAliases returns an IndexAliases.
type IndexAndDataStreamAction ¶
type IndexAndDataStreamAction struct { DataStream string `json:"data_stream"` Index string `json:"index"` }
IndexAndDataStreamAction type.
func NewIndexAndDataStreamAction ¶
func NewIndexAndDataStreamAction() *IndexAndDataStreamAction
NewIndexAndDataStreamAction returns an IndexAndDataStreamAction.
type IndexCapabilities ¶
type IndexCapabilities struct {
RollupJobs []RollupJobSummary `json:"rollup_jobs"`
}
IndexCapabilities type.
func NewIndexCapabilities ¶
func NewIndexCapabilities() *IndexCapabilities
NewIndexCapabilities returns an IndexCapabilities.
type IndexDetails ¶
type IndexDetails struct { MaxSegmentsPerShard int64 `json:"max_segments_per_shard"` ShardCount int `json:"shard_count"` Size ByteSize `json:"size,omitempty"` SizeInBytes int64 `json:"size_in_bytes"` }
IndexDetails type.
func NewIndexDetails ¶
func NewIndexDetails() *IndexDetails
NewIndexDetails returns an IndexDetails.
type IndexHealthStats ¶
type IndexHealthStats struct { ActivePrimaryShards int `json:"active_primary_shards"` ActiveShards int `json:"active_shards"` InitializingShards int `json:"initializing_shards"` NumberOfReplicas int `json:"number_of_replicas"` NumberOfShards int `json:"number_of_shards"` RelocatingShards int `json:"relocating_shards"` Shards map[string]ShardHealthStats `json:"shards,omitempty"` Status healthstatus.HealthStatus `json:"status"` UnassignedShards int `json:"unassigned_shards"` }
IndexHealthStats type.
func NewIndexHealthStats ¶
func NewIndexHealthStats() *IndexHealthStats
NewIndexHealthStats returns an IndexHealthStats.
type IndexMappingRecord ¶
type IndexMappingRecord struct { Item *TypeMapping `json:"item,omitempty"` Mappings TypeMapping `json:"mappings"` }
IndexMappingRecord type.
func NewIndexMappingRecord ¶
func NewIndexMappingRecord() *IndexMappingRecord
NewIndexMappingRecord returns an IndexMappingRecord.
type IndexPrivilegesCheck ¶
type IndexPrivilegesCheck struct { // AllowRestrictedIndices This needs to be set to true (default is false) if using wildcards or regexps // for patterns that cover restricted indices. // Implicitly, restricted indices do not match index patterns because restricted // indices usually have limited privileges and including them in pattern tests // would render most such tests false. // If restricted indices are explicitly included in the names list, privileges // will be checked against them regardless of the value of // allow_restricted_indices. AllowRestrictedIndices *bool `json:"allow_restricted_indices,omitempty"` // Names A list of indices. Names []string `json:"names"` // Privileges A list of the privileges that you want to check for the specified indices. Privileges []indexprivilege.IndexPrivilege `json:"privileges"` }
IndexPrivilegesCheck type.
func NewIndexPrivilegesCheck ¶
func NewIndexPrivilegesCheck() *IndexPrivilegesCheck
NewIndexPrivilegesCheck returns an IndexPrivilegesCheck.
type IndexResult ¶
type IndexResult struct {
Response IndexResultSummary `json:"response"`
}
IndexResult type.
type IndexResultSummary ¶
type IndexResultSummary struct { Created bool `json:"created"` Id string `json:"id"` Index string `json:"index"` Result result.Result `json:"result"` Version int64 `json:"version"` }
IndexResultSummary type.
func NewIndexResultSummary ¶
func NewIndexResultSummary() *IndexResultSummary
NewIndexResultSummary returns an IndexResultSummary.
type IndexRouting ¶
type IndexRouting struct { Allocation *IndexRoutingAllocation `json:"allocation,omitempty"` Rebalance *IndexRoutingRebalance `json:"rebalance,omitempty"` }
IndexRouting type.
func NewIndexRouting ¶
func NewIndexRouting() *IndexRouting
NewIndexRouting returns an IndexRouting.
type IndexRoutingAllocation ¶
type IndexRoutingAllocation struct { Disk *IndexRoutingAllocationDisk `json:"disk,omitempty"` Enable *indexroutingallocationoptions.IndexRoutingAllocationOptions `json:"enable,omitempty"` Include *IndexRoutingAllocationInclude `json:"include,omitempty"` InitialRecovery *IndexRoutingAllocationInitialRecovery `json:"initial_recovery,omitempty"` }
IndexRoutingAllocation type.
func NewIndexRoutingAllocation ¶
func NewIndexRoutingAllocation() *IndexRoutingAllocation
NewIndexRoutingAllocation returns an IndexRoutingAllocation.
type IndexRoutingAllocationDisk ¶
type IndexRoutingAllocationDisk struct {
ThresholdEnabled string `json:"threshold_enabled,omitempty"`
}
IndexRoutingAllocationDisk type.
func NewIndexRoutingAllocationDisk ¶
func NewIndexRoutingAllocationDisk() *IndexRoutingAllocationDisk
NewIndexRoutingAllocationDisk returns an IndexRoutingAllocationDisk.
type IndexRoutingAllocationInclude ¶
type IndexRoutingAllocationInclude struct { Id_ *string `json:"_id,omitempty"` TierPreference_ *string `json:"_tier_preference,omitempty"` }
IndexRoutingAllocationInclude type.
func NewIndexRoutingAllocationInclude ¶
func NewIndexRoutingAllocationInclude() *IndexRoutingAllocationInclude
NewIndexRoutingAllocationInclude returns an IndexRoutingAllocationInclude.
type IndexRoutingAllocationInitialRecovery ¶
type IndexRoutingAllocationInitialRecovery struct {
Id_ *string `json:"_id,omitempty"`
}
IndexRoutingAllocationInitialRecovery type.
func NewIndexRoutingAllocationInitialRecovery ¶
func NewIndexRoutingAllocationInitialRecovery() *IndexRoutingAllocationInitialRecovery
NewIndexRoutingAllocationInitialRecovery returns an IndexRoutingAllocationInitialRecovery.
type IndexRoutingRebalance ¶
type IndexRoutingRebalance struct {
Enable indexroutingrebalanceoptions.IndexRoutingRebalanceOptions `json:"enable"`
}
IndexRoutingRebalance type.
func NewIndexRoutingRebalance ¶
func NewIndexRoutingRebalance() *IndexRoutingRebalance
NewIndexRoutingRebalance returns an IndexRoutingRebalance.
type IndexSegment ¶
type IndexSegment struct {
Shards map[string][]ShardsSegment `json:"shards"`
}
IndexSegment type.
func NewIndexSegment ¶
func NewIndexSegment() *IndexSegment
NewIndexSegment returns an IndexSegment.
type IndexSegmentSort ¶
type IndexSegmentSort struct { Field []string `json:"field,omitempty"` Missing []segmentsortmissing.SegmentSortMissing `json:"missing,omitempty"` Mode []segmentsortmode.SegmentSortMode `json:"mode,omitempty"` Order []segmentsortorder.SegmentSortOrder `json:"order,omitempty"` }
IndexSegmentSort type.
func NewIndexSegmentSort ¶
func NewIndexSegmentSort() *IndexSegmentSort
NewIndexSegmentSort returns an IndexSegmentSort.
type IndexSettingBlocks ¶
type IndexSettingBlocks struct { Metadata *bool `json:"metadata,omitempty"` Read *bool `json:"read,omitempty"` ReadOnly *bool `json:"read_only,omitempty"` ReadOnlyAllowDelete *bool `json:"read_only_allow_delete,omitempty"` Write string `json:"write,omitempty"` }
IndexSettingBlocks type.
func NewIndexSettingBlocks ¶
func NewIndexSettingBlocks() *IndexSettingBlocks
NewIndexSettingBlocks returns an IndexSettingBlocks.
type IndexSettings ¶
type IndexSettings struct { Analysis *IndexSettingsAnalysis `json:"analysis,omitempty"` // Analyze Settings to define analyzers, tokenizers, token filters and character // filters. Analyze *SettingsAnalyze `json:"analyze,omitempty"` AutoExpandReplicas *string `json:"auto_expand_replicas,omitempty"` Blocks *IndexSettingBlocks `json:"blocks,omitempty"` CheckOnStartup *indexcheckonstartup.IndexCheckOnStartup `json:"check_on_startup,omitempty"` Codec *string `json:"codec,omitempty"` CreationDate StringifiedEpochTimeUnitMillis `json:"creation_date,omitempty"` CreationDateString DateTime `json:"creation_date_string,omitempty"` DefaultPipeline *string `json:"default_pipeline,omitempty"` FinalPipeline *string `json:"final_pipeline,omitempty"` Format string `json:"format,omitempty"` GcDeletes Duration `json:"gc_deletes,omitempty"` Hidden string `json:"hidden,omitempty"` Highlight *SettingsHighlight `json:"highlight,omitempty"` Index *IndexSettings `json:"index,omitempty"` IndexSettings map[string]json.RawMessage `json:"-"` // IndexingPressure Configure indexing back pressure limits. IndexingPressure *IndicesIndexingPressure `json:"indexing_pressure,omitempty"` IndexingSlowlog *SlowlogSettings `json:"indexing.slowlog,omitempty"` Lifecycle *IndexSettingsLifecycle `json:"lifecycle,omitempty"` LoadFixedBitsetFiltersEagerly *bool `json:"load_fixed_bitset_filters_eagerly,omitempty"` // Mapping Enable or disable dynamic mapping for an index. 
Mapping *MappingLimitSettings `json:"mapping,omitempty"` MaxDocvalueFieldsSearch *int `json:"max_docvalue_fields_search,omitempty"` MaxInnerResultWindow *int `json:"max_inner_result_window,omitempty"` MaxNgramDiff *int `json:"max_ngram_diff,omitempty"` MaxRefreshListeners *int `json:"max_refresh_listeners,omitempty"` MaxRegexLength *int `json:"max_regex_length,omitempty"` MaxRescoreWindow *int `json:"max_rescore_window,omitempty"` MaxResultWindow *int `json:"max_result_window,omitempty"` MaxScriptFields *int `json:"max_script_fields,omitempty"` MaxShingleDiff *int `json:"max_shingle_diff,omitempty"` MaxSlicesPerScroll *int `json:"max_slices_per_scroll,omitempty"` MaxTermsCount *int `json:"max_terms_count,omitempty"` Merge *Merge `json:"merge,omitempty"` Mode *string `json:"mode,omitempty"` NumberOfReplicas string `json:"number_of_replicas,omitempty"` NumberOfRoutingShards *int `json:"number_of_routing_shards,omitempty"` NumberOfShards string `json:"number_of_shards,omitempty"` Priority string `json:"priority,omitempty"` ProvidedName *string `json:"provided_name,omitempty"` Queries *Queries `json:"queries,omitempty"` QueryString *SettingsQueryString `json:"query_string,omitempty"` RefreshInterval Duration `json:"refresh_interval,omitempty"` Routing *IndexRouting `json:"routing,omitempty"` RoutingPartitionSize *int `json:"routing_partition_size,omitempty"` RoutingPath []string `json:"routing_path,omitempty"` Search *SettingsSearch `json:"search,omitempty"` Settings *IndexSettings `json:"settings,omitempty"` Shards *int `json:"shards,omitempty"` // Similarity Configure custom similarity settings to customize how search results are // scored. Similarity *SettingsSimilarity `json:"similarity,omitempty"` SoftDeletes *SoftDeletes `json:"soft_deletes,omitempty"` Sort *IndexSegmentSort `json:"sort,omitempty"` // Store The store module allows you to control how index data is stored and accessed // on disk. 
Store *Storage `json:"store,omitempty"` TimeSeries *IndexSettingsTimeSeries `json:"time_series,omitempty"` TopMetricsMaxSize *int `json:"top_metrics_max_size,omitempty"` Translog *Translog `json:"translog,omitempty"` Uuid *string `json:"uuid,omitempty"` VerifiedBeforeClose string `json:"verified_before_close,omitempty"` Version *IndexVersioning `json:"version,omitempty"` }
IndexSettings type.
func NewIndexSettings ¶
func NewIndexSettings() *IndexSettings
NewIndexSettings returns an IndexSettings.
func (IndexSettings) MarshalJSON ¶
func (s IndexSettings) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type IndexSettingsAnalysis ¶
type IndexSettingsAnalysis struct { Analyzer map[string]Analyzer `json:"analyzer,omitempty"` CharFilter map[string]CharFilter `json:"char_filter,omitempty"` Filter map[string]TokenFilter `json:"filter,omitempty"` Normalizer map[string]Normalizer `json:"normalizer,omitempty"` Tokenizer map[string]Tokenizer `json:"tokenizer,omitempty"` }
IndexSettingsAnalysis type.
func NewIndexSettingsAnalysis ¶
func NewIndexSettingsAnalysis() *IndexSettingsAnalysis
NewIndexSettingsAnalysis returns an IndexSettingsAnalysis.
func (*IndexSettingsAnalysis) UnmarshalJSON ¶
func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error
type IndexSettingsLifecycle ¶
type IndexSettingsLifecycle struct { // IndexingComplete Indicates whether or not the index has been rolled over. Automatically set to // true when ILM completes the rollover action. // You can explicitly set it to skip rollover. IndexingComplete *bool `json:"indexing_complete,omitempty"` // Name The name of the policy to use to manage the index. For information about how // Elasticsearch applies policy changes, see Policy updates. Name string `json:"name"` // OriginationDate If specified, this is the timestamp used to calculate the index age for its // phase transitions. Use this setting // if you create a new index that contains old data and want to use the original // creation date to calculate the index // age. Specified as a Unix epoch value in milliseconds. OriginationDate *int64 `json:"origination_date,omitempty"` // ParseOriginationDate Set to true to parse the origination date from the index name. This // origination date is used to calculate the index age // for its phase transitions. The index name must match the pattern // ^.*-{date_format}-\\d+, where the date_format is // yyyy.MM.dd and the trailing digits are optional. An index that was rolled // over would normally match the full format, // for example logs-2016.10.31-000002). If the index name doesn’t match the // pattern, index creation fails. ParseOriginationDate *bool `json:"parse_origination_date,omitempty"` // RolloverAlias The index alias to update when the index rolls over. Specify when using a // policy that contains a rollover action. // When the index rolls over, the alias is updated to reflect that the index is // no longer the write index. For more // information about rolling indices, see Rollover. RolloverAlias *string `json:"rollover_alias,omitempty"` Step *IndexSettingsLifecycleStep `json:"step,omitempty"` }
IndexSettingsLifecycle type.
func NewIndexSettingsLifecycle ¶
func NewIndexSettingsLifecycle() *IndexSettingsLifecycle
NewIndexSettingsLifecycle returns an IndexSettingsLifecycle.
type IndexSettingsLifecycleStep ¶
type IndexSettingsLifecycleStep struct { // WaitTimeThreshold Time to wait for the cluster to resolve allocation issues during an ILM // shrink action. Must be greater than 1h (1 hour). // See Shard allocation for shrink. WaitTimeThreshold Duration `json:"wait_time_threshold,omitempty"` }
IndexSettingsLifecycleStep type.
func NewIndexSettingsLifecycleStep ¶
func NewIndexSettingsLifecycleStep() *IndexSettingsLifecycleStep
NewIndexSettingsLifecycleStep returns an IndexSettingsLifecycleStep.
type IndexSettingsTimeSeries ¶
type IndexSettingsTimeSeries struct { EndTime DateTime `json:"end_time,omitempty"` StartTime DateTime `json:"start_time,omitempty"` }
IndexSettingsTimeSeries type.
func NewIndexSettingsTimeSeries ¶
func NewIndexSettingsTimeSeries() *IndexSettingsTimeSeries
NewIndexSettingsTimeSeries returns an IndexSettingsTimeSeries.
type IndexState ¶
type IndexState struct { Aliases map[string]Alias `json:"aliases,omitempty"` DataStream *string `json:"data_stream,omitempty"` // Defaults Default settings, included when the request's `include_default` is `true`. Defaults *IndexSettings `json:"defaults,omitempty"` Mappings *TypeMapping `json:"mappings,omitempty"` Settings *IndexSettings `json:"settings,omitempty"` }
IndexState type.
func (*IndexState) UnmarshalJSON ¶ added in v0.0.1
func (s *IndexState) UnmarshalJSON(data []byte) error
type IndexStats ¶
type IndexStats struct { Bulk *BulkStats `json:"bulk,omitempty"` // Completion Contains statistics about completions across all shards assigned to the node. Completion *CompletionStats `json:"completion,omitempty"` // Docs Contains statistics about documents across all primary shards assigned to the // node. Docs *DocStats `json:"docs,omitempty"` // Fielddata Contains statistics about the field data cache across all shards assigned to // the node. Fielddata *FielddataStats `json:"fielddata,omitempty"` // Flush Contains statistics about flush operations for the node. Flush *FlushStats `json:"flush,omitempty"` // Get Contains statistics about get operations for the node. Get *GetStats `json:"get,omitempty"` // Indexing Contains statistics about indexing operations for the node. Indexing *IndexingStats `json:"indexing,omitempty"` // Indices Contains statistics about indices operations for the node. Indices *IndicesStats `json:"indices,omitempty"` // Merges Contains statistics about merge operations for the node. Merges *MergesStats `json:"merges,omitempty"` // QueryCache Contains statistics about the query cache across all shards assigned to the // node. QueryCache *QueryCacheStats `json:"query_cache,omitempty"` // Recovery Contains statistics about recovery operations for the node. Recovery *RecoveryStats `json:"recovery,omitempty"` // Refresh Contains statistics about refresh operations for the node. Refresh *RefreshStats `json:"refresh,omitempty"` // RequestCache Contains statistics about the request cache across all shards assigned to the // node. RequestCache *RequestCacheStats `json:"request_cache,omitempty"` // Search Contains statistics about search operations for the node. Search *SearchStats `json:"search,omitempty"` // Segments Contains statistics about segments across all shards assigned to the node. 
Segments *SegmentsStats `json:"segments,omitempty"` ShardStats *ShardsTotalStats `json:"shard_stats,omitempty"` // Store Contains statistics about the size of shards assigned to the node. Store *StoreStats `json:"store,omitempty"` // Translog Contains statistics about transaction log operations for the node. Translog *TranslogStats `json:"translog,omitempty"` // Warmer Contains statistics about index warming operations for the node. Warmer *WarmerStats `json:"warmer,omitempty"` }
IndexStats type.
type IndexTemplate ¶
type IndexTemplate struct { AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` ComposedOf []string `json:"composed_of"` DataStream *IndexTemplateDataStreamConfiguration `json:"data_stream,omitempty"` IndexPatterns []string `json:"index_patterns"` Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` Priority *int64 `json:"priority,omitempty"` Template *IndexTemplateSummary `json:"template,omitempty"` Version *int64 `json:"version,omitempty"` }
IndexTemplate type.
func NewIndexTemplate ¶
func NewIndexTemplate() *IndexTemplate
NewIndexTemplate returns an IndexTemplate.
type IndexTemplateDataStreamConfiguration ¶
type IndexTemplateDataStreamConfiguration struct { // AllowCustomRouting If true, the data stream supports custom routing. AllowCustomRouting *bool `json:"allow_custom_routing,omitempty"` // Hidden If true, the data stream is hidden. Hidden *bool `json:"hidden,omitempty"` }
IndexTemplateDataStreamConfiguration type.
func NewIndexTemplateDataStreamConfiguration ¶
func NewIndexTemplateDataStreamConfiguration() *IndexTemplateDataStreamConfiguration
NewIndexTemplateDataStreamConfiguration returns an IndexTemplateDataStreamConfiguration.
type IndexTemplateItem ¶
type IndexTemplateItem struct { IndexTemplate IndexTemplate `json:"index_template"` Name string `json:"name"` }
IndexTemplateItem type.
func NewIndexTemplateItem ¶
func NewIndexTemplateItem() *IndexTemplateItem
NewIndexTemplateItem returns an IndexTemplateItem.
type IndexTemplateMapping ¶
type IndexTemplateMapping struct { Aliases map[string]Alias `json:"aliases,omitempty"` Mappings *TypeMapping `json:"mappings,omitempty"` Settings *IndexSettings `json:"settings,omitempty"` }
IndexTemplateMapping type.
func NewIndexTemplateMapping ¶
func NewIndexTemplateMapping() *IndexTemplateMapping
NewIndexTemplateMapping returns an IndexTemplateMapping.
type IndexTemplateSummary ¶
type IndexTemplateSummary struct { Aliases map[string]Alias `json:"aliases,omitempty"` Mappings *TypeMapping `json:"mappings,omitempty"` Settings *IndexSettings `json:"settings,omitempty"` }
IndexTemplateSummary type.
func NewIndexTemplateSummary ¶
func NewIndexTemplateSummary() *IndexTemplateSummary
NewIndexTemplateSummary returns an IndexTemplateSummary.
type IndexVersioning ¶
type IndexVersioning struct { Created *string `json:"created,omitempty"` CreatedString *string `json:"created_string,omitempty"` }
IndexVersioning type.
func NewIndexVersioning ¶
func NewIndexVersioning() *IndexVersioning
NewIndexVersioning returns an IndexVersioning.
type IndexingPressureMemorySummary ¶
type IndexingPressureMemorySummary struct { AllInBytes int64 `json:"all_in_bytes"` CombinedCoordinatingAndPrimaryInBytes int64 `json:"combined_coordinating_and_primary_in_bytes"` CoordinatingInBytes int64 `json:"coordinating_in_bytes"` CoordinatingRejections *int64 `json:"coordinating_rejections,omitempty"` PrimaryInBytes int64 `json:"primary_in_bytes"` PrimaryRejections *int64 `json:"primary_rejections,omitempty"` ReplicaInBytes int64 `json:"replica_in_bytes"` ReplicaRejections *int64 `json:"replica_rejections,omitempty"` }
IndexingPressureMemorySummary type.
func NewIndexingPressureMemorySummary ¶
func NewIndexingPressureMemorySummary() *IndexingPressureMemorySummary
NewIndexingPressureMemorySummary returns an IndexingPressureMemorySummary.
type IndexingStats ¶
type IndexingStats struct { DeleteCurrent int64 `json:"delete_current"` DeleteTime Duration `json:"delete_time,omitempty"` DeleteTimeInMillis int64 `json:"delete_time_in_millis"` DeleteTotal int64 `json:"delete_total"` IndexCurrent int64 `json:"index_current"` IndexFailed int64 `json:"index_failed"` IndexTime Duration `json:"index_time,omitempty"` IndexTimeInMillis int64 `json:"index_time_in_millis"` IndexTotal int64 `json:"index_total"` IsThrottled bool `json:"is_throttled"` NoopUpdateTotal int64 `json:"noop_update_total"` ThrottleTime Duration `json:"throttle_time,omitempty"` ThrottleTimeInMillis int64 `json:"throttle_time_in_millis"` Types map[string]IndexingStats `json:"types,omitempty"` WriteLoad *Float64 `json:"write_load,omitempty"` }
IndexingStats type.
func NewIndexingStats ¶
func NewIndexingStats() *IndexingStats
NewIndexingStats returns an IndexingStats.
type IndicesAction ¶
type IndicesAction struct { Add *AddAction `json:"add,omitempty"` Remove *RemoveAction `json:"remove,omitempty"` RemoveIndex *RemoveIndexAction `json:"remove_index,omitempty"` }
IndicesAction type.
func NewIndicesAction ¶
func NewIndicesAction() *IndicesAction
NewIndicesAction returns an IndicesAction.
type IndicesBlockStatus ¶
IndicesBlockStatus type.
func NewIndicesBlockStatus ¶
func NewIndicesBlockStatus() *IndicesBlockStatus
NewIndicesBlockStatus returns an IndicesBlockStatus.
type IndicesIndexingPressure ¶
type IndicesIndexingPressure struct {
Memory IndicesIndexingPressureMemory `json:"memory"`
}
IndicesIndexingPressure type.
func NewIndicesIndexingPressure ¶
func NewIndicesIndexingPressure() *IndicesIndexingPressure
NewIndicesIndexingPressure returns an IndicesIndexingPressure.
type IndicesIndexingPressureMemory ¶
type IndicesIndexingPressureMemory struct { // Limit Number of outstanding bytes that may be consumed by indexing requests. When // this limit is reached or exceeded, // the node will reject new coordinating and primary operations. When replica // operations consume 1.5x this limit, // the node will reject new replica operations. Defaults to 10% of the heap. Limit *int `json:"limit,omitempty"` }
IndicesIndexingPressureMemory type.
func NewIndicesIndexingPressureMemory ¶
func NewIndicesIndexingPressureMemory() *IndicesIndexingPressureMemory
NewIndicesIndexingPressureMemory returns an IndicesIndexingPressureMemory.
type IndicesModifyAction ¶
type IndicesModifyAction struct { AddBackingIndex *IndexAndDataStreamAction `json:"add_backing_index,omitempty"` RemoveBackingIndex *IndexAndDataStreamAction `json:"remove_backing_index,omitempty"` }
IndicesModifyAction type.
func NewIndicesModifyAction ¶
func NewIndicesModifyAction() *IndicesModifyAction
NewIndicesModifyAction returns an IndicesModifyAction.
type IndicesOptions ¶
type IndicesOptions struct { // AllowNoIndices If false, the request returns an error if any wildcard expression, index // alias, or `_all` value targets only // missing or closed indices. This behavior applies even if the request targets // other open indices. For example, // a request targeting `foo*,bar*` returns an error if an index starts with // `foo` but no index starts with `bar`. AllowNoIndices *bool `json:"allow_no_indices,omitempty"` // ExpandWildcards Type of index that wildcard patterns can match. If the request can target // data streams, this argument // determines whether wildcard expressions match hidden data streams. Supports // comma-separated values, // such as `open,hidden`. ExpandWildcards []expandwildcard.ExpandWildcard `json:"expand_wildcards,omitempty"` // IgnoreThrottled If true, concrete, expanded or aliased indices are ignored when frozen. IgnoreThrottled *bool `json:"ignore_throttled,omitempty"` IgnoreUnavailable *bool `json:"ignore_unavailable,omitempty"` }
IndicesOptions type.
func NewIndicesOptions ¶
func NewIndicesOptions() *IndicesOptions
NewIndicesOptions returns an IndicesOptions.
type IndicesPrivileges ¶
type IndicesPrivileges struct { // AllowRestrictedIndices Set to `true` if using wildcard or regular expressions for patterns that // cover restricted indices. Implicitly, restricted indices have limited // privileges that can cause pattern tests to fail. If restricted indices are // explicitly included in the `names` list, Elasticsearch checks privileges // against these indices regardless of the value set for // `allow_restricted_indices`. AllowRestrictedIndices *bool `json:"allow_restricted_indices,omitempty"` // FieldSecurity The document fields that the owners of the role have read access to. FieldSecurity []FieldSecurity `json:"field_security,omitempty"` // Names A list of indices (or index name patterns) to which the permissions in this // entry apply. Names []string `json:"names"` // Privileges The index level privileges that owners of the role have on the specified // indices. Privileges []indexprivilege.IndexPrivilege `json:"privileges"` // Query A search query that defines the documents the owners of the role have access // to. A document within the specified indices must match this query for it to // be accessible by the owners of the role. Query IndicesPrivilegesQuery `json:"query,omitempty"` }
IndicesPrivileges type.
func NewIndicesPrivileges ¶
func NewIndicesPrivileges() *IndicesPrivileges
NewIndicesPrivileges returns an IndicesPrivileges.
type IndicesPrivilegesQuery ¶
type IndicesPrivilegesQuery interface{}
IndicesPrivilegesQuery holds the union for the following types:
string Query RoleTemplateQuery
type IndicesRecord ¶
type IndicesRecord struct { // BulkAvgSizeInBytes average size in bytes of shard bulk BulkAvgSizeInBytes *string `json:"bulk.avg_size_in_bytes,omitempty"` // BulkAvgTime average time spend in shard bulk BulkAvgTime *string `json:"bulk.avg_time,omitempty"` // BulkTotalOperations number of bulk shard ops BulkTotalOperations *string `json:"bulk.total_operations,omitempty"` // BulkTotalSizeInBytes total size in bytes of shard bulk BulkTotalSizeInBytes *string `json:"bulk.total_size_in_bytes,omitempty"` // BulkTotalTime time spend in shard bulk BulkTotalTime *string `json:"bulk.total_time,omitempty"` // CompletionSize size of completion CompletionSize *string `json:"completion.size,omitempty"` // CreationDate index creation date (millisecond value) CreationDate *string `json:"creation.date,omitempty"` // CreationDateString index creation date (as string) CreationDateString *string `json:"creation.date.string,omitempty"` // DocsCount available docs DocsCount string `json:"docs.count,omitempty"` // DocsDeleted deleted docs DocsDeleted string `json:"docs.deleted,omitempty"` // FielddataEvictions fielddata evictions FielddataEvictions *string `json:"fielddata.evictions,omitempty"` // FielddataMemorySize used fielddata cache FielddataMemorySize *string `json:"fielddata.memory_size,omitempty"` // FlushTotal number of flushes FlushTotal *string `json:"flush.total,omitempty"` // FlushTotalTime time spent in flush FlushTotalTime *string `json:"flush.total_time,omitempty"` // GetCurrent number of current get ops GetCurrent *string `json:"get.current,omitempty"` // GetExistsTime time spent in successful gets GetExistsTime *string `json:"get.exists_time,omitempty"` // GetExistsTotal number of successful gets GetExistsTotal *string `json:"get.exists_total,omitempty"` // GetMissingTime time spent in failed gets GetMissingTime *string `json:"get.missing_time,omitempty"` // GetMissingTotal number of failed gets GetMissingTotal *string `json:"get.missing_total,omitempty"` // GetTime 
time spent in get GetTime *string `json:"get.time,omitempty"` // GetTotal number of get ops GetTotal *string `json:"get.total,omitempty"` // Health current health status Health *string `json:"health,omitempty"` // Index index name Index *string `json:"index,omitempty"` // IndexingDeleteCurrent number of current deletions IndexingDeleteCurrent *string `json:"indexing.delete_current,omitempty"` // IndexingDeleteTime time spent in deletions IndexingDeleteTime *string `json:"indexing.delete_time,omitempty"` // IndexingDeleteTotal number of delete ops IndexingDeleteTotal *string `json:"indexing.delete_total,omitempty"` // IndexingIndexCurrent number of current indexing ops IndexingIndexCurrent *string `json:"indexing.index_current,omitempty"` // IndexingIndexFailed number of failed indexing ops IndexingIndexFailed *string `json:"indexing.index_failed,omitempty"` // IndexingIndexTime time spent in indexing IndexingIndexTime *string `json:"indexing.index_time,omitempty"` // IndexingIndexTotal number of indexing ops IndexingIndexTotal *string `json:"indexing.index_total,omitempty"` // MemoryTotal total used memory MemoryTotal *string `json:"memory.total,omitempty"` // MergesCurrent number of current merges MergesCurrent *string `json:"merges.current,omitempty"` // MergesCurrentDocs number of current merging docs MergesCurrentDocs *string `json:"merges.current_docs,omitempty"` // MergesCurrentSize size of current merges MergesCurrentSize *string `json:"merges.current_size,omitempty"` // MergesTotal number of completed merge ops MergesTotal *string `json:"merges.total,omitempty"` // MergesTotalDocs docs merged MergesTotalDocs *string `json:"merges.total_docs,omitempty"` // MergesTotalSize size merged MergesTotalSize *string `json:"merges.total_size,omitempty"` // MergesTotalTime time spent in merges MergesTotalTime *string `json:"merges.total_time,omitempty"` // Pri number of primary shards Pri *string `json:"pri,omitempty"` // PriBulkAvgSizeInBytes average size in bytes of 
shard bulk PriBulkAvgSizeInBytes *string `json:"pri.bulk.avg_size_in_bytes,omitempty"` // PriBulkAvgTime average time spend in shard bulk PriBulkAvgTime *string `json:"pri.bulk.avg_time,omitempty"` // PriBulkTotalOperations number of bulk shard ops PriBulkTotalOperations *string `json:"pri.bulk.total_operations,omitempty"` // PriBulkTotalSizeInBytes total size in bytes of shard bulk PriBulkTotalSizeInBytes *string `json:"pri.bulk.total_size_in_bytes,omitempty"` // PriBulkTotalTime time spend in shard bulk PriBulkTotalTime *string `json:"pri.bulk.total_time,omitempty"` // PriCompletionSize size of completion PriCompletionSize *string `json:"pri.completion.size,omitempty"` // PriFielddataEvictions fielddata evictions PriFielddataEvictions *string `json:"pri.fielddata.evictions,omitempty"` // PriFielddataMemorySize used fielddata cache PriFielddataMemorySize *string `json:"pri.fielddata.memory_size,omitempty"` // PriFlushTotal number of flushes PriFlushTotal *string `json:"pri.flush.total,omitempty"` // PriFlushTotalTime time spent in flush PriFlushTotalTime *string `json:"pri.flush.total_time,omitempty"` // PriGetCurrent number of current get ops PriGetCurrent *string `json:"pri.get.current,omitempty"` // PriGetExistsTime time spent in successful gets PriGetExistsTime *string `json:"pri.get.exists_time,omitempty"` // PriGetExistsTotal number of successful gets PriGetExistsTotal *string `json:"pri.get.exists_total,omitempty"` // PriGetMissingTime time spent in failed gets PriGetMissingTime *string `json:"pri.get.missing_time,omitempty"` // PriGetMissingTotal number of failed gets PriGetMissingTotal *string `json:"pri.get.missing_total,omitempty"` // PriGetTime time spent in get PriGetTime *string `json:"pri.get.time,omitempty"` // PriGetTotal number of get ops PriGetTotal *string `json:"pri.get.total,omitempty"` // PriIndexingDeleteCurrent number of current deletions PriIndexingDeleteCurrent *string `json:"pri.indexing.delete_current,omitempty"` // 
PriIndexingDeleteTime time spent in deletions PriIndexingDeleteTime *string `json:"pri.indexing.delete_time,omitempty"` // PriIndexingDeleteTotal number of delete ops PriIndexingDeleteTotal *string `json:"pri.indexing.delete_total,omitempty"` // PriIndexingIndexCurrent number of current indexing ops PriIndexingIndexCurrent *string `json:"pri.indexing.index_current,omitempty"` // PriIndexingIndexFailed number of failed indexing ops PriIndexingIndexFailed *string `json:"pri.indexing.index_failed,omitempty"` // PriIndexingIndexTime time spent in indexing PriIndexingIndexTime *string `json:"pri.indexing.index_time,omitempty"` // PriIndexingIndexTotal number of indexing ops PriIndexingIndexTotal *string `json:"pri.indexing.index_total,omitempty"` // PriMemoryTotal total user memory PriMemoryTotal *string `json:"pri.memory.total,omitempty"` // PriMergesCurrent number of current merges PriMergesCurrent *string `json:"pri.merges.current,omitempty"` // PriMergesCurrentDocs number of current merging docs PriMergesCurrentDocs *string `json:"pri.merges.current_docs,omitempty"` // PriMergesCurrentSize size of current merges PriMergesCurrentSize *string `json:"pri.merges.current_size,omitempty"` // PriMergesTotal number of completed merge ops PriMergesTotal *string `json:"pri.merges.total,omitempty"` // PriMergesTotalDocs docs merged PriMergesTotalDocs *string `json:"pri.merges.total_docs,omitempty"` // PriMergesTotalSize size merged PriMergesTotalSize *string `json:"pri.merges.total_size,omitempty"` // PriMergesTotalTime time spent in merges PriMergesTotalTime *string `json:"pri.merges.total_time,omitempty"` // PriQueryCacheEvictions query cache evictions PriQueryCacheEvictions *string `json:"pri.query_cache.evictions,omitempty"` // PriQueryCacheMemorySize used query cache PriQueryCacheMemorySize *string `json:"pri.query_cache.memory_size,omitempty"` // PriRefreshExternalTime time spent in external refreshes PriRefreshExternalTime *string 
`json:"pri.refresh.external_time,omitempty"` // PriRefreshExternalTotal total external refreshes PriRefreshExternalTotal *string `json:"pri.refresh.external_total,omitempty"` // PriRefreshListeners number of pending refresh listeners PriRefreshListeners *string `json:"pri.refresh.listeners,omitempty"` // PriRefreshTime time spent in refreshes PriRefreshTime *string `json:"pri.refresh.time,omitempty"` // PriRefreshTotal total refreshes PriRefreshTotal *string `json:"pri.refresh.total,omitempty"` // PriRequestCacheEvictions request cache evictions PriRequestCacheEvictions *string `json:"pri.request_cache.evictions,omitempty"` // PriRequestCacheHitCount request cache hit count PriRequestCacheHitCount *string `json:"pri.request_cache.hit_count,omitempty"` // PriRequestCacheMemorySize used request cache PriRequestCacheMemorySize *string `json:"pri.request_cache.memory_size,omitempty"` // PriRequestCacheMissCount request cache miss count PriRequestCacheMissCount *string `json:"pri.request_cache.miss_count,omitempty"` // PriSearchFetchCurrent current fetch phase ops PriSearchFetchCurrent *string `json:"pri.search.fetch_current,omitempty"` // PriSearchFetchTime time spent in fetch phase PriSearchFetchTime *string `json:"pri.search.fetch_time,omitempty"` // PriSearchFetchTotal total fetch ops PriSearchFetchTotal *string `json:"pri.search.fetch_total,omitempty"` // PriSearchOpenContexts open search contexts PriSearchOpenContexts *string `json:"pri.search.open_contexts,omitempty"` // PriSearchQueryCurrent current query phase ops PriSearchQueryCurrent *string `json:"pri.search.query_current,omitempty"` // PriSearchQueryTime time spent in query phase PriSearchQueryTime *string `json:"pri.search.query_time,omitempty"` // PriSearchQueryTotal total query phase ops PriSearchQueryTotal *string `json:"pri.search.query_total,omitempty"` // PriSearchScrollCurrent open scroll contexts PriSearchScrollCurrent *string `json:"pri.search.scroll_current,omitempty"` // PriSearchScrollTime time 
scroll contexts held open PriSearchScrollTime *string `json:"pri.search.scroll_time,omitempty"` // PriSearchScrollTotal completed scroll contexts PriSearchScrollTotal *string `json:"pri.search.scroll_total,omitempty"` // PriSegmentsCount number of segments PriSegmentsCount *string `json:"pri.segments.count,omitempty"` // PriSegmentsFixedBitsetMemory memory used by fixed bit sets for nested object field types and export type // filters for types referred in _parent fields PriSegmentsFixedBitsetMemory *string `json:"pri.segments.fixed_bitset_memory,omitempty"` // PriSegmentsIndexWriterMemory memory used by index writer PriSegmentsIndexWriterMemory *string `json:"pri.segments.index_writer_memory,omitempty"` // PriSegmentsMemory memory used by segments PriSegmentsMemory *string `json:"pri.segments.memory,omitempty"` // PriSegmentsVersionMapMemory memory used by version map PriSegmentsVersionMapMemory *string `json:"pri.segments.version_map_memory,omitempty"` // PriStoreSize store size of primaries PriStoreSize string `json:"pri.store.size,omitempty"` // PriSuggestCurrent number of current suggest ops PriSuggestCurrent *string `json:"pri.suggest.current,omitempty"` // PriSuggestTime time spend in suggest PriSuggestTime *string `json:"pri.suggest.time,omitempty"` // PriSuggestTotal number of suggest ops PriSuggestTotal *string `json:"pri.suggest.total,omitempty"` // PriWarmerCurrent current warmer ops PriWarmerCurrent *string `json:"pri.warmer.current,omitempty"` // PriWarmerTotal total warmer ops PriWarmerTotal *string `json:"pri.warmer.total,omitempty"` // PriWarmerTotalTime time spent in warmers PriWarmerTotalTime *string `json:"pri.warmer.total_time,omitempty"` // QueryCacheEvictions query cache evictions QueryCacheEvictions *string `json:"query_cache.evictions,omitempty"` // QueryCacheMemorySize used query cache QueryCacheMemorySize *string `json:"query_cache.memory_size,omitempty"` // RefreshExternalTime time spent in external refreshes RefreshExternalTime *string 
`json:"refresh.external_time,omitempty"` // RefreshExternalTotal total external refreshes RefreshExternalTotal *string `json:"refresh.external_total,omitempty"` // RefreshListeners number of pending refresh listeners RefreshListeners *string `json:"refresh.listeners,omitempty"` // RefreshTime time spent in refreshes RefreshTime *string `json:"refresh.time,omitempty"` // RefreshTotal total refreshes RefreshTotal *string `json:"refresh.total,omitempty"` // Rep number of replica shards Rep *string `json:"rep,omitempty"` // RequestCacheEvictions request cache evictions RequestCacheEvictions *string `json:"request_cache.evictions,omitempty"` // RequestCacheHitCount request cache hit count RequestCacheHitCount *string `json:"request_cache.hit_count,omitempty"` // RequestCacheMemorySize used request cache RequestCacheMemorySize *string `json:"request_cache.memory_size,omitempty"` // RequestCacheMissCount request cache miss count RequestCacheMissCount *string `json:"request_cache.miss_count,omitempty"` // SearchFetchCurrent current fetch phase ops SearchFetchCurrent *string `json:"search.fetch_current,omitempty"` // SearchFetchTime time spent in fetch phase SearchFetchTime *string `json:"search.fetch_time,omitempty"` // SearchFetchTotal total fetch ops SearchFetchTotal *string `json:"search.fetch_total,omitempty"` // SearchOpenContexts open search contexts SearchOpenContexts *string `json:"search.open_contexts,omitempty"` // SearchQueryCurrent current query phase ops SearchQueryCurrent *string `json:"search.query_current,omitempty"` // SearchQueryTime time spent in query phase SearchQueryTime *string `json:"search.query_time,omitempty"` // SearchQueryTotal total query phase ops SearchQueryTotal *string `json:"search.query_total,omitempty"` // SearchScrollCurrent open scroll contexts SearchScrollCurrent *string `json:"search.scroll_current,omitempty"` // SearchScrollTime time scroll contexts held open SearchScrollTime *string `json:"search.scroll_time,omitempty"` // 
SearchScrollTotal completed scroll contexts SearchScrollTotal *string `json:"search.scroll_total,omitempty"` // SearchThrottled indicates if the index is search throttled SearchThrottled *string `json:"search.throttled,omitempty"` // SegmentsCount number of segments SegmentsCount *string `json:"segments.count,omitempty"` // SegmentsFixedBitsetMemory memory used by fixed bit sets for nested object field types and export type // filters for types referred in _parent fields SegmentsFixedBitsetMemory *string `json:"segments.fixed_bitset_memory,omitempty"` // SegmentsIndexWriterMemory memory used by index writer SegmentsIndexWriterMemory *string `json:"segments.index_writer_memory,omitempty"` // SegmentsMemory memory used by segments SegmentsMemory *string `json:"segments.memory,omitempty"` // SegmentsVersionMapMemory memory used by version map SegmentsVersionMapMemory *string `json:"segments.version_map_memory,omitempty"` // Status open/close status Status *string `json:"status,omitempty"` // StoreSize store size of primaries & replicas StoreSize string `json:"store.size,omitempty"` // SuggestCurrent number of current suggest ops SuggestCurrent *string `json:"suggest.current,omitempty"` // SuggestTime time spend in suggest SuggestTime *string `json:"suggest.time,omitempty"` // SuggestTotal number of suggest ops SuggestTotal *string `json:"suggest.total,omitempty"` // Uuid index uuid Uuid *string `json:"uuid,omitempty"` // WarmerCurrent current warmer ops WarmerCurrent *string `json:"warmer.current,omitempty"` // WarmerTotal total warmer ops WarmerTotal *string `json:"warmer.total,omitempty"` // WarmerTotalTime time spent in warmers WarmerTotalTime *string `json:"warmer.total_time,omitempty"` }
IndicesRecord type.
func NewIndicesRecord ¶
func NewIndicesRecord() *IndicesRecord
NewIndicesRecord returns an IndicesRecord.
type IndicesShardStats ¶
type IndicesShardStats struct { Bulk *BulkStats `json:"bulk,omitempty"` Commit *ShardCommit `json:"commit,omitempty"` Completion *CompletionStats `json:"completion,omitempty"` Docs *DocStats `json:"docs,omitempty"` Fielddata *FielddataStats `json:"fielddata,omitempty"` Flush *FlushStats `json:"flush,omitempty"` Get *GetStats `json:"get,omitempty"` Indexing *IndexingStats `json:"indexing,omitempty"` Indices *IndicesStats `json:"indices,omitempty"` Mappings *MappingStats `json:"mappings,omitempty"` Merges *MergesStats `json:"merges,omitempty"` QueryCache *ShardQueryCache `json:"query_cache,omitempty"` Recovery *RecoveryStats `json:"recovery,omitempty"` Refresh *RefreshStats `json:"refresh,omitempty"` RequestCache *RequestCacheStats `json:"request_cache,omitempty"` RetentionLeases *ShardRetentionLeases `json:"retention_leases,omitempty"` Routing *ShardRouting `json:"routing,omitempty"` Search *SearchStats `json:"search,omitempty"` Segments *SegmentsStats `json:"segments,omitempty"` SeqNo *ShardSequenceNumber `json:"seq_no,omitempty"` ShardPath *ShardPath `json:"shard_path,omitempty"` ShardStats *ShardsTotalStats `json:"shard_stats,omitempty"` Shards map[string]json.RawMessage `json:"shards,omitempty"` Store *StoreStats `json:"store,omitempty"` Translog *TranslogStats `json:"translog,omitempty"` Warmer *WarmerStats `json:"warmer,omitempty"` }
IndicesShardStats type.
func NewIndicesShardStats ¶
func NewIndicesShardStats() *IndicesShardStats
NewIndicesShardStats returns an IndicesShardStats.
type IndicesShardStores ¶
type IndicesShardStores struct {
Shards map[string]ShardStoreWrapper `json:"shards"`
}
IndicesShardStores type.
func NewIndicesShardStores ¶
func NewIndicesShardStores() *IndicesShardStores
NewIndicesShardStores returns an IndicesShardStores.
type IndicesShardsStats ¶
type IndicesShardsStats struct { AllFields FieldSummary `json:"all_fields"` Fields map[string]FieldSummary `json:"fields"` }
IndicesShardsStats type.
func NewIndicesShardsStats ¶
func NewIndicesShardsStats() *IndicesShardsStats
NewIndicesShardsStats returns an IndicesShardsStats.
type IndicesStats ¶
type IndicesStats struct { Health *healthstatus.HealthStatus `json:"health,omitempty"` Primaries *IndexStats `json:"primaries,omitempty"` Shards map[string][]IndicesShardStats `json:"shards,omitempty"` Status *indexmetadatastate.IndexMetadataState `json:"status,omitempty"` Total *IndexStats `json:"total,omitempty"` Uuid *string `json:"uuid,omitempty"` }
IndicesStats type.
func NewIndicesStats ¶
func NewIndicesStats() *IndicesStats
NewIndicesStats returns an IndicesStats.
type IndicesValidationExplanation ¶
type IndicesValidationExplanation struct { Error *string `json:"error,omitempty"` Explanation *string `json:"explanation,omitempty"` Index string `json:"index"` Valid bool `json:"valid"` }
IndicesValidationExplanation type.
func NewIndicesValidationExplanation ¶
func NewIndicesValidationExplanation() *IndicesValidationExplanation
NewIndicesValidationExplanation returns an IndicesValidationExplanation.
type IndicesVersions ¶
type IndicesVersions struct { IndexCount int `json:"index_count"` PrimaryShardCount int `json:"primary_shard_count"` TotalPrimaryBytes int64 `json:"total_primary_bytes"` Version string `json:"version"` }
IndicesVersions type.
func NewIndicesVersions ¶
func NewIndicesVersions() *IndicesVersions
NewIndicesVersions returns an IndicesVersions.
type InferenceAggregate ¶
type InferenceAggregate struct { Data map[string]json.RawMessage `json:"-"` FeatureImportance []InferenceFeatureImportance `json:"feature_importance,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` TopClasses []InferenceTopClassEntry `json:"top_classes,omitempty"` Value FieldValue `json:"value,omitempty"` Warning *string `json:"warning,omitempty"` }
InferenceAggregate type.
func NewInferenceAggregate ¶
func NewInferenceAggregate() *InferenceAggregate
NewInferenceAggregate returns an InferenceAggregate.
func (InferenceAggregate) MarshalJSON ¶
func (s InferenceAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type InferenceAggregation ¶
type InferenceAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` InferenceConfig *InferenceConfigContainer `json:"inference_config,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` ModelId string `json:"model_id"` Name *string `json:"name,omitempty"` }
InferenceAggregation type.
func NewInferenceAggregation ¶
func NewInferenceAggregation() *InferenceAggregation
NewInferenceAggregation returns an InferenceAggregation.
func (*InferenceAggregation) UnmarshalJSON ¶
func (s *InferenceAggregation) UnmarshalJSON(data []byte) error
type InferenceClassImportance ¶
type InferenceClassImportance struct { ClassName string `json:"class_name"` Importance Float64 `json:"importance"` }
InferenceClassImportance type.
func NewInferenceClassImportance ¶
func NewInferenceClassImportance() *InferenceClassImportance
NewInferenceClassImportance returns an InferenceClassImportance.
type InferenceConfig ¶
type InferenceConfig struct { Classification *InferenceConfigClassification `json:"classification,omitempty"` Regression *InferenceConfigRegression `json:"regression,omitempty"` }
InferenceConfig type.
func NewInferenceConfig ¶
func NewInferenceConfig() *InferenceConfig
NewInferenceConfig returns an InferenceConfig.
type InferenceConfigClassification ¶
type InferenceConfigClassification struct { NumTopClasses *int `json:"num_top_classes,omitempty"` NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` PredictionFieldType *string `json:"prediction_field_type,omitempty"` ResultsField *string `json:"results_field,omitempty"` TopClassesResultsField *string `json:"top_classes_results_field,omitempty"` }
InferenceConfigClassification type.
func NewInferenceConfigClassification ¶
func NewInferenceConfigClassification() *InferenceConfigClassification
NewInferenceConfigClassification returns an InferenceConfigClassification.
type InferenceConfigContainer ¶
type InferenceConfigContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` // Regression Regression configuration for inference. Regression *RegressionInferenceOptions `json:"regression,omitempty"` }
InferenceConfigContainer type.
func NewInferenceConfigContainer ¶
func NewInferenceConfigContainer() *InferenceConfigContainer
NewInferenceConfigContainer returns an InferenceConfigContainer.
type InferenceConfigCreateContainer ¶
type InferenceConfigCreateContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` // FillMask Fill mask configuration for inference. FillMask *FillMaskInferenceOptions `json:"fill_mask,omitempty"` // Ner Named entity recognition configuration for inference. Ner *NerInferenceOptions `json:"ner,omitempty"` // PassThrough Pass through configuration for inference. PassThrough *PassThroughInferenceOptions `json:"pass_through,omitempty"` // QuestionAnswering Question answering configuration for inference. QuestionAnswering *QuestionAnsweringInferenceOptions `json:"question_answering,omitempty"` // Regression Regression configuration for inference. Regression *RegressionInferenceOptions `json:"regression,omitempty"` // TextClassification Text classification configuration for inference. TextClassification *TextClassificationInferenceOptions `json:"text_classification,omitempty"` // TextEmbedding Text embedding configuration for inference. TextEmbedding *TextEmbeddingInferenceOptions `json:"text_embedding,omitempty"` // ZeroShotClassification Zeroshot classification configuration for inference. ZeroShotClassification *ZeroShotClassificationInferenceOptions `json:"zero_shot_classification,omitempty"` }
InferenceConfigCreateContainer type.
func NewInferenceConfigCreateContainer ¶
func NewInferenceConfigCreateContainer() *InferenceConfigCreateContainer
NewInferenceConfigCreateContainer returns an InferenceConfigCreateContainer.
type InferenceConfigRegression ¶
type InferenceConfigRegression struct { NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` ResultsField *string `json:"results_field,omitempty"` }
InferenceConfigRegression type.
func NewInferenceConfigRegression ¶
func NewInferenceConfigRegression() *InferenceConfigRegression
NewInferenceConfigRegression returns an InferenceConfigRegression.
type InferenceConfigUpdateContainer ¶
type InferenceConfigUpdateContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` // FillMask Fill mask configuration for inference. FillMask *FillMaskInferenceUpdateOptions `json:"fill_mask,omitempty"` // Ner Named entity recognition configuration for inference. Ner *NerInferenceUpdateOptions `json:"ner,omitempty"` // PassThrough Pass through configuration for inference. PassThrough *PassThroughInferenceUpdateOptions `json:"pass_through,omitempty"` // QuestionAnswering Question answering configuration for inference QuestionAnswering *QuestionAnsweringInferenceUpdateOptions `json:"question_answering,omitempty"` // Regression Regression configuration for inference. Regression *RegressionInferenceOptions `json:"regression,omitempty"` // TextClassification Text classification configuration for inference. TextClassification *TextClassificationInferenceUpdateOptions `json:"text_classification,omitempty"` // TextEmbedding Text embedding configuration for inference. TextEmbedding *TextEmbeddingInferenceUpdateOptions `json:"text_embedding,omitempty"` // ZeroShotClassification Zeroshot classification configuration for inference. ZeroShotClassification *ZeroShotClassificationInferenceUpdateOptions `json:"zero_shot_classification,omitempty"` }
InferenceConfigUpdateContainer type.
func NewInferenceConfigUpdateContainer ¶
func NewInferenceConfigUpdateContainer() *InferenceConfigUpdateContainer
NewInferenceConfigUpdateContainer returns an InferenceConfigUpdateContainer.
type InferenceFeatureImportance ¶
type InferenceFeatureImportance struct { Classes []InferenceClassImportance `json:"classes,omitempty"` FeatureName string `json:"feature_name"` Importance *Float64 `json:"importance,omitempty"` }
InferenceFeatureImportance type.
func NewInferenceFeatureImportance ¶
func NewInferenceFeatureImportance() *InferenceFeatureImportance
NewInferenceFeatureImportance returns an InferenceFeatureImportance.
type InferenceProcessor ¶
type InferenceProcessor struct { Description *string `json:"description,omitempty"` FieldMap map[string]json.RawMessage `json:"field_map,omitempty"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` InferenceConfig *InferenceConfig `json:"inference_config,omitempty"` ModelId string `json:"model_id"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
InferenceProcessor type.
func NewInferenceProcessor ¶
func NewInferenceProcessor() *InferenceProcessor
NewInferenceProcessor returns an InferenceProcessor.
type InferenceResponseResult ¶
type InferenceResponseResult struct { // Entities If the model is trained for named entity recognition (NER) tasks, the // response contains the recognized entities. Entities []TrainedModelEntities `json:"entities,omitempty"` // FeatureImportance The feature importance for the inference results. Relevant only for // classification or regression models FeatureImportance []TrainedModelInferenceFeatureImportance `json:"feature_importance,omitempty"` // IsTruncated Indicates whether the input text was truncated to meet the model's maximum // sequence length limit. This property // is present only when it is true. IsTruncated *bool `json:"is_truncated,omitempty"` // PredictedValue If the model is trained for a text classification or zero shot classification // task, the response is the // predicted class. // For named entity recognition (NER) tasks, it contains the annotated text // output. // For fill mask tasks, it contains the top prediction for replacing the mask // token. // For text embedding tasks, it contains the raw numerical text embedding // values. // For regression models, its a numerical value // For classification models, it may be an integer, double, boolean or string // depending on prediction type PredictedValue []PredictedValue `json:"predicted_value,omitempty"` // PredictedValueSequence For fill mask tasks, the response contains the input text sequence with the // mask token replaced by the predicted // value. // Additionally PredictedValueSequence *string `json:"predicted_value_sequence,omitempty"` // PredictionProbability Specifies a probability for the predicted value. PredictionProbability *Float64 `json:"prediction_probability,omitempty"` // PredictionScore Specifies a confidence score for the predicted value. PredictionScore *Float64 `json:"prediction_score,omitempty"` // TopClasses For fill mask, text classification, and zero shot classification tasks, the // response contains a list of top // class entries. 
TopClasses []TopClassEntry `json:"top_classes,omitempty"` // Warning If the request failed, the response contains the reason for the failure. Warning *string `json:"warning,omitempty"` }
InferenceResponseResult type.
func NewInferenceResponseResult ¶
func NewInferenceResponseResult() *InferenceResponseResult
NewInferenceResponseResult returns an InferenceResponseResult.
type InferenceTopClassEntry ¶
type InferenceTopClassEntry struct { ClassName FieldValue `json:"class_name"` ClassProbability Float64 `json:"class_probability"` ClassScore Float64 `json:"class_score"` }
InferenceTopClassEntry type.
func NewInferenceTopClassEntry ¶
func NewInferenceTopClassEntry() *InferenceTopClassEntry
NewInferenceTopClassEntry returns an InferenceTopClassEntry.
type Influence ¶
type Influence struct { InfluencerFieldName string `json:"influencer_field_name"` InfluencerFieldValues []string `json:"influencer_field_values"` }
Influence type.
type Influencer ¶
type Influencer struct { // BucketSpan The length of the bucket in seconds. This value matches the bucket span that // is specified in the job. BucketSpan int64 `json:"bucket_span"` // Foo Additional influencer properties are added, depending on the fields being // analyzed. For example, if it’s // analyzing `user_name` as an influencer, a field `user_name` is added to the // result document. This // information enables you to filter the anomaly results more easily. Foo *string `json:"foo,omitempty"` // InfluencerFieldName The field name of the influencer. InfluencerFieldName string `json:"influencer_field_name"` // InfluencerFieldValue The entity that influenced, contributed to, or was to blame for the anomaly. InfluencerFieldValue string `json:"influencer_field_value"` // InfluencerScore A normalized score between 0-100, which is based on the probability of the // influencer in this bucket aggregated // across detectors. Unlike `initial_influencer_score`, this value is updated by // a re-normalization process as new // data is analyzed. InfluencerScore Float64 `json:"influencer_score"` // InitialInfluencerScore A normalized score between 0-100, which is based on the probability of the // influencer aggregated across detectors. // This is the initial value that was calculated at the time the bucket was // processed. InitialInfluencerScore Float64 `json:"initial_influencer_score"` // IsInterim If true, this is an interim result. In other words, the results are // calculated based on partial input data. IsInterim bool `json:"is_interim"` // JobId Identifier for the anomaly detection job. JobId string `json:"job_id"` // Probability The probability that the influencer has this behavior, in the range 0 to 1. // This value can be held to a high // precision of over 300 decimal places, so the `influencer_score` is provided // as a human-readable and friendly // interpretation of this value. Probability Float64 `json:"probability"` // ResultType Internal. 
This value is always set to `influencer`. ResultType string `json:"result_type"` // Timestamp The start time of the bucket for which these results were calculated. Timestamp int64 `json:"timestamp"` }
Influencer type.
type InfoFeatureState ¶
type InfoFeatureState struct { FeatureName string `json:"feature_name"` Indices []string `json:"indices"` }
InfoFeatureState type.
func NewInfoFeatureState ¶
func NewInfoFeatureState() *InfoFeatureState
NewInfoFeatureState returns an InfoFeatureState.
type IngestPipeline ¶
type IngestPipeline struct { Description *string `json:"description,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Processors []ProcessorContainer `json:"processors,omitempty"` Version *int64 `json:"version,omitempty"` }
IngestPipeline type.
func NewIngestPipeline ¶
func NewIngestPipeline() *IngestPipeline
NewIngestPipeline returns an IngestPipeline.
type IngestTotal ¶
type IngestTotal struct { Count *int64 `json:"count,omitempty"` Current *int64 `json:"current,omitempty"` Failed *int64 `json:"failed,omitempty"` Processors []map[string]KeyedProcessor `json:"processors,omitempty"` TimeInMillis *int64 `json:"time_in_millis,omitempty"` }
IngestTotal type.
type InlineGet ¶
type InlineGet struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` Metadata map[string]json.RawMessage `json:"-"` PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Routing_ *string `json:"_routing,omitempty"` SeqNo_ *int64 `json:"_seq_no,omitempty"` Source_ json.RawMessage `json:"_source,omitempty"` }
InlineGet type.
func (InlineGet) MarshalJSON ¶
MarshalJSON overrides marshalling for types with additional properties
type InlineGetDictUserDefined ¶
type InlineGetDictUserDefined struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` InlineGetDictUserDefined map[string]json.RawMessage `json:"-"` PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Routing_ *string `json:"_routing,omitempty"` SeqNo_ *int64 `json:"_seq_no,omitempty"` Source_ map[string]json.RawMessage `json:"_source"` }
InlineGetDictUserDefined type.
func NewInlineGetDictUserDefined ¶
func NewInlineGetDictUserDefined() *InlineGetDictUserDefined
NewInlineGetDictUserDefined returns an InlineGetDictUserDefined.
func (InlineGetDictUserDefined) MarshalJSON ¶
func (s InlineGetDictUserDefined) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type InlineScript ¶
type InlineScript struct { Lang *scriptlanguage.ScriptLanguage `json:"lang,omitempty"` Options map[string]string `json:"options,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` Source string `json:"source"` }
InlineScript type.
func NewInlineScript ¶
func NewInlineScript() *InlineScript
NewInlineScript returns an InlineScript.
type InnerHits ¶
type InnerHits struct { Collapse *FieldCollapse `json:"collapse,omitempty"` DocvalueFields []FieldAndFormat `json:"docvalue_fields,omitempty"` Explain *bool `json:"explain,omitempty"` Fields []string `json:"fields,omitempty"` From *int `json:"from,omitempty"` Highlight *Highlight `json:"highlight,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` Name *string `json:"name,omitempty"` ScriptFields map[string]ScriptField `json:"script_fields,omitempty"` SeqNoPrimaryTerm *bool `json:"seq_no_primary_term,omitempty"` Size *int `json:"size,omitempty"` Sort []SortCombinations `json:"sort,omitempty"` Source_ SourceConfig `json:"_source,omitempty"` StoredField []string `json:"stored_field,omitempty"` TrackScores *bool `json:"track_scores,omitempty"` Version *bool `json:"version,omitempty"` }
InnerHits type.
type InnerHitsResult ¶
type InnerHitsResult struct {
Hits *HitsMetadata `json:"hits,omitempty"`
}
InnerHitsResult type.
func NewInnerHitsResult ¶
func NewInnerHitsResult() *InnerHitsResult
NewInnerHitsResult returns an InnerHitsResult.
type IntegerNumberProperty ¶
type IntegerNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *int `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
IntegerNumberProperty type.
func NewIntegerNumberProperty ¶
func NewIntegerNumberProperty() *IntegerNumberProperty
NewIntegerNumberProperty returns an IntegerNumberProperty.
func (*IntegerNumberProperty) UnmarshalJSON ¶
func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error
type IntegerRangeProperty ¶
type IntegerRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
IntegerRangeProperty type.
func NewIntegerRangeProperty ¶
func NewIntegerRangeProperty() *IntegerRangeProperty
NewIntegerRangeProperty returns an IntegerRangeProperty.
func (*IntegerRangeProperty) UnmarshalJSON ¶
func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error
type Intervals ¶
type Intervals struct { AllOf *IntervalsAllOf `json:"all_of,omitempty"` AnyOf *IntervalsAnyOf `json:"any_of,omitempty"` Fuzzy *IntervalsFuzzy `json:"fuzzy,omitempty"` Match *IntervalsMatch `json:"match,omitempty"` Prefix *IntervalsPrefix `json:"prefix,omitempty"` Wildcard *IntervalsWildcard `json:"wildcard,omitempty"` }
Intervals type.
type IntervalsAllOf ¶
type IntervalsAllOf struct { Filter *IntervalsFilter `json:"filter,omitempty"` Intervals []Intervals `json:"intervals"` MaxGaps *int `json:"max_gaps,omitempty"` Ordered *bool `json:"ordered,omitempty"` }
IntervalsAllOf type.
func NewIntervalsAllOf ¶
func NewIntervalsAllOf() *IntervalsAllOf
NewIntervalsAllOf returns an IntervalsAllOf.
type IntervalsAnyOf ¶
type IntervalsAnyOf struct { Filter *IntervalsFilter `json:"filter,omitempty"` Intervals []Intervals `json:"intervals"` }
IntervalsAnyOf type.
func NewIntervalsAnyOf ¶
func NewIntervalsAnyOf() *IntervalsAnyOf
NewIntervalsAnyOf returns an IntervalsAnyOf.
type IntervalsFilter ¶
type IntervalsFilter struct { After *Intervals `json:"after,omitempty"` Before *Intervals `json:"before,omitempty"` ContainedBy *Intervals `json:"contained_by,omitempty"` Containing *Intervals `json:"containing,omitempty"` NotContainedBy *Intervals `json:"not_contained_by,omitempty"` NotContaining *Intervals `json:"not_containing,omitempty"` NotOverlapping *Intervals `json:"not_overlapping,omitempty"` Overlapping *Intervals `json:"overlapping,omitempty"` Script Script `json:"script,omitempty"` }
IntervalsFilter type.
func NewIntervalsFilter ¶
func NewIntervalsFilter() *IntervalsFilter
NewIntervalsFilter returns an IntervalsFilter.
type IntervalsFuzzy ¶
type IntervalsFuzzy struct { Analyzer *string `json:"analyzer,omitempty"` Fuzziness Fuzziness `json:"fuzziness,omitempty"` PrefixLength *int `json:"prefix_length,omitempty"` Term string `json:"term"` Transpositions *bool `json:"transpositions,omitempty"` UseField *string `json:"use_field,omitempty"` }
IntervalsFuzzy type.
func NewIntervalsFuzzy ¶
func NewIntervalsFuzzy() *IntervalsFuzzy
NewIntervalsFuzzy returns an IntervalsFuzzy.
type IntervalsMatch ¶
type IntervalsMatch struct { Analyzer *string `json:"analyzer,omitempty"` Filter *IntervalsFilter `json:"filter,omitempty"` MaxGaps *int `json:"max_gaps,omitempty"` Ordered *bool `json:"ordered,omitempty"` Query string `json:"query"` UseField *string `json:"use_field,omitempty"` }
IntervalsMatch type.
func NewIntervalsMatch ¶
func NewIntervalsMatch() *IntervalsMatch
NewIntervalsMatch returns an IntervalsMatch.
type IntervalsPrefix ¶
type IntervalsPrefix struct { Analyzer *string `json:"analyzer,omitempty"` Prefix string `json:"prefix"` UseField *string `json:"use_field,omitempty"` }
IntervalsPrefix type.
func NewIntervalsPrefix ¶
func NewIntervalsPrefix() *IntervalsPrefix
NewIntervalsPrefix returns an IntervalsPrefix.
type IntervalsQuery ¶
type IntervalsQuery struct { AllOf *IntervalsAllOf `json:"all_of,omitempty"` AnyOf *IntervalsAnyOf `json:"any_of,omitempty"` Boost *float32 `json:"boost,omitempty"` Fuzzy *IntervalsFuzzy `json:"fuzzy,omitempty"` Match *IntervalsMatch `json:"match,omitempty"` Prefix *IntervalsPrefix `json:"prefix,omitempty"` QueryName_ *string `json:"_name,omitempty"` Wildcard *IntervalsWildcard `json:"wildcard,omitempty"` }
IntervalsQuery type.
func NewIntervalsQuery ¶
func NewIntervalsQuery() *IntervalsQuery
NewIntervalsQuery returns an IntervalsQuery.
type IntervalsWildcard ¶
type IntervalsWildcard struct { Analyzer *string `json:"analyzer,omitempty"` Pattern string `json:"pattern"` UseField *string `json:"use_field,omitempty"` }
IntervalsWildcard type.
func NewIntervalsWildcard ¶
func NewIntervalsWildcard() *IntervalsWildcard
NewIntervalsWildcard returns an IntervalsWildcard.
type InvertedIndex ¶
type InvertedIndex struct { Offsets uint `json:"offsets"` Payloads uint `json:"payloads"` Positions uint `json:"positions"` Postings uint `json:"postings"` Proximity uint `json:"proximity"` TermFrequencies uint `json:"term_frequencies"` Terms uint `json:"terms"` }
InvertedIndex type.
func NewInvertedIndex ¶
func NewInvertedIndex() *InvertedIndex
NewInvertedIndex returns an InvertedIndex.
type Invocation ¶
Invocation type.
type IoStatDevice ¶
type IoStatDevice struct { DeviceName *string `json:"device_name,omitempty"` Operations *int64 `json:"operations,omitempty"` ReadKilobytes *int64 `json:"read_kilobytes,omitempty"` ReadOperations *int64 `json:"read_operations,omitempty"` WriteKilobytes *int64 `json:"write_kilobytes,omitempty"` WriteOperations *int64 `json:"write_operations,omitempty"` }
IoStatDevice type.
func NewIoStatDevice ¶
func NewIoStatDevice() *IoStatDevice
NewIoStatDevice returns an IoStatDevice.
type IoStats ¶
type IoStats struct { Devices []IoStatDevice `json:"devices,omitempty"` Total *IoStatDevice `json:"total,omitempty"` }
IoStats type.
type IpPrefixAggregate ¶
type IpPrefixAggregate struct { Buckets BucketsIpPrefixBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
IpPrefixAggregate type.
func NewIpPrefixAggregate ¶
func NewIpPrefixAggregate() *IpPrefixAggregate
NewIpPrefixAggregate returns an IpPrefixAggregate.
func (*IpPrefixAggregate) UnmarshalJSON ¶
func (s *IpPrefixAggregate) UnmarshalJSON(data []byte) error
type IpPrefixAggregation ¶
type IpPrefixAggregation struct { // AppendPrefixLength Defines whether the prefix length is appended to IP address keys in the // response. AppendPrefixLength *bool `json:"append_prefix_length,omitempty"` // Field The document IP address field to aggregation on. The field mapping type must // be `ip` Field string `json:"field"` // IsIpv6 Defines whether the prefix applies to IPv6 addresses. IsIpv6 *bool `json:"is_ipv6,omitempty"` // Keyed Defines whether buckets are returned as a hash rather than an array in the // response. Keyed *bool `json:"keyed,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` // MinDocCount Minimum number of documents for buckets to be included in the response. MinDocCount *int64 `json:"min_doc_count,omitempty"` Name *string `json:"name,omitempty"` // PrefixLength Length of the network prefix. For IPv4 addresses the accepted range is [0, // 32]. // For IPv6 addresses the accepted range is [0, 128]. PrefixLength int `json:"prefix_length"` }
IpPrefixAggregation type.
func NewIpPrefixAggregation ¶
func NewIpPrefixAggregation() *IpPrefixAggregation
NewIpPrefixAggregation returns an IpPrefixAggregation.
type IpPrefixBucket ¶
type IpPrefixBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` IsIpv6 bool `json:"is_ipv6"` Key string `json:"key"` Netmask *string `json:"netmask,omitempty"` PrefixLength int `json:"prefix_length"` }
IpPrefixBucket type.
func NewIpPrefixBucket ¶
func NewIpPrefixBucket() *IpPrefixBucket
NewIpPrefixBucket returns an IpPrefixBucket.
func (IpPrefixBucket) MarshalJSON ¶
func (s IpPrefixBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*IpPrefixBucket) UnmarshalJSON ¶
func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error
type IpProperty ¶
type IpProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *string `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` Type string `json:"type,omitempty"` }
IpProperty type.
func (*IpProperty) UnmarshalJSON ¶
func (s *IpProperty) UnmarshalJSON(data []byte) error
type IpRangeAggregate ¶
type IpRangeAggregate struct { Buckets BucketsIpRangeBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
IpRangeAggregate type.
func NewIpRangeAggregate ¶
func NewIpRangeAggregate() *IpRangeAggregate
NewIpRangeAggregate returns an IpRangeAggregate.
func (*IpRangeAggregate) UnmarshalJSON ¶
func (s *IpRangeAggregate) UnmarshalJSON(data []byte) error
type IpRangeAggregation ¶
type IpRangeAggregation struct { Field *string `json:"field,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Ranges []IpRangeAggregationRange `json:"ranges,omitempty"` }
IpRangeAggregation type.
func NewIpRangeAggregation ¶
func NewIpRangeAggregation() *IpRangeAggregation
NewIpRangeAggregation returns an IpRangeAggregation.
type IpRangeAggregationRange ¶
type IpRangeAggregationRange struct { From string `json:"from,omitempty"` Mask *string `json:"mask,omitempty"` To string `json:"to,omitempty"` }
IpRangeAggregationRange type.
func NewIpRangeAggregationRange ¶
func NewIpRangeAggregationRange() *IpRangeAggregationRange
NewIpRangeAggregationRange returns an IpRangeAggregationRange.
type IpRangeBucket ¶
type IpRangeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` From *string `json:"from,omitempty"` Key *string `json:"key,omitempty"` To *string `json:"to,omitempty"` }
IpRangeBucket type.
func NewIpRangeBucket ¶
func NewIpRangeBucket() *IpRangeBucket
NewIpRangeBucket returns an IpRangeBucket.
func (IpRangeBucket) MarshalJSON ¶
func (s IpRangeBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*IpRangeBucket) UnmarshalJSON ¶
func (s *IpRangeBucket) UnmarshalJSON(data []byte) error
type IpRangeProperty ¶
type IpRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
IpRangeProperty type.
func NewIpRangeProperty ¶
func NewIpRangeProperty() *IpRangeProperty
NewIpRangeProperty returns an IpRangeProperty.
func (*IpRangeProperty) UnmarshalJSON ¶
func (s *IpRangeProperty) UnmarshalJSON(data []byte) error
type Job ¶
type Job struct { AllowLazyOpen bool `json:"allow_lazy_open"` AnalysisConfig AnalysisConfig `json:"analysis_config"` AnalysisLimits *AnalysisLimits `json:"analysis_limits,omitempty"` BackgroundPersistInterval Duration `json:"background_persist_interval,omitempty"` Blocked *JobBlocked `json:"blocked,omitempty"` CreateTime DateTime `json:"create_time,omitempty"` CustomSettings json.RawMessage `json:"custom_settings,omitempty"` DailyModelSnapshotRetentionAfterDays *int64 `json:"daily_model_snapshot_retention_after_days,omitempty"` DataDescription DataDescription `json:"data_description"` DatafeedConfig *MLDatafeed `json:"datafeed_config,omitempty"` Deleting *bool `json:"deleting,omitempty"` Description *string `json:"description,omitempty"` FinishedTime DateTime `json:"finished_time,omitempty"` Groups []string `json:"groups,omitempty"` JobId string `json:"job_id"` JobType *string `json:"job_type,omitempty"` JobVersion *string `json:"job_version,omitempty"` ModelPlotConfig *ModelPlotConfig `json:"model_plot_config,omitempty"` ModelSnapshotId *string `json:"model_snapshot_id,omitempty"` ModelSnapshotRetentionDays int64 `json:"model_snapshot_retention_days"` RenormalizationWindowDays *int64 `json:"renormalization_window_days,omitempty"` ResultsIndexName string `json:"results_index_name"` ResultsRetentionDays *int64 `json:"results_retention_days,omitempty"` }
Job type.
type JobBlocked ¶
type JobBlocked struct { Reason jobblockedreason.JobBlockedReason `json:"reason"` TaskId TaskId `json:"task_id,omitempty"` }
JobBlocked type.
type JobConfig ¶
type JobConfig struct { AllowLazyOpen *bool `json:"allow_lazy_open,omitempty"` AnalysisConfig AnalysisConfig `json:"analysis_config"` AnalysisLimits *AnalysisLimits `json:"analysis_limits,omitempty"` BackgroundPersistInterval Duration `json:"background_persist_interval,omitempty"` CustomSettings json.RawMessage `json:"custom_settings,omitempty"` DailyModelSnapshotRetentionAfterDays *int64 `json:"daily_model_snapshot_retention_after_days,omitempty"` DataDescription DataDescription `json:"data_description"` DatafeedConfig *DatafeedConfig `json:"datafeed_config,omitempty"` Description *string `json:"description,omitempty"` Groups []string `json:"groups,omitempty"` JobId *string `json:"job_id,omitempty"` JobType *string `json:"job_type,omitempty"` ModelPlotConfig *ModelPlotConfig `json:"model_plot_config,omitempty"` ModelSnapshotRetentionDays *int64 `json:"model_snapshot_retention_days,omitempty"` RenormalizationWindowDays *int64 `json:"renormalization_window_days,omitempty"` ResultsIndexName *string `json:"results_index_name,omitempty"` ResultsRetentionDays *int64 `json:"results_retention_days,omitempty"` }
JobConfig type.
type JobForecastStatistics ¶
type JobForecastStatistics struct { ForecastedJobs int `json:"forecasted_jobs"` MemoryBytes *JobStatistics `json:"memory_bytes,omitempty"` ProcessingTimeMs *JobStatistics `json:"processing_time_ms,omitempty"` Records *JobStatistics `json:"records,omitempty"` Status map[string]int64 `json:"status,omitempty"` Total int64 `json:"total"` }
JobForecastStatistics type.
func NewJobForecastStatistics ¶
func NewJobForecastStatistics() *JobForecastStatistics
NewJobForecastStatistics returns a JobForecastStatistics.
type JobStatistics ¶
type JobStatistics struct { Avg Float64 `json:"avg"` Max Float64 `json:"max"` Min Float64 `json:"min"` Total Float64 `json:"total"` }
JobStatistics type.
func NewJobStatistics ¶
func NewJobStatistics() *JobStatistics
NewJobStatistics returns a JobStatistics.
type JobStats ¶
type JobStats struct { AssignmentExplanation *string `json:"assignment_explanation,omitempty"` DataCounts DataCounts `json:"data_counts"` Deleting *bool `json:"deleting,omitempty"` ForecastsStats JobForecastStatistics `json:"forecasts_stats"` JobId string `json:"job_id"` ModelSizeStats ModelSizeStats `json:"model_size_stats"` Node *DiscoveryNode `json:"node,omitempty"` OpenTime DateTime `json:"open_time,omitempty"` State jobstate.JobState `json:"state"` TimingStats JobTimingStats `json:"timing_stats"` }
JobStats type.
type JobTimingStats ¶
type JobTimingStats struct { AverageBucketProcessingTimeMs Float64 `json:"average_bucket_processing_time_ms,omitempty"` BucketCount int64 `json:"bucket_count"` ExponentialAverageBucketProcessingTimeMs Float64 `json:"exponential_average_bucket_processing_time_ms,omitempty"` ExponentialAverageBucketProcessingTimePerHourMs Float64 `json:"exponential_average_bucket_processing_time_per_hour_ms"` JobId string `json:"job_id"` MaximumBucketProcessingTimeMs Float64 `json:"maximum_bucket_processing_time_ms,omitempty"` MinimumBucketProcessingTimeMs Float64 `json:"minimum_bucket_processing_time_ms,omitempty"` TotalBucketProcessingTimeMs Float64 `json:"total_bucket_processing_time_ms"` }
JobTimingStats type.
func NewJobTimingStats ¶
func NewJobTimingStats() *JobTimingStats
NewJobTimingStats returns a JobTimingStats.
type JobUsage ¶
type JobUsage struct { Count int `json:"count"` CreatedBy map[string]int64 `json:"created_by"` Detectors JobStatistics `json:"detectors"` Forecasts MlJobForecasts `json:"forecasts"` ModelSize JobStatistics `json:"model_size"` }
JobUsage type.
type JobsRecord ¶
type JobsRecord struct { // AssignmentExplanation why the job is or is not assigned to a node AssignmentExplanation *string `json:"assignment_explanation,omitempty"` // BucketsCount bucket count BucketsCount *string `json:"buckets.count,omitempty"` // BucketsTimeExpAvg exponential average bucket processing time (milliseconds) BucketsTimeExpAvg *string `json:"buckets.time.exp_avg,omitempty"` // BucketsTimeExpAvgHour exponential average bucket processing time by hour (milliseconds) BucketsTimeExpAvgHour *string `json:"buckets.time.exp_avg_hour,omitempty"` // BucketsTimeMax maximum bucket processing time BucketsTimeMax *string `json:"buckets.time.max,omitempty"` // BucketsTimeMin minimum bucket processing time BucketsTimeMin *string `json:"buckets.time.min,omitempty"` // BucketsTimeTotal total bucket processing time BucketsTimeTotal *string `json:"buckets.time.total,omitempty"` // DataBuckets total bucket count DataBuckets *string `json:"data.buckets,omitempty"` // DataEarliestRecord earliest record time DataEarliestRecord *string `json:"data.earliest_record,omitempty"` // DataEmptyBuckets number of empty buckets DataEmptyBuckets *string `json:"data.empty_buckets,omitempty"` // DataInputBytes total input bytes DataInputBytes ByteSize `json:"data.input_bytes,omitempty"` // DataInputFields total field count DataInputFields *string `json:"data.input_fields,omitempty"` // DataInputRecords total record count DataInputRecords *string `json:"data.input_records,omitempty"` // DataInvalidDates number of records with invalid dates DataInvalidDates *string `json:"data.invalid_dates,omitempty"` // DataLast last time data was seen DataLast *string `json:"data.last,omitempty"` // DataLastEmptyBucket last time an empty bucket occurred DataLastEmptyBucket *string `json:"data.last_empty_bucket,omitempty"` // DataLastSparseBucket last time a sparse bucket occurred DataLastSparseBucket *string `json:"data.last_sparse_bucket,omitempty"` // DataLatestRecord latest record time 
DataLatestRecord *string `json:"data.latest_record,omitempty"` // DataMissingFields number of records with missing fields DataMissingFields *string `json:"data.missing_fields,omitempty"` // DataOutOfOrderTimestamps number of records handled out of order DataOutOfOrderTimestamps *string `json:"data.out_of_order_timestamps,omitempty"` // DataProcessedFields number of processed fields DataProcessedFields *string `json:"data.processed_fields,omitempty"` // DataProcessedRecords number of processed records DataProcessedRecords *string `json:"data.processed_records,omitempty"` // DataSparseBuckets number of sparse buckets DataSparseBuckets *string `json:"data.sparse_buckets,omitempty"` // ForecastsMemoryAvg average memory used by forecasts ForecastsMemoryAvg *string `json:"forecasts.memory.avg,omitempty"` // ForecastsMemoryMax maximum memory used by forecasts ForecastsMemoryMax *string `json:"forecasts.memory.max,omitempty"` // ForecastsMemoryMin minimum memory used by forecasts ForecastsMemoryMin *string `json:"forecasts.memory.min,omitempty"` // ForecastsMemoryTotal total memory used by all forecasts ForecastsMemoryTotal *string `json:"forecasts.memory.total,omitempty"` // ForecastsRecordsAvg average record count for forecasts ForecastsRecordsAvg *string `json:"forecasts.records.avg,omitempty"` // ForecastsRecordsMax maximum record count for forecasts ForecastsRecordsMax *string `json:"forecasts.records.max,omitempty"` // ForecastsRecordsMin minimum record count for forecasts ForecastsRecordsMin *string `json:"forecasts.records.min,omitempty"` // ForecastsRecordsTotal total record count for all forecasts ForecastsRecordsTotal *string `json:"forecasts.records.total,omitempty"` // ForecastsTimeAvg average runtime for all forecasts (milliseconds) ForecastsTimeAvg *string `json:"forecasts.time.avg,omitempty"` // ForecastsTimeMax maximum run time for forecasts ForecastsTimeMax *string `json:"forecasts.time.max,omitempty"` // ForecastsTimeMin minimum runtime for forecasts 
ForecastsTimeMin *string `json:"forecasts.time.min,omitempty"` // ForecastsTimeTotal total runtime for all forecasts ForecastsTimeTotal *string `json:"forecasts.time.total,omitempty"` // ForecastsTotal total number of forecasts ForecastsTotal *string `json:"forecasts.total,omitempty"` // Id the job_id Id *string `json:"id,omitempty"` // ModelBucketAllocationFailures number of bucket allocation failures ModelBucketAllocationFailures *string `json:"model.bucket_allocation_failures,omitempty"` // ModelByFields count of 'by' fields ModelByFields *string `json:"model.by_fields,omitempty"` // ModelBytes model size ModelBytes ByteSize `json:"model.bytes,omitempty"` // ModelBytesExceeded how much the model has exceeded the limit ModelBytesExceeded ByteSize `json:"model.bytes_exceeded,omitempty"` // ModelCategorizationStatus current categorization status ModelCategorizationStatus *categorizationstatus.CategorizationStatus `json:"model.categorization_status,omitempty"` // ModelCategorizedDocCount count of categorized documents ModelCategorizedDocCount *string `json:"model.categorized_doc_count,omitempty"` // ModelDeadCategoryCount count of dead categories ModelDeadCategoryCount *string `json:"model.dead_category_count,omitempty"` // ModelFailedCategoryCount count of failed categories ModelFailedCategoryCount *string `json:"model.failed_category_count,omitempty"` // ModelFrequentCategoryCount count of frequent categories ModelFrequentCategoryCount *string `json:"model.frequent_category_count,omitempty"` // ModelLogTime when the model stats were gathered ModelLogTime *string `json:"model.log_time,omitempty"` // ModelMemoryLimit model memory limit ModelMemoryLimit *string `json:"model.memory_limit,omitempty"` // ModelMemoryStatus current memory status ModelMemoryStatus *memorystatus.MemoryStatus `json:"model.memory_status,omitempty"` // ModelOverFields count of 'over' fields ModelOverFields *string `json:"model.over_fields,omitempty"` // ModelPartitionFields count of 
'partition' fields ModelPartitionFields *string `json:"model.partition_fields,omitempty"` // ModelRareCategoryCount count of rare categories ModelRareCategoryCount *string `json:"model.rare_category_count,omitempty"` // ModelTimestamp the time of the last record when the model stats were gathered ModelTimestamp *string `json:"model.timestamp,omitempty"` // ModelTotalCategoryCount count of categories ModelTotalCategoryCount *string `json:"model.total_category_count,omitempty"` // NodeAddress network address of the assigned node NodeAddress *string `json:"node.address,omitempty"` // NodeEphemeralId ephemeral id of the assigned node NodeEphemeralId *string `json:"node.ephemeral_id,omitempty"` // NodeId id of the assigned node NodeId *string `json:"node.id,omitempty"` // NodeName name of the assigned node NodeName *string `json:"node.name,omitempty"` // OpenedTime the amount of time the job has been opened OpenedTime *string `json:"opened_time,omitempty"` // State the job state State *jobstate.JobState `json:"state,omitempty"` }
JobsRecord type.
type JoinProcessor ¶
type JoinProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Separator string `json:"separator"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
JoinProcessor type.
func NewJoinProcessor ¶
func NewJoinProcessor() *JoinProcessor
NewJoinProcessor returns a JoinProcessor.
type JoinProperty ¶
type JoinProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` EagerGlobalOrdinals *bool `json:"eager_global_ordinals,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Relations map[string][]string `json:"relations,omitempty"` Type string `json:"type,omitempty"` }
JoinProperty type.
func NewJoinProperty ¶
func NewJoinProperty() *JoinProperty
NewJoinProperty returns a JoinProperty.
func (*JoinProperty) UnmarshalJSON ¶
func (s *JoinProperty) UnmarshalJSON(data []byte) error
type JsonProcessor ¶
type JsonProcessor struct { AddToRoot *bool `json:"add_to_root,omitempty"` AddToRootConflictStrategy *jsonprocessorconflictstrategy.JsonProcessorConflictStrategy `json:"add_to_root_conflict_strategy,omitempty"` AllowDuplicateKeys *bool `json:"allow_duplicate_keys,omitempty"` Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
JsonProcessor type.
func NewJsonProcessor ¶
func NewJsonProcessor() *JsonProcessor
NewJsonProcessor returns a JsonProcessor.
type Jvm ¶
type Jvm struct { BufferPools map[string]NodeBufferPool `json:"buffer_pools,omitempty"` Classes *JvmClasses `json:"classes,omitempty"` Gc *GarbageCollector `json:"gc,omitempty"` Mem *JvmMemoryStats `json:"mem,omitempty"` Threads *JvmThreads `json:"threads,omitempty"` Timestamp *int64 `json:"timestamp,omitempty"` Uptime *string `json:"uptime,omitempty"` UptimeInMillis *int64 `json:"uptime_in_millis,omitempty"` }
Jvm type.
type JvmClasses ¶
type JvmClasses struct { CurrentLoadedCount *int64 `json:"current_loaded_count,omitempty"` TotalLoadedCount *int64 `json:"total_loaded_count,omitempty"` TotalUnloadedCount *int64 `json:"total_unloaded_count,omitempty"` }
JvmClasses type.
type JvmMemoryStats ¶
type JvmMemoryStats struct { HeapCommittedInBytes *int64 `json:"heap_committed_in_bytes,omitempty"` HeapMaxInBytes *int64 `json:"heap_max_in_bytes,omitempty"` HeapUsedInBytes *int64 `json:"heap_used_in_bytes,omitempty"` HeapUsedPercent *int64 `json:"heap_used_percent,omitempty"` NonHeapCommittedInBytes *int64 `json:"non_heap_committed_in_bytes,omitempty"` NonHeapUsedInBytes *int64 `json:"non_heap_used_in_bytes,omitempty"` Pools map[string]Pool `json:"pools,omitempty"` }
JvmMemoryStats type.
func NewJvmMemoryStats ¶
func NewJvmMemoryStats() *JvmMemoryStats
NewJvmMemoryStats returns a JvmMemoryStats.
type JvmStats ¶
type JvmStats struct { // HeapMax Maximum amount of memory available for use by the heap. HeapMax ByteSize `json:"heap_max,omitempty"` // HeapMaxInBytes Maximum amount of memory, in bytes, available for use by the heap. HeapMaxInBytes int `json:"heap_max_in_bytes"` // JavaInference Amount of Java heap currently being used for caching inference models. JavaInference ByteSize `json:"java_inference,omitempty"` // JavaInferenceInBytes Amount of Java heap, in bytes, currently being used for caching inference // models. JavaInferenceInBytes int `json:"java_inference_in_bytes"` // JavaInferenceMax Maximum amount of Java heap to be used for caching inference models. JavaInferenceMax ByteSize `json:"java_inference_max,omitempty"` // JavaInferenceMaxInBytes Maximum amount of Java heap, in bytes, to be used for caching inference // models. JavaInferenceMaxInBytes int `json:"java_inference_max_in_bytes"` }
JvmStats type.
type JvmThreads ¶
type JvmThreads struct { Count *int64 `json:"count,omitempty"` PeakCount *int64 `json:"peak_count,omitempty"` }
JvmThreads type.
type KStemTokenFilter ¶
type KStemTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
KStemTokenFilter type.
func NewKStemTokenFilter ¶
func NewKStemTokenFilter() *KStemTokenFilter
NewKStemTokenFilter returns a KStemTokenFilter.
type KeepTypesTokenFilter ¶
type KeepTypesTokenFilter struct { Mode *keeptypesmode.KeepTypesMode `json:"mode,omitempty"` Type string `json:"type,omitempty"` Types []string `json:"types,omitempty"` Version *string `json:"version,omitempty"` }
KeepTypesTokenFilter type.
func NewKeepTypesTokenFilter ¶
func NewKeepTypesTokenFilter() *KeepTypesTokenFilter
NewKeepTypesTokenFilter returns a KeepTypesTokenFilter.
type KeepWordsTokenFilter ¶
type KeepWordsTokenFilter struct { KeepWords []string `json:"keep_words,omitempty"` KeepWordsCase *bool `json:"keep_words_case,omitempty"` KeepWordsPath *string `json:"keep_words_path,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
KeepWordsTokenFilter type.
func NewKeepWordsTokenFilter ¶
func NewKeepWordsTokenFilter() *KeepWordsTokenFilter
NewKeepWordsTokenFilter returns a KeepWordsTokenFilter.
type KeyValueProcessor ¶
type KeyValueProcessor struct { Description *string `json:"description,omitempty"` ExcludeKeys []string `json:"exclude_keys,omitempty"` Field string `json:"field"` FieldSplit string `json:"field_split"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` IncludeKeys []string `json:"include_keys,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Prefix *string `json:"prefix,omitempty"` StripBrackets *bool `json:"strip_brackets,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` TrimKey *string `json:"trim_key,omitempty"` TrimValue *string `json:"trim_value,omitempty"` ValueSplit string `json:"value_split"` }
KeyValueProcessor type.
func NewKeyValueProcessor ¶
func NewKeyValueProcessor() *KeyValueProcessor
NewKeyValueProcessor returns a KeyValueProcessor.
type KeyedPercentiles ¶
KeyedPercentiles type alias.
type KeyedProcessor ¶
type KeyedProcessor struct { Stats *Processor `json:"stats,omitempty"` Type *string `json:"type,omitempty"` }
KeyedProcessor type.
func NewKeyedProcessor ¶
func NewKeyedProcessor() *KeyedProcessor
NewKeyedProcessor returns a KeyedProcessor.
type KeywordAnalyzer ¶
type KeywordAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
KeywordAnalyzer type.
func NewKeywordAnalyzer ¶
func NewKeywordAnalyzer() *KeywordAnalyzer
NewKeywordAnalyzer returns a KeywordAnalyzer.
type KeywordMarkerTokenFilter ¶
type KeywordMarkerTokenFilter struct { IgnoreCase *bool `json:"ignore_case,omitempty"` Keywords []string `json:"keywords,omitempty"` KeywordsPath *string `json:"keywords_path,omitempty"` KeywordsPattern *string `json:"keywords_pattern,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
KeywordMarkerTokenFilter type.
func NewKeywordMarkerTokenFilter ¶
func NewKeywordMarkerTokenFilter() *KeywordMarkerTokenFilter
NewKeywordMarkerTokenFilter returns a KeywordMarkerTokenFilter.
type KeywordProperty ¶
type KeywordProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` EagerGlobalOrdinals *bool `json:"eager_global_ordinals,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` IndexOptions *indexoptions.IndexOptions `json:"index_options,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Normalizer *string `json:"normalizer,omitempty"` Norms *bool `json:"norms,omitempty"` NullValue *string `json:"null_value,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` SplitQueriesOnWhitespace *bool `json:"split_queries_on_whitespace,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` Type string `json:"type,omitempty"` }
KeywordProperty type.
func NewKeywordProperty ¶
func NewKeywordProperty() *KeywordProperty
NewKeywordProperty returns a KeywordProperty.
func (*KeywordProperty) UnmarshalJSON ¶
func (s *KeywordProperty) UnmarshalJSON(data []byte) error
type KeywordTokenizer ¶
type KeywordTokenizer struct { BufferSize int `json:"buffer_size"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
KeywordTokenizer type.
func NewKeywordTokenizer ¶
func NewKeywordTokenizer() *KeywordTokenizer
NewKeywordTokenizer returns a KeywordTokenizer.
type KibanaToken ¶
KibanaToken type.
type KnnQuery ¶
type KnnQuery struct { // Boost Boost value to apply to kNN scores Boost *float32 `json:"boost,omitempty"` // Field The name of the vector field to search against Field string `json:"field"` // Filter Filters for the kNN search query Filter []Query `json:"filter,omitempty"` // K The final number of nearest neighbors to return as top hits K int64 `json:"k"` // NumCandidates The number of nearest neighbor candidates to consider per shard NumCandidates int64 `json:"num_candidates"` // QueryVector The query vector QueryVector []float32 `json:"query_vector,omitempty"` // QueryVectorBuilder The query vector builder. You must provide a query_vector_builder or // query_vector, but not both. QueryVectorBuilder *QueryVectorBuilder `json:"query_vector_builder,omitempty"` }
KnnQuery type.
type KuromojiAnalyzer ¶
type KuromojiAnalyzer struct { Mode kuromojitokenizationmode.KuromojiTokenizationMode `json:"mode"` Type string `json:"type,omitempty"` UserDictionary *string `json:"user_dictionary,omitempty"` }
KuromojiAnalyzer type.
func NewKuromojiAnalyzer ¶
func NewKuromojiAnalyzer() *KuromojiAnalyzer
NewKuromojiAnalyzer returns a KuromojiAnalyzer.
type KuromojiIterationMarkCharFilter ¶
type KuromojiIterationMarkCharFilter struct { NormalizeKana bool `json:"normalize_kana"` NormalizeKanji bool `json:"normalize_kanji"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
KuromojiIterationMarkCharFilter type.
func NewKuromojiIterationMarkCharFilter ¶
func NewKuromojiIterationMarkCharFilter() *KuromojiIterationMarkCharFilter
NewKuromojiIterationMarkCharFilter returns a KuromojiIterationMarkCharFilter.
type KuromojiPartOfSpeechTokenFilter ¶
type KuromojiPartOfSpeechTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
KuromojiPartOfSpeechTokenFilter type.
func NewKuromojiPartOfSpeechTokenFilter ¶
func NewKuromojiPartOfSpeechTokenFilter() *KuromojiPartOfSpeechTokenFilter
NewKuromojiPartOfSpeechTokenFilter returns a KuromojiPartOfSpeechTokenFilter.
type KuromojiReadingFormTokenFilter ¶
type KuromojiReadingFormTokenFilter struct { Type string `json:"type,omitempty"` UseRomaji bool `json:"use_romaji"` Version *string `json:"version,omitempty"` }
KuromojiReadingFormTokenFilter type.
func NewKuromojiReadingFormTokenFilter ¶
func NewKuromojiReadingFormTokenFilter() *KuromojiReadingFormTokenFilter
NewKuromojiReadingFormTokenFilter returns a KuromojiReadingFormTokenFilter.
type KuromojiStemmerTokenFilter ¶
type KuromojiStemmerTokenFilter struct { MinimumLength int `json:"minimum_length"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
KuromojiStemmerTokenFilter type.
func NewKuromojiStemmerTokenFilter ¶
func NewKuromojiStemmerTokenFilter() *KuromojiStemmerTokenFilter
NewKuromojiStemmerTokenFilter returns a KuromojiStemmerTokenFilter.
type KuromojiTokenizer ¶
type KuromojiTokenizer struct { DiscardCompoundToken *bool `json:"discard_compound_token,omitempty"` DiscardPunctuation *bool `json:"discard_punctuation,omitempty"` Mode kuromojitokenizationmode.KuromojiTokenizationMode `json:"mode"` NbestCost *int `json:"nbest_cost,omitempty"` NbestExamples *string `json:"nbest_examples,omitempty"` Type string `json:"type,omitempty"` UserDictionary *string `json:"user_dictionary,omitempty"` UserDictionaryRules []string `json:"user_dictionary_rules,omitempty"` Version *string `json:"version,omitempty"` }
KuromojiTokenizer type.
func NewKuromojiTokenizer ¶
func NewKuromojiTokenizer() *KuromojiTokenizer
NewKuromojiTokenizer returns a KuromojiTokenizer.
type LanguageAnalyzer ¶
type LanguageAnalyzer struct { Language language.Language `json:"language"` StemExclusion []string `json:"stem_exclusion"` Stopwords []string `json:"stopwords,omitempty"` StopwordsPath *string `json:"stopwords_path,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
LanguageAnalyzer type.
func NewLanguageAnalyzer ¶
func NewLanguageAnalyzer() *LanguageAnalyzer
NewLanguageAnalyzer returns a LanguageAnalyzer.
type LanguageContext ¶
type LanguageContext struct { Contexts []string `json:"contexts"` Language scriptlanguage.ScriptLanguage `json:"language"` }
LanguageContext type.
func NewLanguageContext ¶
func NewLanguageContext() *LanguageContext
NewLanguageContext returns a LanguageContext.
type LaplaceSmoothingModel ¶
type LaplaceSmoothingModel struct {
Alpha Float64 `json:"alpha"`
}
LaplaceSmoothingModel type.
func NewLaplaceSmoothingModel ¶
func NewLaplaceSmoothingModel() *LaplaceSmoothingModel
NewLaplaceSmoothingModel returns a LaplaceSmoothingModel.
type LatLonGeoLocation ¶
LatLonGeoLocation type.
func NewLatLonGeoLocation ¶
func NewLatLonGeoLocation() *LatLonGeoLocation
NewLatLonGeoLocation returns a LatLonGeoLocation.
type Latest ¶
type Latest struct { // Sort Specifies the date field that is used to identify the latest documents. Sort string `json:"sort"` // UniqueKey Specifies an array of one or more fields that are used to group the data. UniqueKey []string `json:"unique_key"` }
Latest type.
type LengthTokenFilter ¶
type LengthTokenFilter struct { Max *int `json:"max,omitempty"` Min *int `json:"min,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
LengthTokenFilter type.
func NewLengthTokenFilter ¶
func NewLengthTokenFilter() *LengthTokenFilter
NewLengthTokenFilter returns a LengthTokenFilter.
type LetterTokenizer ¶
type LetterTokenizer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
LetterTokenizer type.
func NewLetterTokenizer ¶
func NewLetterTokenizer() *LetterTokenizer
NewLetterTokenizer returns a LetterTokenizer.
type License ¶
type License struct { ExpiryDateInMillis int64 `json:"expiry_date_in_millis"` IssueDateInMillis int64 `json:"issue_date_in_millis"` IssuedTo string `json:"issued_to"` Issuer string `json:"issuer"` MaxNodes int64 `json:"max_nodes,omitempty"` MaxResourceUnits *int64 `json:"max_resource_units,omitempty"` Signature string `json:"signature"` StartDateInMillis *int64 `json:"start_date_in_millis,omitempty"` Type licensetype.LicenseType `json:"type"` Uid string `json:"uid"` }
License type.
type LicenseInformation ¶
type LicenseInformation struct { ExpiryDate DateTime `json:"expiry_date,omitempty"` ExpiryDateInMillis *int64 `json:"expiry_date_in_millis,omitempty"` IssueDate DateTime `json:"issue_date"` IssueDateInMillis int64 `json:"issue_date_in_millis"` IssuedTo string `json:"issued_to"` Issuer string `json:"issuer"` MaxNodes int64 `json:"max_nodes,omitempty"` MaxResourceUnits int `json:"max_resource_units,omitempty"` StartDateInMillis int64 `json:"start_date_in_millis"` Status licensestatus.LicenseStatus `json:"status"` Type licensetype.LicenseType `json:"type"` Uid string `json:"uid"` }
LicenseInformation type.
func NewLicenseInformation ¶
func NewLicenseInformation() *LicenseInformation
NewLicenseInformation returns a LicenseInformation.
type Lifecycle ¶
type Lifecycle struct { ModifiedDate DateTime `json:"modified_date"` Policy IlmPolicy `json:"policy"` Version int64 `json:"version"` }
Lifecycle type.
type LifecycleExplain ¶
type LifecycleExplain interface{}
LifecycleExplain holds the union for the following types:
LifecycleExplainManaged LifecycleExplainUnmanaged
type LifecycleExplainManaged ¶
type LifecycleExplainManaged struct { Action *string `json:"action,omitempty"` ActionTime DateTime `json:"action_time,omitempty"` ActionTimeMillis *int64 `json:"action_time_millis,omitempty"` Age Duration `json:"age,omitempty"` FailedStep *string `json:"failed_step,omitempty"` FailedStepRetryCount *int `json:"failed_step_retry_count,omitempty"` Index *string `json:"index,omitempty"` IndexCreationDate DateTime `json:"index_creation_date,omitempty"` IndexCreationDateMillis *int64 `json:"index_creation_date_millis,omitempty"` IsAutoRetryableError *bool `json:"is_auto_retryable_error,omitempty"` LifecycleDate DateTime `json:"lifecycle_date,omitempty"` LifecycleDateMillis *int64 `json:"lifecycle_date_millis,omitempty"` Managed bool `json:"managed,omitempty"` Phase string `json:"phase"` PhaseExecution *LifecycleExplainPhaseExecution `json:"phase_execution,omitempty"` PhaseTime DateTime `json:"phase_time,omitempty"` PhaseTimeMillis *int64 `json:"phase_time_millis,omitempty"` Policy string `json:"policy"` Step *string `json:"step,omitempty"` StepInfo map[string]json.RawMessage `json:"step_info,omitempty"` StepTime DateTime `json:"step_time,omitempty"` StepTimeMillis *int64 `json:"step_time_millis,omitempty"` TimeSinceIndexCreation Duration `json:"time_since_index_creation,omitempty"` }
LifecycleExplainManaged type.
func NewLifecycleExplainManaged ¶
func NewLifecycleExplainManaged() *LifecycleExplainManaged
NewLifecycleExplainManaged returns a LifecycleExplainManaged.
type LifecycleExplainPhaseExecution ¶
type LifecycleExplainPhaseExecution struct { ModifiedDateInMillis int64 `json:"modified_date_in_millis"` Policy string `json:"policy"` Version int64 `json:"version"` }
LifecycleExplainPhaseExecution type.
func NewLifecycleExplainPhaseExecution ¶
func NewLifecycleExplainPhaseExecution() *LifecycleExplainPhaseExecution
NewLifecycleExplainPhaseExecution returns a LifecycleExplainPhaseExecution.
type LifecycleExplainUnmanaged ¶
type LifecycleExplainUnmanaged struct { Index string `json:"index"` Managed bool `json:"managed,omitempty"` }
LifecycleExplainUnmanaged type.
func NewLifecycleExplainUnmanaged ¶
func NewLifecycleExplainUnmanaged() *LifecycleExplainUnmanaged
NewLifecycleExplainUnmanaged returns a LifecycleExplainUnmanaged.
type Like ¶
type Like interface{}
Like holds the union for the following types:
string LikeDocument
type LikeDocument ¶
type LikeDocument struct { Doc json.RawMessage `json:"doc,omitempty"` Fields []string `json:"fields,omitempty"` Id_ *string `json:"_id,omitempty"` Index_ *string `json:"_index,omitempty"` PerFieldAnalyzer map[string]string `json:"per_field_analyzer,omitempty"` Routing *string `json:"routing,omitempty"` Version *int64 `json:"version,omitempty"` VersionType *versiontype.VersionType `json:"version_type,omitempty"` }
LikeDocument type.
func NewLikeDocument ¶
func NewLikeDocument() *LikeDocument
NewLikeDocument returns a LikeDocument.
type LimitTokenCountTokenFilter ¶
type LimitTokenCountTokenFilter struct { ConsumeAllTokens *bool `json:"consume_all_tokens,omitempty"` MaxTokenCount *int `json:"max_token_count,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
LimitTokenCountTokenFilter type.
func NewLimitTokenCountTokenFilter ¶
func NewLimitTokenCountTokenFilter() *LimitTokenCountTokenFilter
NewLimitTokenCountTokenFilter returns a LimitTokenCountTokenFilter.
type Limits ¶
type Limits struct { EffectiveMaxModelMemoryLimit string `json:"effective_max_model_memory_limit"` MaxModelMemoryLimit *string `json:"max_model_memory_limit,omitempty"` TotalMlMemory string `json:"total_ml_memory"` }
Limits type.
type LinearInterpolationSmoothingModel ¶
type LinearInterpolationSmoothingModel struct { BigramLambda Float64 `json:"bigram_lambda"` TrigramLambda Float64 `json:"trigram_lambda"` UnigramLambda Float64 `json:"unigram_lambda"` }
LinearInterpolationSmoothingModel type.
func NewLinearInterpolationSmoothingModel ¶
func NewLinearInterpolationSmoothingModel() *LinearInterpolationSmoothingModel
NewLinearInterpolationSmoothingModel returns a LinearInterpolationSmoothingModel.
type LinearMovingAverageAggregation ¶
type LinearMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings EmptyObject `json:"settings"` Window *int `json:"window,omitempty"` }
LinearMovingAverageAggregation type.
func NewLinearMovingAverageAggregation ¶
func NewLinearMovingAverageAggregation() *LinearMovingAverageAggregation
NewLinearMovingAverageAggregation returns a LinearMovingAverageAggregation.
func (*LinearMovingAverageAggregation) UnmarshalJSON ¶
func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error
type LoggingAction ¶
type LoggingAction struct { Category *string `json:"category,omitempty"` Level *string `json:"level,omitempty"` Text string `json:"text"` }
LoggingAction type.
func NewLoggingAction ¶
func NewLoggingAction() *LoggingAction
NewLoggingAction returns a LoggingAction.
type LoggingResult ¶
type LoggingResult struct {
LoggedText string `json:"logged_text"`
}
LoggingResult type.
func NewLoggingResult ¶
func NewLoggingResult() *LoggingResult
NewLoggingResult returns a LoggingResult.
type LogstashPipeline ¶
type LogstashPipeline struct { Description string `json:"description"` LastModified DateTime `json:"last_modified"` Pipeline string `json:"pipeline"` PipelineMetadata PipelineMetadata `json:"pipeline_metadata"` PipelineSettings PipelineSettings `json:"pipeline_settings"` Username string `json:"username"` }
LogstashPipeline type.
func NewLogstashPipeline ¶
func NewLogstashPipeline() *LogstashPipeline
NewLogstashPipeline returns a LogstashPipeline.
type LongNumberProperty ¶
type LongNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *int64 `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
LongNumberProperty type.
func NewLongNumberProperty ¶
func NewLongNumberProperty() *LongNumberProperty
NewLongNumberProperty returns a LongNumberProperty.
func (*LongNumberProperty) UnmarshalJSON ¶
func (s *LongNumberProperty) UnmarshalJSON(data []byte) error
type LongRangeProperty ¶
type LongRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
LongRangeProperty type.
func NewLongRangeProperty ¶
func NewLongRangeProperty() *LongRangeProperty
NewLongRangeProperty returns a LongRangeProperty.
func (*LongRangeProperty) UnmarshalJSON ¶
func (s *LongRangeProperty) UnmarshalJSON(data []byte) error
type LongRareTermsAggregate ¶
type LongRareTermsAggregate struct { Buckets BucketsLongRareTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
LongRareTermsAggregate type.
func NewLongRareTermsAggregate ¶
func NewLongRareTermsAggregate() *LongRareTermsAggregate
NewLongRareTermsAggregate returns a LongRareTermsAggregate.
func (*LongRareTermsAggregate) UnmarshalJSON ¶
func (s *LongRareTermsAggregate) UnmarshalJSON(data []byte) error
type LongRareTermsBucket ¶
type LongRareTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key int64 `json:"key"` KeyAsString *string `json:"key_as_string,omitempty"` }
LongRareTermsBucket type.
func NewLongRareTermsBucket ¶
func NewLongRareTermsBucket() *LongRareTermsBucket
NewLongRareTermsBucket returns a LongRareTermsBucket.
func (LongRareTermsBucket) MarshalJSON ¶
func (s LongRareTermsBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*LongRareTermsBucket) UnmarshalJSON ¶
func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error
type LongTermsAggregate ¶
type LongTermsAggregate struct { Buckets BucketsLongTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
LongTermsAggregate type.
func NewLongTermsAggregate ¶
func NewLongTermsAggregate() *LongTermsAggregate
NewLongTermsAggregate returns a LongTermsAggregate.
func (*LongTermsAggregate) UnmarshalJSON ¶
func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error
type LongTermsBucket ¶
type LongTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` DocCountError *int64 `json:"doc_count_error,omitempty"` Key int64 `json:"key"` KeyAsString *string `json:"key_as_string,omitempty"` }
LongTermsBucket type.
func NewLongTermsBucket ¶
func NewLongTermsBucket() *LongTermsBucket
NewLongTermsBucket returns a LongTermsBucket.
func (LongTermsBucket) MarshalJSON ¶
func (s LongTermsBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*LongTermsBucket) UnmarshalJSON ¶
func (s *LongTermsBucket) UnmarshalJSON(data []byte) error
type LowercaseNormalizer ¶
type LowercaseNormalizer struct {
Type string `json:"type,omitempty"`
}
LowercaseNormalizer type.
func NewLowercaseNormalizer ¶
func NewLowercaseNormalizer() *LowercaseNormalizer
NewLowercaseNormalizer returns a LowercaseNormalizer.
type LowercaseProcessor ¶
type LowercaseProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
LowercaseProcessor type.
func NewLowercaseProcessor ¶
func NewLowercaseProcessor() *LowercaseProcessor
NewLowercaseProcessor returns a LowercaseProcessor.
type LowercaseTokenFilter ¶
type LowercaseTokenFilter struct { Language *string `json:"language,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
LowercaseTokenFilter type.
func NewLowercaseTokenFilter ¶
func NewLowercaseTokenFilter() *LowercaseTokenFilter
NewLowercaseTokenFilter returns a LowercaseTokenFilter.
type LowercaseTokenizer ¶
type LowercaseTokenizer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
LowercaseTokenizer type.
func NewLowercaseTokenizer ¶
func NewLowercaseTokenizer() *LowercaseTokenizer
NewLowercaseTokenizer returns a LowercaseTokenizer.
type MLDatafeed ¶
type MLDatafeed struct { Aggregations map[string]Aggregations `json:"aggregations,omitempty"` // Authorization The security privileges that the datafeed uses to run its queries. If Elastic // Stack security features were disabled at the time of the most recent update // to the datafeed, this property is omitted. Authorization *DatafeedAuthorization `json:"authorization,omitempty"` ChunkingConfig *ChunkingConfig `json:"chunking_config,omitempty"` DatafeedId string `json:"datafeed_id"` DelayedDataCheckConfig DelayedDataCheckConfig `json:"delayed_data_check_config"` Frequency Duration `json:"frequency,omitempty"` Indexes []string `json:"indexes,omitempty"` Indices []string `json:"indices"` IndicesOptions *IndicesOptions `json:"indices_options,omitempty"` JobId string `json:"job_id"` MaxEmptySearches *int `json:"max_empty_searches,omitempty"` Query Query `json:"query"` QueryDelay Duration `json:"query_delay,omitempty"` RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` ScriptFields map[string]ScriptField `json:"script_fields,omitempty"` ScrollSize *int `json:"scroll_size,omitempty"` }
MLDatafeed type.
type MLFilter ¶
type MLFilter struct { // Description A description of the filter. Description *string `json:"description,omitempty"` // FilterId A string that uniquely identifies a filter. FilterId string `json:"filter_id"` // Items An array of strings which is the filter item list. Items []string `json:"items"` }
MLFilter type.
type MTermVectorsOperation ¶
type MTermVectorsOperation struct { Doc json.RawMessage `json:"doc,omitempty"` FieldStatistics *bool `json:"field_statistics,omitempty"` Fields []string `json:"fields,omitempty"` Filter *TermVectorsFilter `json:"filter,omitempty"` Id_ string `json:"_id"` Index_ *string `json:"_index,omitempty"` Offsets *bool `json:"offsets,omitempty"` Payloads *bool `json:"payloads,omitempty"` Positions *bool `json:"positions,omitempty"` Routing *string `json:"routing,omitempty"` TermStatistics *bool `json:"term_statistics,omitempty"` Version *int64 `json:"version,omitempty"` VersionType *versiontype.VersionType `json:"version_type,omitempty"` }
MTermVectorsOperation type.
func NewMTermVectorsOperation ¶
func NewMTermVectorsOperation() *MTermVectorsOperation
NewMTermVectorsOperation returns a MTermVectorsOperation.
type MachineLearning ¶
type MachineLearning struct { Available bool `json:"available"` DataFrameAnalyticsJobs MlDataFrameAnalyticsJobs `json:"data_frame_analytics_jobs"` Datafeeds map[string]XpackDatafeed `json:"datafeeds"` Enabled bool `json:"enabled"` Inference MlInference `json:"inference"` // Jobs Job usage statistics. The `_all` entry is always present and gathers // statistics for all jobs. Jobs map[string]JobUsage `json:"jobs"` NodeCount int `json:"node_count"` }
MachineLearning type.
func NewMachineLearning ¶
func NewMachineLearning() *MachineLearning
NewMachineLearning returns a MachineLearning.
type ManageUserPrivileges ¶
type ManageUserPrivileges struct {
Applications []string `json:"applications"`
}
ManageUserPrivileges type.
func NewManageUserPrivileges ¶
func NewManageUserPrivileges() *ManageUserPrivileges
NewManageUserPrivileges returns a ManageUserPrivileges.
type MappingCharFilter ¶
type MappingCharFilter struct { Mappings []string `json:"mappings,omitempty"` MappingsPath *string `json:"mappings_path,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
MappingCharFilter type.
func NewMappingCharFilter ¶
func NewMappingCharFilter() *MappingCharFilter
NewMappingCharFilter returns a MappingCharFilter.
type MappingLimitSettings ¶
type MappingLimitSettings struct { Coerce *bool `json:"coerce,omitempty"` Depth *MappingLimitSettingsDepth `json:"depth,omitempty"` DimensionFields *MappingLimitSettingsDimensionFields `json:"dimension_fields,omitempty"` FieldNameLength *MappingLimitSettingsFieldNameLength `json:"field_name_length,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` NestedFields *MappingLimitSettingsNestedFields `json:"nested_fields,omitempty"` NestedObjects *MappingLimitSettingsNestedObjects `json:"nested_objects,omitempty"` TotalFields *MappingLimitSettingsTotalFields `json:"total_fields,omitempty"` }
MappingLimitSettings type.
func NewMappingLimitSettings ¶
func NewMappingLimitSettings() *MappingLimitSettings
NewMappingLimitSettings returns a MappingLimitSettings.
type MappingLimitSettingsDepth ¶
type MappingLimitSettingsDepth struct { // Limit The maximum depth for a field, which is measured as the number of inner // objects. For instance, if all fields are defined // at the root object level, then the depth is 1. If there is one object // mapping, then the depth is 2, etc. Limit *int `json:"limit,omitempty"` }
MappingLimitSettingsDepth type.
func NewMappingLimitSettingsDepth ¶
func NewMappingLimitSettingsDepth() *MappingLimitSettingsDepth
NewMappingLimitSettingsDepth returns a MappingLimitSettingsDepth.
type MappingLimitSettingsDimensionFields ¶
type MappingLimitSettingsDimensionFields struct { // Limit [preview] This functionality is in technical preview and may be changed or // removed in a future release. Elastic will // apply best effort to fix any issues, but features in technical preview are // not subject to the support SLA of official GA features. Limit *int `json:"limit,omitempty"` }
MappingLimitSettingsDimensionFields type.
func NewMappingLimitSettingsDimensionFields ¶
func NewMappingLimitSettingsDimensionFields() *MappingLimitSettingsDimensionFields
NewMappingLimitSettingsDimensionFields returns a MappingLimitSettingsDimensionFields.
type MappingLimitSettingsFieldNameLength ¶
type MappingLimitSettingsFieldNameLength struct { // Limit Setting for the maximum length of a field name. This setting isn’t really // something that addresses mappings explosion but // might still be useful if you want to limit the field length. It usually // shouldn’t be necessary to set this setting. The // default is okay unless a user starts to add a huge number of fields with // really long names. Default is `Long.MAX_VALUE` (no limit). Limit *int64 `json:"limit,omitempty"` }
MappingLimitSettingsFieldNameLength type.
func NewMappingLimitSettingsFieldNameLength ¶
func NewMappingLimitSettingsFieldNameLength() *MappingLimitSettingsFieldNameLength
NewMappingLimitSettingsFieldNameLength returns a MappingLimitSettingsFieldNameLength.
type MappingLimitSettingsNestedFields ¶
type MappingLimitSettingsNestedFields struct { // Limit The maximum number of distinct nested mappings in an index. The nested type // should only be used in special cases, when // arrays of objects need to be queried independently of each other. To // safeguard against poorly designed mappings, this // setting limits the number of unique nested types per index. Limit *int `json:"limit,omitempty"` }
MappingLimitSettingsNestedFields type.
func NewMappingLimitSettingsNestedFields ¶
func NewMappingLimitSettingsNestedFields() *MappingLimitSettingsNestedFields
NewMappingLimitSettingsNestedFields returns a MappingLimitSettingsNestedFields.
type MappingLimitSettingsNestedObjects ¶
type MappingLimitSettingsNestedObjects struct { // Limit The maximum number of nested JSON objects that a single document can contain // across all nested types. This limit helps // to prevent out of memory errors when a document contains too many nested // objects. Limit *int `json:"limit,omitempty"` }
MappingLimitSettingsNestedObjects type.
func NewMappingLimitSettingsNestedObjects ¶
func NewMappingLimitSettingsNestedObjects() *MappingLimitSettingsNestedObjects
NewMappingLimitSettingsNestedObjects returns a MappingLimitSettingsNestedObjects.
type MappingLimitSettingsTotalFields ¶
type MappingLimitSettingsTotalFields struct { // Limit The maximum number of fields in an index. Field and object mappings, as well // as field aliases count towards this limit. // The limit is in place to prevent mappings and searches from becoming too // large. Higher values can lead to performance // degradations and memory issues, especially in clusters with a high load or // few resources. Limit *int `json:"limit,omitempty"` }
MappingLimitSettingsTotalFields type.
func NewMappingLimitSettingsTotalFields ¶
func NewMappingLimitSettingsTotalFields() *MappingLimitSettingsTotalFields
NewMappingLimitSettingsTotalFields returns a MappingLimitSettingsTotalFields.
type MappingStats ¶
type MappingStats struct { TotalCount int64 `json:"total_count"` TotalEstimatedOverhead ByteSize `json:"total_estimated_overhead,omitempty"` TotalEstimatedOverheadInBytes int64 `json:"total_estimated_overhead_in_bytes"` }
MappingStats type.
func NewMappingStats ¶
func NewMappingStats() *MappingStats
NewMappingStats returns a MappingStats.
type MasterRecord ¶
type MasterRecord struct { // Host host name Host *string `json:"host,omitempty"` // Id node id Id *string `json:"id,omitempty"` // Ip ip address Ip *string `json:"ip,omitempty"` // Node node name Node *string `json:"node,omitempty"` }
MasterRecord type.
func NewMasterRecord ¶
func NewMasterRecord() *MasterRecord
NewMasterRecord returns a MasterRecord.
type MatchAllQuery ¶
type MatchAllQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
MatchAllQuery type.
func NewMatchAllQuery ¶
func NewMatchAllQuery() *MatchAllQuery
NewMatchAllQuery returns a MatchAllQuery.
type MatchBoolPrefixQuery ¶
type MatchBoolPrefixQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` Fuzziness Fuzziness `json:"fuzziness,omitempty"` FuzzyRewrite *string `json:"fuzzy_rewrite,omitempty"` FuzzyTranspositions *bool `json:"fuzzy_transpositions,omitempty"` MaxExpansions *int `json:"max_expansions,omitempty"` MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` Operator *operator.Operator `json:"operator,omitempty"` PrefixLength *int `json:"prefix_length,omitempty"` Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` }
MatchBoolPrefixQuery type.
func NewMatchBoolPrefixQuery ¶
func NewMatchBoolPrefixQuery() *MatchBoolPrefixQuery
NewMatchBoolPrefixQuery returns a MatchBoolPrefixQuery.
type MatchNoneQuery ¶
type MatchNoneQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
MatchNoneQuery type.
func NewMatchNoneQuery ¶
func NewMatchNoneQuery() *MatchNoneQuery
NewMatchNoneQuery returns a MatchNoneQuery.
type MatchOnlyTextProperty ¶
type MatchOnlyTextProperty struct { // CopyTo Allows you to copy the values of multiple fields into a group // field, which can then be queried as a single field. CopyTo []string `json:"copy_to,omitempty"` // Fields Multi-fields allow the same string value to be indexed in multiple ways for // different purposes, such as one // field for search and a multi-field for sorting and aggregations, or the same // string value analyzed by different analyzers. Fields map[string]Property `json:"fields,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Type string `json:"type,omitempty"` }
MatchOnlyTextProperty type.
func NewMatchOnlyTextProperty ¶
func NewMatchOnlyTextProperty() *MatchOnlyTextProperty
NewMatchOnlyTextProperty returns a MatchOnlyTextProperty.
func (*MatchOnlyTextProperty) UnmarshalJSON ¶
func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error
type MatchPhrasePrefixQuery ¶
type MatchPhrasePrefixQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` MaxExpansions *int `json:"max_expansions,omitempty"` Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` Slop *int `json:"slop,omitempty"` ZeroTermsQuery *zerotermsquery.ZeroTermsQuery `json:"zero_terms_query,omitempty"` }
MatchPhrasePrefixQuery type.
func NewMatchPhrasePrefixQuery ¶
func NewMatchPhrasePrefixQuery() *MatchPhrasePrefixQuery
NewMatchPhrasePrefixQuery returns a MatchPhrasePrefixQuery.
type MatchPhraseQuery ¶
type MatchPhraseQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` Slop *int `json:"slop,omitempty"` ZeroTermsQuery *zerotermsquery.ZeroTermsQuery `json:"zero_terms_query,omitempty"` }
MatchPhraseQuery type.
func NewMatchPhraseQuery ¶
func NewMatchPhraseQuery() *MatchPhraseQuery
NewMatchPhraseQuery returns a MatchPhraseQuery.
type MatchQuery ¶
type MatchQuery struct { Analyzer *string `json:"analyzer,omitempty"` AutoGenerateSynonymsPhraseQuery *bool `json:"auto_generate_synonyms_phrase_query,omitempty"` Boost *float32 `json:"boost,omitempty"` CutoffFrequency *Float64 `json:"cutoff_frequency,omitempty"` Fuzziness Fuzziness `json:"fuzziness,omitempty"` FuzzyRewrite *string `json:"fuzzy_rewrite,omitempty"` FuzzyTranspositions *bool `json:"fuzzy_transpositions,omitempty"` Lenient *bool `json:"lenient,omitempty"` MaxExpansions *int `json:"max_expansions,omitempty"` MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` Operator *operator.Operator `json:"operator,omitempty"` PrefixLength *int `json:"prefix_length,omitempty"` Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` ZeroTermsQuery *zerotermsquery.ZeroTermsQuery `json:"zero_terms_query,omitempty"` }
MatchQuery type.
type MatrixAggregation ¶
type MatrixAggregation struct { Fields []string `json:"fields,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Missing map[string]Float64 `json:"missing,omitempty"` Name *string `json:"name,omitempty"` }
MatrixAggregation type.
func NewMatrixAggregation ¶
func NewMatrixAggregation() *MatrixAggregation
NewMatrixAggregation returns a MatrixAggregation.
type MatrixStatsAggregate ¶
type MatrixStatsAggregate struct { DocCount int64 `json:"doc_count"` Fields []MatrixStatsFields `json:"fields,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MatrixStatsAggregate type.
func NewMatrixStatsAggregate ¶
func NewMatrixStatsAggregate() *MatrixStatsAggregate
NewMatrixStatsAggregate returns a MatrixStatsAggregate.
type MatrixStatsAggregation ¶
type MatrixStatsAggregation struct { Fields []string `json:"fields,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Missing map[string]Float64 `json:"missing,omitempty"` Mode *sortmode.SortMode `json:"mode,omitempty"` Name *string `json:"name,omitempty"` }
MatrixStatsAggregation type.
func NewMatrixStatsAggregation ¶
func NewMatrixStatsAggregation() *MatrixStatsAggregation
NewMatrixStatsAggregation returns a MatrixStatsAggregation.
type MatrixStatsFields ¶
type MatrixStatsFields struct { Correlation map[string]Float64 `json:"correlation"` Count int64 `json:"count"` Covariance map[string]Float64 `json:"covariance"` Kurtosis Float64 `json:"kurtosis"` Mean Float64 `json:"mean"` Name string `json:"name"` Skewness Float64 `json:"skewness"` Variance Float64 `json:"variance"` }
MatrixStatsFields type.
func NewMatrixStatsFields ¶
func NewMatrixStatsFields() *MatrixStatsFields
NewMatrixStatsFields returns a MatrixStatsFields.
type MaxAggregate ¶
type MaxAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
MaxAggregate type.
func NewMaxAggregate ¶
func NewMaxAggregate() *MaxAggregate
NewMaxAggregate returns a MaxAggregate.
type MaxAggregation ¶
type MaxAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
MaxAggregation type.
func NewMaxAggregation ¶
func NewMaxAggregation() *MaxAggregation
NewMaxAggregation returns a MaxAggregation.
type MaxBucketAggregation ¶
type MaxBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
MaxBucketAggregation type.
func NewMaxBucketAggregation ¶
func NewMaxBucketAggregation() *MaxBucketAggregation
NewMaxBucketAggregation returns a MaxBucketAggregation.
func (*MaxBucketAggregation) UnmarshalJSON ¶
func (s *MaxBucketAggregation) UnmarshalJSON(data []byte) error
type MedianAbsoluteDeviationAggregate ¶
type MedianAbsoluteDeviationAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
MedianAbsoluteDeviationAggregate type.
func NewMedianAbsoluteDeviationAggregate ¶
func NewMedianAbsoluteDeviationAggregate() *MedianAbsoluteDeviationAggregate
NewMedianAbsoluteDeviationAggregate returns a MedianAbsoluteDeviationAggregate.
type MedianAbsoluteDeviationAggregation ¶
type MedianAbsoluteDeviationAggregation struct { Compression *Float64 `json:"compression,omitempty"` Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
MedianAbsoluteDeviationAggregation type.
func NewMedianAbsoluteDeviationAggregation ¶
func NewMedianAbsoluteDeviationAggregation() *MedianAbsoluteDeviationAggregation
NewMedianAbsoluteDeviationAggregation returns a MedianAbsoluteDeviationAggregation.
type MemMlStats ¶
type MemMlStats struct { // AnomalyDetectors Amount of native memory set aside for anomaly detection jobs. AnomalyDetectors ByteSize `json:"anomaly_detectors,omitempty"` // AnomalyDetectorsInBytes Amount of native memory, in bytes, set aside for anomaly detection jobs. AnomalyDetectorsInBytes int `json:"anomaly_detectors_in_bytes"` // DataFrameAnalytics Amount of native memory set aside for data frame analytics jobs. DataFrameAnalytics ByteSize `json:"data_frame_analytics,omitempty"` // DataFrameAnalyticsInBytes Amount of native memory, in bytes, set aside for data frame analytics jobs. DataFrameAnalyticsInBytes int `json:"data_frame_analytics_in_bytes"` // Max Maximum amount of native memory (separate to the JVM heap) that may be used // by machine learning native processes. Max ByteSize `json:"max,omitempty"` // MaxInBytes Maximum amount of native memory (separate to the JVM heap), in bytes, that // may be used by machine learning native processes. MaxInBytes int `json:"max_in_bytes"` // NativeCodeOverhead Amount of native memory set aside for loading machine learning native code // shared libraries. NativeCodeOverhead ByteSize `json:"native_code_overhead,omitempty"` // NativeCodeOverheadInBytes Amount of native memory, in bytes, set aside for loading machine learning // native code shared libraries. NativeCodeOverheadInBytes int `json:"native_code_overhead_in_bytes"` // NativeInference Amount of native memory set aside for trained models that have a PyTorch // model_type. NativeInference ByteSize `json:"native_inference,omitempty"` // NativeInferenceInBytes Amount of native memory, in bytes, set aside for trained models that have a // PyTorch model_type. NativeInferenceInBytes int `json:"native_inference_in_bytes"` }
MemMlStats type.
type MemStats ¶
type MemStats struct { // AdjustedTotal If the amount of physical memory has been overridden using the // es.total_memory_bytes system property // then this reports the overridden value. Otherwise it reports the same value // as total. AdjustedTotal ByteSize `json:"adjusted_total,omitempty"` // AdjustedTotalInBytes If the amount of physical memory has been overridden using the // `es.total_memory_bytes` system property // then this reports the overridden value in bytes. Otherwise it reports the // same value as `total_in_bytes`. AdjustedTotalInBytes int `json:"adjusted_total_in_bytes"` // Ml Contains statistics about machine learning use of native memory on the node. Ml MemMlStats `json:"ml"` // Total Total amount of physical memory. Total ByteSize `json:"total,omitempty"` // TotalInBytes Total amount of physical memory in bytes. TotalInBytes int `json:"total_in_bytes"` }
MemStats type.
type Memory ¶
type Memory struct { Attributes map[string]string `json:"attributes"` EphemeralId string `json:"ephemeral_id"` // Jvm Contains Java Virtual Machine (JVM) statistics for the node. Jvm JvmStats `json:"jvm"` // Mem Contains statistics about memory usage for the node. Mem MemStats `json:"mem"` // Name Human-readable identifier for the node. Based on the Node name setting. Name string `json:"name"` // Roles Roles assigned to the node. Roles []string `json:"roles"` // TransportAddress The host and port where transport HTTP connections are accepted. TransportAddress string `json:"transport_address"` }
Memory type.
type MemoryStats ¶
type MemoryStats struct { AdjustedTotalInBytes *int64 `json:"adjusted_total_in_bytes,omitempty"` FreeInBytes *int64 `json:"free_in_bytes,omitempty"` Resident *string `json:"resident,omitempty"` ResidentInBytes *int64 `json:"resident_in_bytes,omitempty"` TotalInBytes *int64 `json:"total_in_bytes,omitempty"` TotalVirtual *string `json:"total_virtual,omitempty"` TotalVirtualInBytes *int64 `json:"total_virtual_in_bytes,omitempty"` UsedInBytes *int64 `json:"used_in_bytes,omitempty"` }
MemoryStats type.
type Merge ¶
type Merge struct {
Scheduler *MergeScheduler `json:"scheduler,omitempty"`
}
Merge type.
type MergeScheduler ¶
type MergeScheduler struct { MaxMergeCount *int `json:"max_merge_count,omitempty"` MaxThreadCount *int `json:"max_thread_count,omitempty"` }
MergeScheduler type.
func NewMergeScheduler ¶
func NewMergeScheduler() *MergeScheduler
NewMergeScheduler returns a MergeScheduler.
type MergesStats ¶
type MergesStats struct { Current int64 `json:"current"` CurrentDocs int64 `json:"current_docs"` CurrentSize *string `json:"current_size,omitempty"` CurrentSizeInBytes int64 `json:"current_size_in_bytes"` Total int64 `json:"total"` TotalAutoThrottle *string `json:"total_auto_throttle,omitempty"` TotalAutoThrottleInBytes int64 `json:"total_auto_throttle_in_bytes"` TotalDocs int64 `json:"total_docs"` TotalSize *string `json:"total_size,omitempty"` TotalSizeInBytes int64 `json:"total_size_in_bytes"` TotalStoppedTime Duration `json:"total_stopped_time,omitempty"` TotalStoppedTimeInMillis int64 `json:"total_stopped_time_in_millis"` TotalThrottledTime Duration `json:"total_throttled_time,omitempty"` TotalThrottledTimeInMillis int64 `json:"total_throttled_time_in_millis"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` }
MergesStats type.
type MgetOperation ¶
type MgetOperation struct { // Id_ The unique document ID. Id_ string `json:"_id"` // Index_ The index that contains the document. Index_ *string `json:"_index,omitempty"` // Routing The key for the primary shard the document resides on. Required if routing is // used during indexing. Routing *string `json:"routing,omitempty"` // Source_ If `false`, excludes all _source fields. Source_ SourceConfig `json:"_source,omitempty"` // StoredFields The stored fields you want to retrieve. StoredFields []string `json:"stored_fields,omitempty"` Version *int64 `json:"version,omitempty"` VersionType *versiontype.VersionType `json:"version_type,omitempty"` }
MgetOperation type.
func NewMgetOperation ¶
func NewMgetOperation() *MgetOperation
NewMgetOperation returns a MgetOperation.
type MigrationFeatureIndexInfo ¶
type MigrationFeatureIndexInfo struct { FailureCause *ErrorCause `json:"failure_cause,omitempty"` Index string `json:"index"` Version string `json:"version"` }
MigrationFeatureIndexInfo type.
func NewMigrationFeatureIndexInfo ¶
func NewMigrationFeatureIndexInfo() *MigrationFeatureIndexInfo
NewMigrationFeatureIndexInfo returns a MigrationFeatureIndexInfo.
type MinAggregate ¶
type MinAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
MinAggregate type.
func NewMinAggregate ¶
func NewMinAggregate() *MinAggregate
NewMinAggregate returns a MinAggregate.
type MinAggregation ¶
type MinAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
MinAggregation type.
func NewMinAggregation ¶
func NewMinAggregation() *MinAggregation
NewMinAggregation returns a MinAggregation.
type MinBucketAggregation ¶
type MinBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
MinBucketAggregation type.
func NewMinBucketAggregation ¶
func NewMinBucketAggregation() *MinBucketAggregation
NewMinBucketAggregation returns a MinBucketAggregation.
func (*MinBucketAggregation) UnmarshalJSON ¶
func (s *MinBucketAggregation) UnmarshalJSON(data []byte) error
type MinimalLicenseInformation ¶
type MinimalLicenseInformation struct { ExpiryDateInMillis int64 `json:"expiry_date_in_millis"` Mode licensetype.LicenseType `json:"mode"` Status licensestatus.LicenseStatus `json:"status"` Type licensetype.LicenseType `json:"type"` Uid string `json:"uid"` }
MinimalLicenseInformation type.
func NewMinimalLicenseInformation ¶
func NewMinimalLicenseInformation() *MinimalLicenseInformation
NewMinimalLicenseInformation returns a MinimalLicenseInformation.
type MinimumShouldMatch ¶
type MinimumShouldMatch interface{}
MinimumShouldMatch holds the union for the following types:
int string
type Missing ¶
type Missing interface{}
Missing holds the union for the following types:
string int Float64 bool
type MissingAggregate ¶
type MissingAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MissingAggregate type.
func NewMissingAggregate ¶
func NewMissingAggregate() *MissingAggregate
NewMissingAggregate returns a MissingAggregate.
func (MissingAggregate) MarshalJSON ¶
func (s MissingAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*MissingAggregate) UnmarshalJSON ¶
func (s *MissingAggregate) UnmarshalJSON(data []byte) error
type MissingAggregation ¶
type MissingAggregation struct { Field *string `json:"field,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Missing Missing `json:"missing,omitempty"` Name *string `json:"name,omitempty"` }
MissingAggregation type.
func NewMissingAggregation ¶
func NewMissingAggregation() *MissingAggregation
NewMissingAggregation returns a MissingAggregation.
type MlDataFrameAnalyticsJobs ¶
type MlDataFrameAnalyticsJobs struct { All_ MlDataFrameAnalyticsJobsCount `json:"_all"` AnalysisCounts *MlDataFrameAnalyticsJobsAnalysis `json:"analysis_counts,omitempty"` MemoryUsage *MlDataFrameAnalyticsJobsMemory `json:"memory_usage,omitempty"` Stopped *MlDataFrameAnalyticsJobsCount `json:"stopped,omitempty"` }
MlDataFrameAnalyticsJobs type.
func NewMlDataFrameAnalyticsJobs ¶
func NewMlDataFrameAnalyticsJobs() *MlDataFrameAnalyticsJobs
NewMlDataFrameAnalyticsJobs returns a MlDataFrameAnalyticsJobs.
type MlDataFrameAnalyticsJobsAnalysis ¶
type MlDataFrameAnalyticsJobsAnalysis struct { Classification *int `json:"classification,omitempty"` OutlierDetection *int `json:"outlier_detection,omitempty"` Regression *int `json:"regression,omitempty"` }
MlDataFrameAnalyticsJobsAnalysis type.
func NewMlDataFrameAnalyticsJobsAnalysis ¶
func NewMlDataFrameAnalyticsJobsAnalysis() *MlDataFrameAnalyticsJobsAnalysis
NewMlDataFrameAnalyticsJobsAnalysis returns a MlDataFrameAnalyticsJobsAnalysis.
type MlDataFrameAnalyticsJobsCount ¶
type MlDataFrameAnalyticsJobsCount struct {
Count int64 `json:"count"`
}
MlDataFrameAnalyticsJobsCount type.
func NewMlDataFrameAnalyticsJobsCount ¶
func NewMlDataFrameAnalyticsJobsCount() *MlDataFrameAnalyticsJobsCount
NewMlDataFrameAnalyticsJobsCount returns a MlDataFrameAnalyticsJobsCount.
type MlDataFrameAnalyticsJobsMemory ¶
type MlDataFrameAnalyticsJobsMemory struct {
PeakUsageBytes JobStatistics `json:"peak_usage_bytes"`
}
MlDataFrameAnalyticsJobsMemory type.
func NewMlDataFrameAnalyticsJobsMemory ¶
func NewMlDataFrameAnalyticsJobsMemory() *MlDataFrameAnalyticsJobsMemory
NewMlDataFrameAnalyticsJobsMemory returns a MlDataFrameAnalyticsJobsMemory.
type MlInference ¶
type MlInference struct { Deployments *MlInferenceDeployments `json:"deployments,omitempty"` IngestProcessors map[string]MlInferenceIngestProcessor `json:"ingest_processors"` TrainedModels MlInferenceTrainedModels `json:"trained_models"` }
MlInference type.
type MlInferenceDeployments ¶
type MlInferenceDeployments struct { Count int `json:"count"` InferenceCounts JobStatistics `json:"inference_counts"` ModelSizesBytes JobStatistics `json:"model_sizes_bytes"` TimeMs MlInferenceDeploymentsTimeMs `json:"time_ms"` }
MlInferenceDeployments type.
func NewMlInferenceDeployments ¶
func NewMlInferenceDeployments() *MlInferenceDeployments
NewMlInferenceDeployments returns a MlInferenceDeployments.
type MlInferenceDeploymentsTimeMs ¶
type MlInferenceDeploymentsTimeMs struct {
Avg Float64 `json:"avg"`
}
MlInferenceDeploymentsTimeMs type.
func NewMlInferenceDeploymentsTimeMs ¶
func NewMlInferenceDeploymentsTimeMs() *MlInferenceDeploymentsTimeMs
NewMlInferenceDeploymentsTimeMs returns a MlInferenceDeploymentsTimeMs.
type MlInferenceIngestProcessor ¶
type MlInferenceIngestProcessor struct { NumDocsProcessed MlInferenceIngestProcessorCount `json:"num_docs_processed"` NumFailures MlInferenceIngestProcessorCount `json:"num_failures"` Pipelines MlCounter `json:"pipelines"` TimeMs MlInferenceIngestProcessorCount `json:"time_ms"` }
MlInferenceIngestProcessor type.
func NewMlInferenceIngestProcessor ¶
func NewMlInferenceIngestProcessor() *MlInferenceIngestProcessor
NewMlInferenceIngestProcessor returns a MlInferenceIngestProcessor.
type MlInferenceIngestProcessorCount ¶
type MlInferenceIngestProcessorCount struct { Max int64 `json:"max"` Min int64 `json:"min"` Sum int64 `json:"sum"` }
MlInferenceIngestProcessorCount type.
func NewMlInferenceIngestProcessorCount ¶
func NewMlInferenceIngestProcessorCount() *MlInferenceIngestProcessorCount
NewMlInferenceIngestProcessorCount returns a MlInferenceIngestProcessorCount.
type MlInferenceTrainedModels ¶
type MlInferenceTrainedModels struct { All_ MlCounter `json:"_all"` Count *MlInferenceTrainedModelsCount `json:"count,omitempty"` EstimatedHeapMemoryUsageBytes *JobStatistics `json:"estimated_heap_memory_usage_bytes,omitempty"` EstimatedOperations *JobStatistics `json:"estimated_operations,omitempty"` ModelSizeBytes *JobStatistics `json:"model_size_bytes,omitempty"` }
MlInferenceTrainedModels type.
func NewMlInferenceTrainedModels ¶
func NewMlInferenceTrainedModels() *MlInferenceTrainedModels
NewMlInferenceTrainedModels returns a MlInferenceTrainedModels.
type MlInferenceTrainedModelsCount ¶
type MlInferenceTrainedModelsCount struct { Classification *int64 `json:"classification,omitempty"` Ner *int64 `json:"ner,omitempty"` Other int64 `json:"other"` PassThrough *int64 `json:"pass_through,omitempty"` Prepackaged int64 `json:"prepackaged"` Regression *int64 `json:"regression,omitempty"` TextEmbedding *int64 `json:"text_embedding,omitempty"` Total int64 `json:"total"` }
MlInferenceTrainedModelsCount type.
func NewMlInferenceTrainedModelsCount ¶
func NewMlInferenceTrainedModelsCount() *MlInferenceTrainedModelsCount
NewMlInferenceTrainedModelsCount returns a MlInferenceTrainedModelsCount.
type MlJobForecasts ¶
type MlJobForecasts struct { ForecastedJobs int64 `json:"forecasted_jobs"` Total int64 `json:"total"` }
MlJobForecasts type.
func NewMlJobForecasts ¶
func NewMlJobForecasts() *MlJobForecasts
NewMlJobForecasts returns a MlJobForecasts.
type ModelPlotConfig ¶
type ModelPlotConfig struct { // AnnotationsEnabled If true, enables calculation and storage of the model change annotations for // each entity that is being analyzed. AnnotationsEnabled *bool `json:"annotations_enabled,omitempty"` // Enabled If true, enables calculation and storage of the model bounds for each entity // that is being analyzed. Enabled *bool `json:"enabled,omitempty"` // Terms Limits data collection to this comma separated list of partition or by field // values. If terms are not specified or it is an empty string, no filtering is // applied. Wildcards are not supported. Only the specified terms can be viewed // when using the Single Metric Viewer. Terms *string `json:"terms,omitempty"` }
ModelPlotConfig type.
func NewModelPlotConfig ¶
func NewModelPlotConfig() *ModelPlotConfig
NewModelPlotConfig returns a ModelPlotConfig.
type ModelSizeStats ¶
type ModelSizeStats struct { AssignmentMemoryBasis *string `json:"assignment_memory_basis,omitempty"` BucketAllocationFailuresCount int64 `json:"bucket_allocation_failures_count"` CategorizationStatus categorizationstatus.CategorizationStatus `json:"categorization_status"` CategorizedDocCount int `json:"categorized_doc_count"` DeadCategoryCount int `json:"dead_category_count"` FailedCategoryCount int `json:"failed_category_count"` FrequentCategoryCount int `json:"frequent_category_count"` JobId string `json:"job_id"` LogTime DateTime `json:"log_time"` MemoryStatus memorystatus.MemoryStatus `json:"memory_status"` ModelBytes ByteSize `json:"model_bytes"` ModelBytesExceeded ByteSize `json:"model_bytes_exceeded,omitempty"` ModelBytesMemoryLimit ByteSize `json:"model_bytes_memory_limit,omitempty"` PeakModelBytes ByteSize `json:"peak_model_bytes,omitempty"` RareCategoryCount int `json:"rare_category_count"` ResultType string `json:"result_type"` Timestamp *int64 `json:"timestamp,omitempty"` TotalByFieldCount int64 `json:"total_by_field_count"` TotalCategoryCount int `json:"total_category_count"` TotalOverFieldCount int64 `json:"total_over_field_count"` TotalPartitionFieldCount int64 `json:"total_partition_field_count"` }
ModelSizeStats type.
func NewModelSizeStats ¶
func NewModelSizeStats() *ModelSizeStats
NewModelSizeStats returns a ModelSizeStats.
type ModelSnapshot ¶
type ModelSnapshot struct { // Description An optional description of the job. Description *string `json:"description,omitempty"` // JobId A numerical character string that uniquely identifies the job that the // snapshot was created for. JobId string `json:"job_id"` // LatestRecordTimeStamp The timestamp of the latest processed record. LatestRecordTimeStamp *int `json:"latest_record_time_stamp,omitempty"` // LatestResultTimeStamp The timestamp of the latest bucket result. LatestResultTimeStamp *int `json:"latest_result_time_stamp,omitempty"` // MinVersion The minimum version required to be able to restore the model snapshot. MinVersion string `json:"min_version"` // ModelSizeStats Summary information describing the model. ModelSizeStats *ModelSizeStats `json:"model_size_stats,omitempty"` // Retain If true, this snapshot will not be deleted during automatic cleanup of // snapshots older than model_snapshot_retention_days. However, this snapshot // will be deleted when the job is deleted. The default value is false. Retain bool `json:"retain"` // SnapshotDocCount For internal use only. SnapshotDocCount int64 `json:"snapshot_doc_count"` // SnapshotId A numerical character string that uniquely identifies the model snapshot. SnapshotId string `json:"snapshot_id"` // Timestamp The creation timestamp for the snapshot. Timestamp int64 `json:"timestamp"` }
ModelSnapshot type.
func NewModelSnapshot ¶
func NewModelSnapshot() *ModelSnapshot
NewModelSnapshot returns a ModelSnapshot.
type ModelSnapshotUpgrade ¶
type ModelSnapshotUpgrade struct { AssignmentExplanation string `json:"assignment_explanation"` JobId string `json:"job_id"` Node DiscoveryNode `json:"node"` SnapshotId string `json:"snapshot_id"` State snapshotupgradestate.SnapshotUpgradeState `json:"state"` }
ModelSnapshotUpgrade type.
func NewModelSnapshotUpgrade ¶
func NewModelSnapshotUpgrade() *ModelSnapshotUpgrade
NewModelSnapshotUpgrade returns a ModelSnapshotUpgrade.
type Monitoring ¶
type Monitoring struct { Available bool `json:"available"` CollectionEnabled bool `json:"collection_enabled"` Enabled bool `json:"enabled"` EnabledExporters map[string]int64 `json:"enabled_exporters"` }
Monitoring type.
type MoreLikeThisQuery ¶
type MoreLikeThisQuery struct { Analyzer *string `json:"analyzer,omitempty"` Boost *float32 `json:"boost,omitempty"` BoostTerms *Float64 `json:"boost_terms,omitempty"` FailOnUnsupportedField *bool `json:"fail_on_unsupported_field,omitempty"` Fields []string `json:"fields,omitempty"` Include *bool `json:"include,omitempty"` Like []Like `json:"like"` MaxDocFreq *int `json:"max_doc_freq,omitempty"` MaxQueryTerms *int `json:"max_query_terms,omitempty"` MaxWordLength *int `json:"max_word_length,omitempty"` MinDocFreq *int `json:"min_doc_freq,omitempty"` MinTermFreq *int `json:"min_term_freq,omitempty"` MinWordLength *int `json:"min_word_length,omitempty"` MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` PerFieldAnalyzer map[string]string `json:"per_field_analyzer,omitempty"` QueryName_ *string `json:"_name,omitempty"` Routing *string `json:"routing,omitempty"` StopWords []string `json:"stop_words,omitempty"` Unlike []Like `json:"unlike,omitempty"` Version *int64 `json:"version,omitempty"` VersionType *versiontype.VersionType `json:"version_type,omitempty"` }
MoreLikeThisQuery type.
func NewMoreLikeThisQuery ¶
func NewMoreLikeThisQuery() *MoreLikeThisQuery
NewMoreLikeThisQuery returns a MoreLikeThisQuery.
type MountedSnapshot ¶
type MountedSnapshot struct { Indices []string `json:"indices"` Shards ShardStatistics `json:"shards"` Snapshot string `json:"snapshot"` }
MountedSnapshot type.
func NewMountedSnapshot ¶
func NewMountedSnapshot() *MountedSnapshot
NewMountedSnapshot returns a MountedSnapshot.
type MovingAverageAggregation ¶
type MovingAverageAggregation interface{}
MovingAverageAggregation holds the union for the following types:
LinearMovingAverageAggregation SimpleMovingAverageAggregation EwmaMovingAverageAggregation HoltMovingAverageAggregation HoltWintersMovingAverageAggregation
type MovingFunctionAggregation ¶
type MovingFunctionAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Script *string `json:"script,omitempty"` Shift *int `json:"shift,omitempty"` Window *int `json:"window,omitempty"` }
MovingFunctionAggregation type.
func NewMovingFunctionAggregation ¶
func NewMovingFunctionAggregation() *MovingFunctionAggregation
NewMovingFunctionAggregation returns a MovingFunctionAggregation.
func (*MovingFunctionAggregation) UnmarshalJSON ¶
func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error
type MovingPercentilesAggregation ¶
type MovingPercentilesAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Keyed *bool `json:"keyed,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Shift *int `json:"shift,omitempty"` Window *int `json:"window,omitempty"` }
MovingPercentilesAggregation type.
func NewMovingPercentilesAggregation ¶
func NewMovingPercentilesAggregation() *MovingPercentilesAggregation
NewMovingPercentilesAggregation returns a MovingPercentilesAggregation.
func (*MovingPercentilesAggregation) UnmarshalJSON ¶
func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseAdjacencyMatrixBucket ¶
type MultiBucketAggregateBaseAdjacencyMatrixBucket struct { Buckets BucketsAdjacencyMatrixBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseAdjacencyMatrixBucket type.
func NewMultiBucketAggregateBaseAdjacencyMatrixBucket ¶
func NewMultiBucketAggregateBaseAdjacencyMatrixBucket() *MultiBucketAggregateBaseAdjacencyMatrixBucket
NewMultiBucketAggregateBaseAdjacencyMatrixBucket returns a MultiBucketAggregateBaseAdjacencyMatrixBucket.
func (*MultiBucketAggregateBaseAdjacencyMatrixBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseAdjacencyMatrixBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseCompositeBucket ¶
type MultiBucketAggregateBaseCompositeBucket struct { Buckets BucketsCompositeBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseCompositeBucket type.
func NewMultiBucketAggregateBaseCompositeBucket ¶
func NewMultiBucketAggregateBaseCompositeBucket() *MultiBucketAggregateBaseCompositeBucket
NewMultiBucketAggregateBaseCompositeBucket returns a MultiBucketAggregateBaseCompositeBucket.
func (*MultiBucketAggregateBaseCompositeBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseCompositeBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseDateHistogramBucket ¶
type MultiBucketAggregateBaseDateHistogramBucket struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseDateHistogramBucket type.
func NewMultiBucketAggregateBaseDateHistogramBucket ¶
func NewMultiBucketAggregateBaseDateHistogramBucket() *MultiBucketAggregateBaseDateHistogramBucket
NewMultiBucketAggregateBaseDateHistogramBucket returns a MultiBucketAggregateBaseDateHistogramBucket.
func (*MultiBucketAggregateBaseDateHistogramBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseDateHistogramBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseDoubleTermsBucket ¶
type MultiBucketAggregateBaseDoubleTermsBucket struct { Buckets BucketsDoubleTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseDoubleTermsBucket type.
func NewMultiBucketAggregateBaseDoubleTermsBucket ¶
func NewMultiBucketAggregateBaseDoubleTermsBucket() *MultiBucketAggregateBaseDoubleTermsBucket
NewMultiBucketAggregateBaseDoubleTermsBucket returns a MultiBucketAggregateBaseDoubleTermsBucket.
func (*MultiBucketAggregateBaseDoubleTermsBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseFiltersBucket ¶
type MultiBucketAggregateBaseFiltersBucket struct { Buckets BucketsFiltersBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseFiltersBucket type.
func NewMultiBucketAggregateBaseFiltersBucket ¶
func NewMultiBucketAggregateBaseFiltersBucket() *MultiBucketAggregateBaseFiltersBucket
NewMultiBucketAggregateBaseFiltersBucket returns a MultiBucketAggregateBaseFiltersBucket.
func (*MultiBucketAggregateBaseFiltersBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseFiltersBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseGeoHashGridBucket ¶
type MultiBucketAggregateBaseGeoHashGridBucket struct { Buckets BucketsGeoHashGridBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseGeoHashGridBucket type.
func NewMultiBucketAggregateBaseGeoHashGridBucket ¶
func NewMultiBucketAggregateBaseGeoHashGridBucket() *MultiBucketAggregateBaseGeoHashGridBucket
NewMultiBucketAggregateBaseGeoHashGridBucket returns a MultiBucketAggregateBaseGeoHashGridBucket.
func (*MultiBucketAggregateBaseGeoHashGridBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseGeoHashGridBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseGeoHexGridBucket ¶
type MultiBucketAggregateBaseGeoHexGridBucket struct { Buckets BucketsGeoHexGridBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseGeoHexGridBucket type.
func NewMultiBucketAggregateBaseGeoHexGridBucket ¶
func NewMultiBucketAggregateBaseGeoHexGridBucket() *MultiBucketAggregateBaseGeoHexGridBucket
NewMultiBucketAggregateBaseGeoHexGridBucket returns a MultiBucketAggregateBaseGeoHexGridBucket.
func (*MultiBucketAggregateBaseGeoHexGridBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseGeoHexGridBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseGeoTileGridBucket ¶
type MultiBucketAggregateBaseGeoTileGridBucket struct { Buckets BucketsGeoTileGridBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseGeoTileGridBucket type.
func NewMultiBucketAggregateBaseGeoTileGridBucket ¶
func NewMultiBucketAggregateBaseGeoTileGridBucket() *MultiBucketAggregateBaseGeoTileGridBucket
NewMultiBucketAggregateBaseGeoTileGridBucket returns a MultiBucketAggregateBaseGeoTileGridBucket.
func (*MultiBucketAggregateBaseGeoTileGridBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseGeoTileGridBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseHistogramBucket ¶
type MultiBucketAggregateBaseHistogramBucket struct { Buckets BucketsHistogramBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseHistogramBucket type.
func NewMultiBucketAggregateBaseHistogramBucket ¶
func NewMultiBucketAggregateBaseHistogramBucket() *MultiBucketAggregateBaseHistogramBucket
NewMultiBucketAggregateBaseHistogramBucket returns a MultiBucketAggregateBaseHistogramBucket.
func (*MultiBucketAggregateBaseHistogramBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseHistogramBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseIpPrefixBucket ¶
type MultiBucketAggregateBaseIpPrefixBucket struct { Buckets BucketsIpPrefixBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseIpPrefixBucket type.
func NewMultiBucketAggregateBaseIpPrefixBucket ¶
func NewMultiBucketAggregateBaseIpPrefixBucket() *MultiBucketAggregateBaseIpPrefixBucket
NewMultiBucketAggregateBaseIpPrefixBucket returns a MultiBucketAggregateBaseIpPrefixBucket.
func (*MultiBucketAggregateBaseIpPrefixBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseIpPrefixBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseIpRangeBucket ¶
type MultiBucketAggregateBaseIpRangeBucket struct { Buckets BucketsIpRangeBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseIpRangeBucket type.
func NewMultiBucketAggregateBaseIpRangeBucket ¶
func NewMultiBucketAggregateBaseIpRangeBucket() *MultiBucketAggregateBaseIpRangeBucket
NewMultiBucketAggregateBaseIpRangeBucket returns a MultiBucketAggregateBaseIpRangeBucket.
func (*MultiBucketAggregateBaseIpRangeBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseIpRangeBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseLongRareTermsBucket ¶
type MultiBucketAggregateBaseLongRareTermsBucket struct { Buckets BucketsLongRareTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseLongRareTermsBucket type.
func NewMultiBucketAggregateBaseLongRareTermsBucket ¶
func NewMultiBucketAggregateBaseLongRareTermsBucket() *MultiBucketAggregateBaseLongRareTermsBucket
NewMultiBucketAggregateBaseLongRareTermsBucket returns a MultiBucketAggregateBaseLongRareTermsBucket.
func (*MultiBucketAggregateBaseLongRareTermsBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseLongRareTermsBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseLongTermsBucket ¶
type MultiBucketAggregateBaseLongTermsBucket struct { Buckets BucketsLongTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseLongTermsBucket type.
func NewMultiBucketAggregateBaseLongTermsBucket ¶
func NewMultiBucketAggregateBaseLongTermsBucket() *MultiBucketAggregateBaseLongTermsBucket
NewMultiBucketAggregateBaseLongTermsBucket returns a MultiBucketAggregateBaseLongTermsBucket.
func (*MultiBucketAggregateBaseLongTermsBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseMultiTermsBucket ¶
type MultiBucketAggregateBaseMultiTermsBucket struct { Buckets BucketsMultiTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseMultiTermsBucket type.
func NewMultiBucketAggregateBaseMultiTermsBucket ¶
func NewMultiBucketAggregateBaseMultiTermsBucket() *MultiBucketAggregateBaseMultiTermsBucket
NewMultiBucketAggregateBaseMultiTermsBucket returns a MultiBucketAggregateBaseMultiTermsBucket.
func (*MultiBucketAggregateBaseMultiTermsBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseRangeBucket ¶
type MultiBucketAggregateBaseRangeBucket struct { Buckets BucketsRangeBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseRangeBucket type.
func NewMultiBucketAggregateBaseRangeBucket ¶
func NewMultiBucketAggregateBaseRangeBucket() *MultiBucketAggregateBaseRangeBucket
NewMultiBucketAggregateBaseRangeBucket returns a MultiBucketAggregateBaseRangeBucket.
func (*MultiBucketAggregateBaseRangeBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseRangeBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseSignificantLongTermsBucket ¶
type MultiBucketAggregateBaseSignificantLongTermsBucket struct { Buckets BucketsSignificantLongTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseSignificantLongTermsBucket type.
func NewMultiBucketAggregateBaseSignificantLongTermsBucket ¶
func NewMultiBucketAggregateBaseSignificantLongTermsBucket() *MultiBucketAggregateBaseSignificantLongTermsBucket
NewMultiBucketAggregateBaseSignificantLongTermsBucket returns a MultiBucketAggregateBaseSignificantLongTermsBucket.
func (*MultiBucketAggregateBaseSignificantLongTermsBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseSignificantLongTermsBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseSignificantStringTermsBucket ¶
type MultiBucketAggregateBaseSignificantStringTermsBucket struct { Buckets BucketsSignificantStringTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseSignificantStringTermsBucket type.
func NewMultiBucketAggregateBaseSignificantStringTermsBucket ¶
func NewMultiBucketAggregateBaseSignificantStringTermsBucket() *MultiBucketAggregateBaseSignificantStringTermsBucket
NewMultiBucketAggregateBaseSignificantStringTermsBucket returns a MultiBucketAggregateBaseSignificantStringTermsBucket.
func (*MultiBucketAggregateBaseSignificantStringTermsBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseSignificantStringTermsBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseStringRareTermsBucket ¶
type MultiBucketAggregateBaseStringRareTermsBucket struct { Buckets BucketsStringRareTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseStringRareTermsBucket type.
func NewMultiBucketAggregateBaseStringRareTermsBucket ¶
func NewMultiBucketAggregateBaseStringRareTermsBucket() *MultiBucketAggregateBaseStringRareTermsBucket
NewMultiBucketAggregateBaseStringRareTermsBucket returns a MultiBucketAggregateBaseStringRareTermsBucket.
func (*MultiBucketAggregateBaseStringRareTermsBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseStringRareTermsBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseStringTermsBucket ¶
type MultiBucketAggregateBaseStringTermsBucket struct { Buckets BucketsStringTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseStringTermsBucket type.
func NewMultiBucketAggregateBaseStringTermsBucket ¶
func NewMultiBucketAggregateBaseStringTermsBucket() *MultiBucketAggregateBaseStringTermsBucket
NewMultiBucketAggregateBaseStringTermsBucket returns a MultiBucketAggregateBaseStringTermsBucket.
func (*MultiBucketAggregateBaseStringTermsBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseVariableWidthHistogramBucket ¶
type MultiBucketAggregateBaseVariableWidthHistogramBucket struct { Buckets BucketsVariableWidthHistogramBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseVariableWidthHistogramBucket type.
func NewMultiBucketAggregateBaseVariableWidthHistogramBucket ¶
func NewMultiBucketAggregateBaseVariableWidthHistogramBucket() *MultiBucketAggregateBaseVariableWidthHistogramBucket
NewMultiBucketAggregateBaseVariableWidthHistogramBucket returns a MultiBucketAggregateBaseVariableWidthHistogramBucket.
func (*MultiBucketAggregateBaseVariableWidthHistogramBucket) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseVariableWidthHistogramBucket) UnmarshalJSON(data []byte) error
type MultiBucketAggregateBaseVoid ¶
type MultiBucketAggregateBaseVoid struct { Buckets BucketsVoid `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
MultiBucketAggregateBaseVoid type.
func NewMultiBucketAggregateBaseVoid ¶
func NewMultiBucketAggregateBaseVoid() *MultiBucketAggregateBaseVoid
NewMultiBucketAggregateBaseVoid returns a MultiBucketAggregateBaseVoid.
func (*MultiBucketAggregateBaseVoid) UnmarshalJSON ¶
func (s *MultiBucketAggregateBaseVoid) UnmarshalJSON(data []byte) error
type MultiGetError ¶
type MultiGetError struct { Error ErrorCause `json:"error"` Id_ string `json:"_id"` Index_ string `json:"_index"` }
MultiGetError type.
func NewMultiGetError ¶
func NewMultiGetError() *MultiGetError
NewMultiGetError returns a MultiGetError.
type MultiMatchQuery ¶
type MultiMatchQuery struct { Analyzer *string `json:"analyzer,omitempty"` AutoGenerateSynonymsPhraseQuery *bool `json:"auto_generate_synonyms_phrase_query,omitempty"` Boost *float32 `json:"boost,omitempty"` CutoffFrequency *Float64 `json:"cutoff_frequency,omitempty"` Fields []string `json:"fields,omitempty"` Fuzziness Fuzziness `json:"fuzziness,omitempty"` FuzzyRewrite *string `json:"fuzzy_rewrite,omitempty"` FuzzyTranspositions *bool `json:"fuzzy_transpositions,omitempty"` Lenient *bool `json:"lenient,omitempty"` MaxExpansions *int `json:"max_expansions,omitempty"` MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` Operator *operator.Operator `json:"operator,omitempty"` PrefixLength *int `json:"prefix_length,omitempty"` Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` Slop *int `json:"slop,omitempty"` TieBreaker *Float64 `json:"tie_breaker,omitempty"` Type *textquerytype.TextQueryType `json:"type,omitempty"` ZeroTermsQuery *zerotermsquery.ZeroTermsQuery `json:"zero_terms_query,omitempty"` }
MultiMatchQuery type.
func NewMultiMatchQuery ¶
func NewMultiMatchQuery() *MultiMatchQuery
NewMultiMatchQuery returns a MultiMatchQuery.
type MultiTermLookup ¶
type MultiTermLookup struct { Field string `json:"field"` Missing Missing `json:"missing,omitempty"` }
MultiTermLookup type.
func NewMultiTermLookup ¶
func NewMultiTermLookup() *MultiTermLookup
NewMultiTermLookup returns a MultiTermLookup.
type MultiTermsAggregate ¶
type MultiTermsAggregate struct { Buckets BucketsMultiTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
MultiTermsAggregate type.
func NewMultiTermsAggregate ¶
func NewMultiTermsAggregate() *MultiTermsAggregate
NewMultiTermsAggregate returns a MultiTermsAggregate.
func (*MultiTermsAggregate) UnmarshalJSON ¶
func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error
type MultiTermsAggregation ¶
type MultiTermsAggregation struct { CollectMode *termsaggregationcollectmode.TermsAggregationCollectMode `json:"collect_mode,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` MinDocCount *int64 `json:"min_doc_count,omitempty"` Name *string `json:"name,omitempty"` Order AggregateOrder `json:"order,omitempty"` ShardMinDocCount *int64 `json:"shard_min_doc_count,omitempty"` ShardSize *int `json:"shard_size,omitempty"` ShowTermDocCountError *bool `json:"show_term_doc_count_error,omitempty"` Size *int `json:"size,omitempty"` Terms []MultiTermLookup `json:"terms"` }
MultiTermsAggregation type.
func NewMultiTermsAggregation ¶
func NewMultiTermsAggregation() *MultiTermsAggregation
NewMultiTermsAggregation returns a MultiTermsAggregation.
func (*MultiTermsAggregation) UnmarshalJSON ¶
func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error
type MultiTermsBucket ¶
type MultiTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Key []FieldValue `json:"key"` KeyAsString *string `json:"key_as_string,omitempty"` }
MultiTermsBucket type.
func NewMultiTermsBucket ¶
func NewMultiTermsBucket() *MultiTermsBucket
NewMultiTermsBucket returns a MultiTermsBucket.
func (MultiTermsBucket) MarshalJSON ¶
func (s MultiTermsBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*MultiTermsBucket) UnmarshalJSON ¶
func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error
type MultiplexerTokenFilter ¶
type MultiplexerTokenFilter struct { Filters []string `json:"filters"` PreserveOriginal *bool `json:"preserve_original,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
MultiplexerTokenFilter type.
func NewMultiplexerTokenFilter ¶
func NewMultiplexerTokenFilter() *MultiplexerTokenFilter
NewMultiplexerTokenFilter returns a MultiplexerTokenFilter.
type Murmur3HashProperty ¶
type Murmur3HashProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
Murmur3HashProperty type.
func NewMurmur3HashProperty ¶
func NewMurmur3HashProperty() *Murmur3HashProperty
NewMurmur3HashProperty returns a Murmur3HashProperty.
func (*Murmur3HashProperty) UnmarshalJSON ¶
func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error
type MutualInformationHeuristic ¶
type MutualInformationHeuristic struct { BackgroundIsSuperset *bool `json:"background_is_superset,omitempty"` IncludeNegatives *bool `json:"include_negatives,omitempty"` }
MutualInformationHeuristic type.
func NewMutualInformationHeuristic ¶
func NewMutualInformationHeuristic() *MutualInformationHeuristic
NewMutualInformationHeuristic returns a MutualInformationHeuristic.
type NGramTokenFilter ¶
type NGramTokenFilter struct { MaxGram *int `json:"max_gram,omitempty"` MinGram *int `json:"min_gram,omitempty"` PreserveOriginal *bool `json:"preserve_original,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
NGramTokenFilter type.
func NewNGramTokenFilter ¶
func NewNGramTokenFilter() *NGramTokenFilter
NewNGramTokenFilter returns a NGramTokenFilter.
type NGramTokenizer ¶
type NGramTokenizer struct { CustomTokenChars *string `json:"custom_token_chars,omitempty"` MaxGram int `json:"max_gram"` MinGram int `json:"min_gram"` TokenChars []tokenchar.TokenChar `json:"token_chars"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
NGramTokenizer type.
func NewNGramTokenizer ¶
func NewNGramTokenizer() *NGramTokenizer
NewNGramTokenizer returns a NGramTokenizer.
type NativeCode ¶
NativeCode type.
type NativeCodeInformation ¶
type NativeCodeInformation struct { BuildHash string `json:"build_hash"` Version string `json:"version"` }
NativeCodeInformation type.
func NewNativeCodeInformation ¶
func NewNativeCodeInformation() *NativeCodeInformation
NewNativeCodeInformation returns a NativeCodeInformation.
type NerInferenceOptions ¶
type NerInferenceOptions struct { // ClassificationLabels The token classification labels. Must be IOB formatted tags ClassificationLabels []string `json:"classification_labels,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` Vocabulary *Vocabulary `json:"vocabulary,omitempty"` }
NerInferenceOptions type.
func NewNerInferenceOptions ¶
func NewNerInferenceOptions() *NerInferenceOptions
NewNerInferenceOptions returns a NerInferenceOptions.
type NerInferenceUpdateOptions ¶
type NerInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options to update when inferring Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` }
NerInferenceUpdateOptions type.
func NewNerInferenceUpdateOptions ¶
func NewNerInferenceUpdateOptions() *NerInferenceUpdateOptions
NewNerInferenceUpdateOptions returns a NerInferenceUpdateOptions.
type NestedAggregate ¶
type NestedAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
NestedAggregate type.
func NewNestedAggregate ¶
func NewNestedAggregate() *NestedAggregate
NewNestedAggregate returns a NestedAggregate.
func (NestedAggregate) MarshalJSON ¶
func (s NestedAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*NestedAggregate) UnmarshalJSON ¶
func (s *NestedAggregate) UnmarshalJSON(data []byte) error
type NestedAggregation ¶
type NestedAggregation struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Path *string `json:"path,omitempty"` }
NestedAggregation type.
func NewNestedAggregation ¶
func NewNestedAggregation() *NestedAggregation
NewNestedAggregation returns a NestedAggregation.
type NestedIdentity ¶
type NestedIdentity struct { Field string `json:"field"` Nested_ *NestedIdentity `json:"_nested,omitempty"` Offset int `json:"offset"` }
NestedIdentity type.
func NewNestedIdentity ¶
func NewNestedIdentity() *NestedIdentity
NewNestedIdentity returns a NestedIdentity.
type NestedProperty ¶
type NestedProperty struct { CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Enabled *bool `json:"enabled,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IncludeInParent *bool `json:"include_in_parent,omitempty"` IncludeInRoot *bool `json:"include_in_root,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
NestedProperty type.
func NewNestedProperty ¶
func NewNestedProperty() *NestedProperty
NewNestedProperty returns a NestedProperty.
func (*NestedProperty) UnmarshalJSON ¶
func (s *NestedProperty) UnmarshalJSON(data []byte) error
type NestedQuery ¶
type NestedQuery struct { Boost *float32 `json:"boost,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` InnerHits *InnerHits `json:"inner_hits,omitempty"` Path string `json:"path"` Query *Query `json:"query,omitempty"` QueryName_ *string `json:"_name,omitempty"` ScoreMode *childscoremode.ChildScoreMode `json:"score_mode,omitempty"` }
NestedQuery type.
type NestedSortValue ¶
type NestedSortValue struct { Filter *Query `json:"filter,omitempty"` MaxChildren *int `json:"max_children,omitempty"` Nested *NestedSortValue `json:"nested,omitempty"` Path string `json:"path"` }
NestedSortValue type.
func NewNestedSortValue ¶
func NewNestedSortValue() *NestedSortValue
NewNestedSortValue returns a NestedSortValue.
type NeverCondition ¶
type NeverCondition struct { }
NeverCondition type.
func NewNeverCondition ¶
func NewNeverCondition() *NeverCondition
NewNeverCondition returns a NeverCondition.
type NlpBertTokenizationConfig ¶
type NlpBertTokenizationConfig struct { // DoLowerCase Should the tokenizer lower case the text DoLowerCase *bool `json:"do_lower_case,omitempty"` // MaxSequenceLength Maximum input sequence length for the model MaxSequenceLength *int `json:"max_sequence_length,omitempty"` // Span Tokenization spanning options. Special value of -1 indicates no spanning // takes place Span *int `json:"span,omitempty"` // Truncate Should tokenization input be automatically truncated before sending to the // model for inference Truncate *tokenizationtruncate.TokenizationTruncate `json:"truncate,omitempty"` // WithSpecialTokens Is tokenization completed with special tokens WithSpecialTokens *bool `json:"with_special_tokens,omitempty"` }
NlpBertTokenizationConfig type.
func NewNlpBertTokenizationConfig ¶
func NewNlpBertTokenizationConfig() *NlpBertTokenizationConfig
NewNlpBertTokenizationConfig returns a NlpBertTokenizationConfig.
type NlpRobertaTokenizationConfig ¶
type NlpRobertaTokenizationConfig struct { // AddPrefixSpace Should the tokenizer prefix input with a space character AddPrefixSpace *bool `json:"add_prefix_space,omitempty"` // MaxSequenceLength Maximum input sequence length for the model MaxSequenceLength *int `json:"max_sequence_length,omitempty"` // Span Tokenization spanning options. Special value of -1 indicates no spanning // takes place Span *int `json:"span,omitempty"` // Truncate Should tokenization input be automatically truncated before sending to the // model for inference Truncate *tokenizationtruncate.TokenizationTruncate `json:"truncate,omitempty"` // WithSpecialTokens Is tokenization completed with special tokens WithSpecialTokens *bool `json:"with_special_tokens,omitempty"` }
NlpRobertaTokenizationConfig type.
func NewNlpRobertaTokenizationConfig ¶
func NewNlpRobertaTokenizationConfig() *NlpRobertaTokenizationConfig
NewNlpRobertaTokenizationConfig returns a NlpRobertaTokenizationConfig.
type NlpTokenizationUpdateOptions ¶
type NlpTokenizationUpdateOptions struct { // Span Span options to apply Span *int `json:"span,omitempty"` // Truncate Truncate options to apply Truncate *tokenizationtruncate.TokenizationTruncate `json:"truncate,omitempty"` }
NlpTokenizationUpdateOptions type.
func NewNlpTokenizationUpdateOptions ¶
func NewNlpTokenizationUpdateOptions() *NlpTokenizationUpdateOptions
NewNlpTokenizationUpdateOptions returns a NlpTokenizationUpdateOptions.
type NodeAllocationExplanation ¶
type NodeAllocationExplanation struct { Deciders []AllocationDecision `json:"deciders"` NodeAttributes map[string]string `json:"node_attributes"` NodeDecision decision.Decision `json:"node_decision"` NodeId string `json:"node_id"` NodeName string `json:"node_name"` Store *AllocationStore `json:"store,omitempty"` TransportAddress string `json:"transport_address"` WeightRanking int `json:"weight_ranking"` }
NodeAllocationExplanation type.
func NewNodeAllocationExplanation ¶
func NewNodeAllocationExplanation() *NodeAllocationExplanation
NewNodeAllocationExplanation returns a NodeAllocationExplanation.
type NodeAttributes ¶
type NodeAttributes struct { // Attributes Lists node attributes. Attributes map[string]string `json:"attributes"` // EphemeralId The ephemeral ID of the node. EphemeralId string `json:"ephemeral_id"` ExternalId *string `json:"external_id,omitempty"` // Id The unique identifier of the node. Id *string `json:"id,omitempty"` // Name The unique name of the node. Name string `json:"name"` Roles []noderole.NodeRole `json:"roles,omitempty"` // TransportAddress The host and port where transport HTTP connections are accepted. TransportAddress string `json:"transport_address"` }
NodeAttributes type.
func NewNodeAttributes ¶
func NewNodeAttributes() *NodeAttributes
NewNodeAttributes returns a NodeAttributes.
type NodeAttributesRecord ¶
type NodeAttributesRecord struct { // Attr attribute description Attr *string `json:"attr,omitempty"` // Host host name Host *string `json:"host,omitempty"` // Id unique node id Id *string `json:"id,omitempty"` // Ip ip address Ip *string `json:"ip,omitempty"` // Node node name Node *string `json:"node,omitempty"` // Pid process id Pid *string `json:"pid,omitempty"` // Port bound transport port Port *string `json:"port,omitempty"` // Value attribute value Value *string `json:"value,omitempty"` }
NodeAttributesRecord type.
func NewNodeAttributesRecord ¶
func NewNodeAttributesRecord() *NodeAttributesRecord
NewNodeAttributesRecord returns a NodeAttributesRecord.
type NodeBufferPool ¶
type NodeBufferPool struct { Count *int64 `json:"count,omitempty"` TotalCapacity *string `json:"total_capacity,omitempty"` TotalCapacityInBytes *int64 `json:"total_capacity_in_bytes,omitempty"` Used *string `json:"used,omitempty"` UsedInBytes *int64 `json:"used_in_bytes,omitempty"` }
NodeBufferPool type.
func NewNodeBufferPool ¶
func NewNodeBufferPool() *NodeBufferPool
NewNodeBufferPool returns a NodeBufferPool.
type NodeDiskUsage ¶
type NodeDiskUsage struct { LeastAvailable DiskUsage `json:"least_available"` MostAvailable DiskUsage `json:"most_available"` NodeName string `json:"node_name"` }
NodeDiskUsage type.
func NewNodeDiskUsage ¶
func NewNodeDiskUsage() *NodeDiskUsage
NewNodeDiskUsage returns a NodeDiskUsage.
type NodeInfo ¶
type NodeInfo struct { Aggregations map[string]NodeInfoAggregation `json:"aggregations,omitempty"` Attributes map[string]string `json:"attributes"` BuildFlavor string `json:"build_flavor"` // BuildHash Short hash of the last git commit in this release. BuildHash string `json:"build_hash"` BuildType string `json:"build_type"` // Host The node’s host name. Host string `json:"host"` Http *NodeInfoHttp `json:"http,omitempty"` Ingest *NodeInfoIngest `json:"ingest,omitempty"` // Ip The node’s IP address. Ip string `json:"ip"` Jvm *NodeJvmInfo `json:"jvm,omitempty"` Modules []PluginStats `json:"modules,omitempty"` // Name The node's name Name string `json:"name"` Network *NodeInfoNetwork `json:"network,omitempty"` Os *NodeOperatingSystemInfo `json:"os,omitempty"` Plugins []PluginStats `json:"plugins,omitempty"` Process *NodeProcessInfo `json:"process,omitempty"` Roles []noderole.NodeRole `json:"roles"` Settings *NodeInfoSettings `json:"settings,omitempty"` ThreadPool map[string]NodeThreadPoolInfo `json:"thread_pool,omitempty"` // TotalIndexingBuffer Total heap allowed to be used to hold recently indexed documents before they // must be written to disk. This size is a shared pool across all shards on this // node, and is controlled by Indexing Buffer settings. TotalIndexingBuffer *int64 `json:"total_indexing_buffer,omitempty"` // TotalIndexingBufferInBytes Same as total_indexing_buffer, but expressed in bytes. TotalIndexingBufferInBytes ByteSize `json:"total_indexing_buffer_in_bytes,omitempty"` Transport *NodeInfoTransport `json:"transport,omitempty"` // TransportAddress Host and port where transport HTTP connections are accepted. TransportAddress string `json:"transport_address"` // Version Elasticsearch version running on this node. Version string `json:"version"` }
NodeInfo type.
type NodeInfoAction ¶
type NodeInfoAction struct {
DestructiveRequiresName string `json:"destructive_requires_name"`
}
NodeInfoAction type.
func NewNodeInfoAction ¶
func NewNodeInfoAction() *NodeInfoAction
NewNodeInfoAction returns a NodeInfoAction.
type NodeInfoAggregation ¶
type NodeInfoAggregation struct {
Types []string `json:"types"`
}
NodeInfoAggregation type.
func NewNodeInfoAggregation ¶
func NewNodeInfoAggregation() *NodeInfoAggregation
NewNodeInfoAggregation returns a NodeInfoAggregation.
type NodeInfoBootstrap ¶
type NodeInfoBootstrap struct {
MemoryLock string `json:"memory_lock"`
}
NodeInfoBootstrap type.
func NewNodeInfoBootstrap ¶
func NewNodeInfoBootstrap() *NodeInfoBootstrap
NewNodeInfoBootstrap returns a NodeInfoBootstrap.
type NodeInfoClient ¶
type NodeInfoClient struct {
Type string `json:"type"`
}
NodeInfoClient type.
func NewNodeInfoClient ¶
func NewNodeInfoClient() *NodeInfoClient
NewNodeInfoClient returns a NodeInfoClient.
type NodeInfoDiscover ¶
type NodeInfoDiscover struct {
SeedHosts string `json:"seed_hosts"`
}
NodeInfoDiscover type.
func NewNodeInfoDiscover ¶
func NewNodeInfoDiscover() *NodeInfoDiscover
NewNodeInfoDiscover returns a NodeInfoDiscover.
type NodeInfoHttp ¶
type NodeInfoHttp struct { BoundAddress []string `json:"bound_address"` MaxContentLength ByteSize `json:"max_content_length,omitempty"` MaxContentLengthInBytes int64 `json:"max_content_length_in_bytes"` PublishAddress string `json:"publish_address"` }
NodeInfoHttp type.
func NewNodeInfoHttp ¶
func NewNodeInfoHttp() *NodeInfoHttp
NewNodeInfoHttp returns a NodeInfoHttp.
type NodeInfoIngest ¶
type NodeInfoIngest struct {
Processors []NodeInfoIngestProcessor `json:"processors"`
}
NodeInfoIngest type.
func NewNodeInfoIngest ¶
func NewNodeInfoIngest() *NodeInfoIngest
NewNodeInfoIngest returns a NodeInfoIngest.
type NodeInfoIngestDownloader ¶
type NodeInfoIngestDownloader struct {
Enabled string `json:"enabled"`
}
NodeInfoIngestDownloader type.
func NewNodeInfoIngestDownloader ¶
func NewNodeInfoIngestDownloader() *NodeInfoIngestDownloader
NewNodeInfoIngestDownloader returns a NodeInfoIngestDownloader.
type NodeInfoIngestInfo ¶
type NodeInfoIngestInfo struct {
Downloader NodeInfoIngestDownloader `json:"downloader"`
}
NodeInfoIngestInfo type.
func NewNodeInfoIngestInfo ¶
func NewNodeInfoIngestInfo() *NodeInfoIngestInfo
NewNodeInfoIngestInfo returns a NodeInfoIngestInfo.
type NodeInfoIngestProcessor ¶
type NodeInfoIngestProcessor struct {
Type string `json:"type"`
}
NodeInfoIngestProcessor type.
func NewNodeInfoIngestProcessor ¶
func NewNodeInfoIngestProcessor() *NodeInfoIngestProcessor
NewNodeInfoIngestProcessor returns a NodeInfoIngestProcessor.
type NodeInfoJvmMemory ¶
type NodeInfoJvmMemory struct { DirectMax ByteSize `json:"direct_max,omitempty"` DirectMaxInBytes int64 `json:"direct_max_in_bytes"` HeapInit ByteSize `json:"heap_init,omitempty"` HeapInitInBytes int64 `json:"heap_init_in_bytes"` HeapMax ByteSize `json:"heap_max,omitempty"` HeapMaxInBytes int64 `json:"heap_max_in_bytes"` NonHeapInit ByteSize `json:"non_heap_init,omitempty"` NonHeapInitInBytes int64 `json:"non_heap_init_in_bytes"` NonHeapMax ByteSize `json:"non_heap_max,omitempty"` NonHeapMaxInBytes int64 `json:"non_heap_max_in_bytes"` }
NodeInfoJvmMemory type.
func NewNodeInfoJvmMemory ¶
func NewNodeInfoJvmMemory() *NodeInfoJvmMemory
NewNodeInfoJvmMemory returns a NodeInfoJvmMemory.
type NodeInfoMemory ¶
type NodeInfoMemory struct { Total string `json:"total"` TotalInBytes int64 `json:"total_in_bytes"` }
NodeInfoMemory type.
func NewNodeInfoMemory ¶
func NewNodeInfoMemory() *NodeInfoMemory
NewNodeInfoMemory returns a NodeInfoMemory.
type NodeInfoNetwork ¶
type NodeInfoNetwork struct { PrimaryInterface NodeInfoNetworkInterface `json:"primary_interface"` RefreshInterval int `json:"refresh_interval"` }
NodeInfoNetwork type.
func NewNodeInfoNetwork ¶
func NewNodeInfoNetwork() *NodeInfoNetwork
NewNodeInfoNetwork returns a NodeInfoNetwork.
type NodeInfoNetworkInterface ¶
type NodeInfoNetworkInterface struct { Address string `json:"address"` MacAddress string `json:"mac_address"` Name string `json:"name"` }
NodeInfoNetworkInterface type.
func NewNodeInfoNetworkInterface ¶
func NewNodeInfoNetworkInterface() *NodeInfoNetworkInterface
NewNodeInfoNetworkInterface returns a NodeInfoNetworkInterface.
type NodeInfoOSCPU ¶
type NodeInfoOSCPU struct { CacheSize string `json:"cache_size"` CacheSizeInBytes int `json:"cache_size_in_bytes"` CoresPerSocket int `json:"cores_per_socket"` Mhz int `json:"mhz"` Model string `json:"model"` TotalCores int `json:"total_cores"` TotalSockets int `json:"total_sockets"` Vendor string `json:"vendor"` }
NodeInfoOSCPU type.
func NewNodeInfoOSCPU ¶
func NewNodeInfoOSCPU() *NodeInfoOSCPU
NewNodeInfoOSCPU returns a NodeInfoOSCPU.
type NodeInfoPath ¶
type NodeInfoPath struct { Data []string `json:"data,omitempty"` Home string `json:"home"` Logs string `json:"logs"` Repo []string `json:"repo"` }
NodeInfoPath type.
func NewNodeInfoPath ¶
func NewNodeInfoPath() *NodeInfoPath
NewNodeInfoPath returns a NodeInfoPath.
type NodeInfoRepositories ¶
type NodeInfoRepositories struct {
Url NodeInfoRepositoriesUrl `json:"url"`
}
NodeInfoRepositories type.
func NewNodeInfoRepositories ¶
func NewNodeInfoRepositories() *NodeInfoRepositories
NewNodeInfoRepositories returns a NodeInfoRepositories.
type NodeInfoRepositoriesUrl ¶
type NodeInfoRepositoriesUrl struct {
AllowedUrls string `json:"allowed_urls"`
}
NodeInfoRepositoriesUrl type.
func NewNodeInfoRepositoriesUrl ¶
func NewNodeInfoRepositoriesUrl() *NodeInfoRepositoriesUrl
NewNodeInfoRepositoriesUrl returns a NodeInfoRepositoriesUrl.
type NodeInfoScript ¶
type NodeInfoScript struct { AllowedTypes string `json:"allowed_types"` DisableMaxCompilationsRate string `json:"disable_max_compilations_rate"` }
NodeInfoScript type.
func NewNodeInfoScript ¶
func NewNodeInfoScript() *NodeInfoScript
NewNodeInfoScript returns a NodeInfoScript.
type NodeInfoSearch ¶
type NodeInfoSearch struct {
Remote NodeInfoSearchRemote `json:"remote"`
}
NodeInfoSearch type.
func NewNodeInfoSearch ¶
func NewNodeInfoSearch() *NodeInfoSearch
NewNodeInfoSearch returns a NodeInfoSearch.
type NodeInfoSearchRemote ¶
type NodeInfoSearchRemote struct {
Connect string `json:"connect"`
}
NodeInfoSearchRemote type.
func NewNodeInfoSearchRemote ¶
func NewNodeInfoSearchRemote() *NodeInfoSearchRemote
NewNodeInfoSearchRemote returns a NodeInfoSearchRemote.
type NodeInfoSettings ¶
type NodeInfoSettings struct { Action *NodeInfoAction `json:"action,omitempty"` Bootstrap *NodeInfoBootstrap `json:"bootstrap,omitempty"` Client NodeInfoClient `json:"client"` Cluster NodeInfoSettingsCluster `json:"cluster"` Discovery *NodeInfoDiscover `json:"discovery,omitempty"` Http NodeInfoSettingsHttp `json:"http"` Ingest *NodeInfoSettingsIngest `json:"ingest,omitempty"` Network *NodeInfoSettingsNetwork `json:"network,omitempty"` Node NodeInfoSettingsNode `json:"node"` Path NodeInfoPath `json:"path"` Repositories *NodeInfoRepositories `json:"repositories,omitempty"` Script *NodeInfoScript `json:"script,omitempty"` Search *NodeInfoSearch `json:"search,omitempty"` Transport NodeInfoSettingsTransport `json:"transport"` Xpack *NodeInfoXpack `json:"xpack,omitempty"` }
NodeInfoSettings type.
func NewNodeInfoSettings ¶
func NewNodeInfoSettings() *NodeInfoSettings
NewNodeInfoSettings returns a NodeInfoSettings.
type NodeInfoSettingsCluster ¶
type NodeInfoSettingsCluster struct { DeprecationIndexing *DeprecationIndexing `json:"deprecation_indexing,omitempty"` Election NodeInfoSettingsClusterElection `json:"election"` InitialMasterNodes *string `json:"initial_master_nodes,omitempty"` Name string `json:"name"` Routing *IndexRouting `json:"routing,omitempty"` }
NodeInfoSettingsCluster type.
func NewNodeInfoSettingsCluster ¶
func NewNodeInfoSettingsCluster() *NodeInfoSettingsCluster
NewNodeInfoSettingsCluster returns a NodeInfoSettingsCluster.
type NodeInfoSettingsClusterElection ¶
type NodeInfoSettingsClusterElection struct {
Strategy string `json:"strategy"`
}
NodeInfoSettingsClusterElection type.
func NewNodeInfoSettingsClusterElection ¶
func NewNodeInfoSettingsClusterElection() *NodeInfoSettingsClusterElection
NewNodeInfoSettingsClusterElection returns a NodeInfoSettingsClusterElection.
type NodeInfoSettingsHttp ¶
type NodeInfoSettingsHttp struct { Compression string `json:"compression,omitempty"` Port string `json:"port,omitempty"` Type NodeInfoSettingsHttpType `json:"type"` TypeDefault *string `json:"type.default,omitempty"` }
NodeInfoSettingsHttp type.
func NewNodeInfoSettingsHttp ¶
func NewNodeInfoSettingsHttp() *NodeInfoSettingsHttp
NewNodeInfoSettingsHttp returns a NodeInfoSettingsHttp.
type NodeInfoSettingsHttpType ¶
type NodeInfoSettingsHttpType struct {
Default string `json:"default"`
}
NodeInfoSettingsHttpType type.
func NewNodeInfoSettingsHttpType ¶
func NewNodeInfoSettingsHttpType() *NodeInfoSettingsHttpType
NewNodeInfoSettingsHttpType returns a NodeInfoSettingsHttpType.
type NodeInfoSettingsIngest ¶
type NodeInfoSettingsIngest struct { Append *NodeInfoIngestInfo `json:"append,omitempty"` Attachment *NodeInfoIngestInfo `json:"attachment,omitempty"` Bytes *NodeInfoIngestInfo `json:"bytes,omitempty"` Circle *NodeInfoIngestInfo `json:"circle,omitempty"` Convert *NodeInfoIngestInfo `json:"convert,omitempty"` Csv *NodeInfoIngestInfo `json:"csv,omitempty"` Date *NodeInfoIngestInfo `json:"date,omitempty"` DateIndexName *NodeInfoIngestInfo `json:"date_index_name,omitempty"` Dissect *NodeInfoIngestInfo `json:"dissect,omitempty"` DotExpander *NodeInfoIngestInfo `json:"dot_expander,omitempty"` Drop *NodeInfoIngestInfo `json:"drop,omitempty"` Enrich *NodeInfoIngestInfo `json:"enrich,omitempty"` Fail *NodeInfoIngestInfo `json:"fail,omitempty"` Foreach *NodeInfoIngestInfo `json:"foreach,omitempty"` Geoip *NodeInfoIngestInfo `json:"geoip,omitempty"` Grok *NodeInfoIngestInfo `json:"grok,omitempty"` Gsub *NodeInfoIngestInfo `json:"gsub,omitempty"` Inference *NodeInfoIngestInfo `json:"inference,omitempty"` Join *NodeInfoIngestInfo `json:"join,omitempty"` Json *NodeInfoIngestInfo `json:"json,omitempty"` Kv *NodeInfoIngestInfo `json:"kv,omitempty"` Lowercase *NodeInfoIngestInfo `json:"lowercase,omitempty"` Pipeline *NodeInfoIngestInfo `json:"pipeline,omitempty"` Remove *NodeInfoIngestInfo `json:"remove,omitempty"` Rename *NodeInfoIngestInfo `json:"rename,omitempty"` Script *NodeInfoIngestInfo `json:"script,omitempty"` Set *NodeInfoIngestInfo `json:"set,omitempty"` SetSecurityUser *NodeInfoIngestInfo `json:"set_security_user,omitempty"` Sort *NodeInfoIngestInfo `json:"sort,omitempty"` Split *NodeInfoIngestInfo `json:"split,omitempty"` Trim *NodeInfoIngestInfo `json:"trim,omitempty"` Uppercase *NodeInfoIngestInfo `json:"uppercase,omitempty"` Urldecode *NodeInfoIngestInfo `json:"urldecode,omitempty"` UserAgent *NodeInfoIngestInfo `json:"user_agent,omitempty"` }
NodeInfoSettingsIngest type.
func NewNodeInfoSettingsIngest ¶
func NewNodeInfoSettingsIngest() *NodeInfoSettingsIngest
NewNodeInfoSettingsIngest returns a NodeInfoSettingsIngest.
type NodeInfoSettingsNetwork ¶
type NodeInfoSettingsNetwork struct {
Host string `json:"host"`
}
NodeInfoSettingsNetwork type.
func NewNodeInfoSettingsNetwork ¶
func NewNodeInfoSettingsNetwork() *NodeInfoSettingsNetwork
NewNodeInfoSettingsNetwork returns a NodeInfoSettingsNetwork.
type NodeInfoSettingsNode ¶
type NodeInfoSettingsNode struct { Attr map[string]json.RawMessage `json:"attr"` MaxLocalStorageNodes *string `json:"max_local_storage_nodes,omitempty"` Name string `json:"name"` }
NodeInfoSettingsNode type.
func NewNodeInfoSettingsNode ¶
func NewNodeInfoSettingsNode() *NodeInfoSettingsNode
NewNodeInfoSettingsNode returns a NodeInfoSettingsNode.
type NodeInfoSettingsTransport ¶
type NodeInfoSettingsTransport struct { Features *NodeInfoSettingsTransportFeatures `json:"features,omitempty"` Type NodeInfoSettingsTransportType `json:"type"` TypeDefault *string `json:"type.default,omitempty"` }
NodeInfoSettingsTransport type.
func NewNodeInfoSettingsTransport ¶
func NewNodeInfoSettingsTransport() *NodeInfoSettingsTransport
NewNodeInfoSettingsTransport returns a NodeInfoSettingsTransport.
type NodeInfoSettingsTransportFeatures ¶
type NodeInfoSettingsTransportFeatures struct {
XPack string `json:"x-pack"`
}
NodeInfoSettingsTransportFeatures type.
func NewNodeInfoSettingsTransportFeatures ¶
func NewNodeInfoSettingsTransportFeatures() *NodeInfoSettingsTransportFeatures
NewNodeInfoSettingsTransportFeatures returns a NodeInfoSettingsTransportFeatures.
type NodeInfoSettingsTransportType ¶
type NodeInfoSettingsTransportType struct {
Default string `json:"default"`
}
NodeInfoSettingsTransportType type.
func NewNodeInfoSettingsTransportType ¶
func NewNodeInfoSettingsTransportType() *NodeInfoSettingsTransportType
NewNodeInfoSettingsTransportType returns a NodeInfoSettingsTransportType.
type NodeInfoTransport ¶
type NodeInfoTransport struct { BoundAddress []string `json:"bound_address"` Profiles map[string]string `json:"profiles"` PublishAddress string `json:"publish_address"` }
NodeInfoTransport type.
func NewNodeInfoTransport ¶
func NewNodeInfoTransport() *NodeInfoTransport
NewNodeInfoTransport returns a NodeInfoTransport.
type NodeInfoXpack ¶
type NodeInfoXpack struct { License *NodeInfoXpackLicense `json:"license,omitempty"` Notification map[string]json.RawMessage `json:"notification,omitempty"` Security NodeInfoXpackSecurity `json:"security"` }
NodeInfoXpack type.
func NewNodeInfoXpack ¶
func NewNodeInfoXpack() *NodeInfoXpack
NewNodeInfoXpack returns a NodeInfoXpack.
type NodeInfoXpackLicense ¶
type NodeInfoXpackLicense struct {
SelfGenerated NodeInfoXpackLicenseType `json:"self_generated"`
}
NodeInfoXpackLicense type.
func NewNodeInfoXpackLicense ¶
func NewNodeInfoXpackLicense() *NodeInfoXpackLicense
NewNodeInfoXpackLicense returns a NodeInfoXpackLicense.
type NodeInfoXpackLicenseType ¶
type NodeInfoXpackLicenseType struct {
Type string `json:"type"`
}
NodeInfoXpackLicenseType type.
func NewNodeInfoXpackLicenseType ¶
func NewNodeInfoXpackLicenseType() *NodeInfoXpackLicenseType
NewNodeInfoXpackLicenseType returns a NodeInfoXpackLicenseType.
type NodeInfoXpackSecurity ¶
type NodeInfoXpackSecurity struct { Authc *NodeInfoXpackSecurityAuthc `json:"authc,omitempty"` Enabled string `json:"enabled"` Http NodeInfoXpackSecuritySsl `json:"http"` Transport *NodeInfoXpackSecuritySsl `json:"transport,omitempty"` }
NodeInfoXpackSecurity type.
func NewNodeInfoXpackSecurity ¶
func NewNodeInfoXpackSecurity() *NodeInfoXpackSecurity
NewNodeInfoXpackSecurity returns a NodeInfoXpackSecurity.
type NodeInfoXpackSecurityAuthc ¶
type NodeInfoXpackSecurityAuthc struct { Realms NodeInfoXpackSecurityAuthcRealms `json:"realms"` Token NodeInfoXpackSecurityAuthcToken `json:"token"` }
NodeInfoXpackSecurityAuthc type.
func NewNodeInfoXpackSecurityAuthc ¶
func NewNodeInfoXpackSecurityAuthc() *NodeInfoXpackSecurityAuthc
NewNodeInfoXpackSecurityAuthc returns a NodeInfoXpackSecurityAuthc.
type NodeInfoXpackSecurityAuthcRealms ¶
type NodeInfoXpackSecurityAuthcRealms struct { File map[string]NodeInfoXpackSecurityAuthcRealmsStatus `json:"file,omitempty"` Native map[string]NodeInfoXpackSecurityAuthcRealmsStatus `json:"native,omitempty"` Pki map[string]NodeInfoXpackSecurityAuthcRealmsStatus `json:"pki,omitempty"` }
NodeInfoXpackSecurityAuthcRealms type.
func NewNodeInfoXpackSecurityAuthcRealms ¶
func NewNodeInfoXpackSecurityAuthcRealms() *NodeInfoXpackSecurityAuthcRealms
NewNodeInfoXpackSecurityAuthcRealms returns a NodeInfoXpackSecurityAuthcRealms.
type NodeInfoXpackSecurityAuthcRealmsStatus ¶
type NodeInfoXpackSecurityAuthcRealmsStatus struct { Enabled *string `json:"enabled,omitempty"` Order string `json:"order"` }
NodeInfoXpackSecurityAuthcRealmsStatus type.
func NewNodeInfoXpackSecurityAuthcRealmsStatus ¶
func NewNodeInfoXpackSecurityAuthcRealmsStatus() *NodeInfoXpackSecurityAuthcRealmsStatus
NewNodeInfoXpackSecurityAuthcRealmsStatus returns a NodeInfoXpackSecurityAuthcRealmsStatus.
type NodeInfoXpackSecurityAuthcToken ¶
type NodeInfoXpackSecurityAuthcToken struct {
Enabled string `json:"enabled"`
}
NodeInfoXpackSecurityAuthcToken type.
func NewNodeInfoXpackSecurityAuthcToken ¶
func NewNodeInfoXpackSecurityAuthcToken() *NodeInfoXpackSecurityAuthcToken
NewNodeInfoXpackSecurityAuthcToken returns a NodeInfoXpackSecurityAuthcToken.
type NodeInfoXpackSecuritySsl ¶
NodeInfoXpackSecuritySsl type.
func NewNodeInfoXpackSecuritySsl ¶
func NewNodeInfoXpackSecuritySsl() *NodeInfoXpackSecuritySsl
NewNodeInfoXpackSecuritySsl returns a NodeInfoXpackSecuritySsl.
type NodeJvmInfo ¶
type NodeJvmInfo struct { GcCollectors []string `json:"gc_collectors"` InputArguments []string `json:"input_arguments"` Mem NodeInfoJvmMemory `json:"mem"` MemoryPools []string `json:"memory_pools"` Pid int `json:"pid"` StartTimeInMillis int64 `json:"start_time_in_millis"` UsingBundledJdk bool `json:"using_bundled_jdk"` UsingCompressedOrdinaryObjectPointers string `json:"using_compressed_ordinary_object_pointers,omitempty"` Version string `json:"version"` VmName string `json:"vm_name"` VmVendor string `json:"vm_vendor"` VmVersion string `json:"vm_version"` }
NodeJvmInfo type.
type NodeOperatingSystemInfo ¶
type NodeOperatingSystemInfo struct { // AllocatedProcessors The number of processors actually used to calculate thread pool size. This // number can be set with the node.processors setting of a node and defaults to // the number of processors reported by the OS. AllocatedProcessors *int `json:"allocated_processors,omitempty"` // Arch Name of the JVM architecture (ex: amd64, x86) Arch string `json:"arch"` // AvailableProcessors Number of processors available to the Java virtual machine AvailableProcessors int `json:"available_processors"` Cpu *NodeInfoOSCPU `json:"cpu,omitempty"` Mem *NodeInfoMemory `json:"mem,omitempty"` // Name Name of the operating system (ex: Linux, Windows, Mac OS X) Name string `json:"name"` PrettyName string `json:"pretty_name"` // RefreshIntervalInMillis Refresh interval for the OS statistics RefreshIntervalInMillis int64 `json:"refresh_interval_in_millis"` Swap *NodeInfoMemory `json:"swap,omitempty"` // Version Version of the operating system Version string `json:"version"` }
NodeOperatingSystemInfo type.
func NewNodeOperatingSystemInfo ¶
func NewNodeOperatingSystemInfo() *NodeOperatingSystemInfo
NewNodeOperatingSystemInfo returns a NodeOperatingSystemInfo.
type NodePackagingType ¶
type NodePackagingType struct { Count int `json:"count"` Flavor string `json:"flavor"` Type string `json:"type"` }
NodePackagingType type.
func NewNodePackagingType ¶
func NewNodePackagingType() *NodePackagingType
NewNodePackagingType returns a NodePackagingType.
type NodeProcessInfo ¶
type NodeProcessInfo struct { // Id Process identifier (PID) Id int64 `json:"id"` // Mlockall Indicates if the process address space has been successfully locked in memory Mlockall bool `json:"mlockall"` // RefreshIntervalInMillis Refresh interval for the process statistics RefreshIntervalInMillis int64 `json:"refresh_interval_in_millis"` }
NodeProcessInfo type.
func NewNodeProcessInfo ¶
func NewNodeProcessInfo() *NodeProcessInfo
NewNodeProcessInfo returns a NodeProcessInfo.
type NodeReloadError ¶
type NodeReloadError struct { Name string `json:"name"` ReloadException *ErrorCause `json:"reload_exception,omitempty"` }
NodeReloadError type.
func NewNodeReloadError ¶
func NewNodeReloadError() *NodeReloadError
NewNodeReloadError returns a NodeReloadError.
type NodeReloadResult ¶
type NodeReloadResult interface{}
NodeReloadResult holds the union for the following types:
Stats NodeReloadError
type NodeShard ¶
type NodeShard struct { AllocationId map[string]string `json:"allocation_id,omitempty"` Index string `json:"index"` Node *string `json:"node,omitempty"` Primary bool `json:"primary"` RecoverySource map[string]string `json:"recovery_source,omitempty"` RelocatingNode string `json:"relocating_node,omitempty"` RelocationFailureInfo *RelocationFailureInfo `json:"relocation_failure_info,omitempty"` Shard int `json:"shard"` State shardroutingstate.ShardRoutingState `json:"state"` UnassignedInfo *UnassignedInformation `json:"unassigned_info,omitempty"` }
NodeShard type.
type NodeShutdownStatus ¶
type NodeShutdownStatus struct { NodeId string `json:"node_id"` PersistentTasks PersistentTaskStatus `json:"persistent_tasks"` Plugins PluginsStatus `json:"plugins"` Reason string `json:"reason"` ShardMigration ShardMigrationStatus `json:"shard_migration"` ShutdownStartedmillis int64 `json:"shutdown_startedmillis"` Status shutdownstatus.ShutdownStatus `json:"status"` Type shutdowntype.ShutdownType `json:"type"` }
NodeShutdownStatus type.
func NewNodeShutdownStatus ¶
func NewNodeShutdownStatus() *NodeShutdownStatus
NewNodeShutdownStatus returns a NodeShutdownStatus.
type NodeStatistics ¶
type NodeStatistics struct { // Failed Number of nodes that rejected the request or failed to respond. If this value // is not 0, a reason for the rejection or failure is included in the response. Failed int `json:"failed"` Failures []ErrorCause `json:"failures,omitempty"` // Successful Number of nodes that responded successfully to the request. Successful int `json:"successful"` // Total Total number of nodes selected by the request. Total int `json:"total"` }
NodeStatistics type.
func NewNodeStatistics ¶
func NewNodeStatistics() *NodeStatistics
NewNodeStatistics returns a NodeStatistics.
type NodeTasks ¶
type NodeTasks struct { Attributes map[string]string `json:"attributes,omitempty"` Host *string `json:"host,omitempty"` Ip *string `json:"ip,omitempty"` Name *string `json:"name,omitempty"` Roles []string `json:"roles,omitempty"` Tasks map[TaskId]TaskInfo `json:"tasks"` TransportAddress *string `json:"transport_address,omitempty"` }
NodeTasks type.
type NodeThreadPoolInfo ¶
type NodeThreadPoolInfo struct { Core *int `json:"core,omitempty"` KeepAlive Duration `json:"keep_alive,omitempty"` Max *int `json:"max,omitempty"` QueueSize int `json:"queue_size"` Size *int `json:"size,omitempty"` Type string `json:"type"` }
NodeThreadPoolInfo type.
func NewNodeThreadPoolInfo ¶
func NewNodeThreadPoolInfo() *NodeThreadPoolInfo
NewNodeThreadPoolInfo returns a NodeThreadPoolInfo.
type NodeUsage ¶
type NodeUsage struct { Aggregations map[string]json.RawMessage `json:"aggregations"` RestActions map[string]int `json:"rest_actions"` Since int64 `json:"since"` Timestamp int64 `json:"timestamp"` }
NodeUsage type.
type NodesContext ¶
type NodesContext struct { CacheEvictions *int64 `json:"cache_evictions,omitempty"` CompilationLimitTriggered *int64 `json:"compilation_limit_triggered,omitempty"` Compilations *int64 `json:"compilations,omitempty"` Context *string `json:"context,omitempty"` }
NodesContext type.
func NewNodesContext ¶
func NewNodesContext() *NodesContext
NewNodesContext returns a NodesContext.
type NodesCredentials ¶
type NodesCredentials struct { // FileTokens File-backed tokens collected from all nodes FileTokens map[string]NodesCredentialsFileToken `json:"file_tokens"` // NodeStats General status showing how nodes respond to the above collection request NodeStats NodeStatistics `json:"_nodes"` }
NodesCredentials type.
func NewNodesCredentials ¶
func NewNodesCredentials() *NodesCredentials
NewNodesCredentials returns a NodesCredentials.
type NodesCredentialsFileToken ¶
type NodesCredentialsFileToken struct {
Nodes []string `json:"nodes"`
}
NodesCredentialsFileToken type.
func NewNodesCredentialsFileToken ¶
func NewNodesCredentialsFileToken() *NodesCredentialsFileToken
NewNodesCredentialsFileToken returns a NodesCredentialsFileToken.
type NodesIndexingPressure ¶
type NodesIndexingPressure struct {
Memory *NodesIndexingPressureMemory `json:"memory,omitempty"`
}
NodesIndexingPressure type.
func NewNodesIndexingPressure ¶
func NewNodesIndexingPressure() *NodesIndexingPressure
NewNodesIndexingPressure returns a NodesIndexingPressure.
type NodesIndexingPressureMemory ¶
type NodesIndexingPressureMemory struct { Current *PressureMemory `json:"current,omitempty"` Limit ByteSize `json:"limit,omitempty"` LimitInBytes *int64 `json:"limit_in_bytes,omitempty"` Total *PressureMemory `json:"total,omitempty"` }
NodesIndexingPressureMemory type.
func NewNodesIndexingPressureMemory ¶
func NewNodesIndexingPressureMemory() *NodesIndexingPressureMemory
NewNodesIndexingPressureMemory returns a NodesIndexingPressureMemory.
type NodesIngest ¶
type NodesIngest struct { Pipelines map[string]IngestTotal `json:"pipelines,omitempty"` Total *IngestTotal `json:"total,omitempty"` }
NodesIngest type.
type NodesRecord ¶
type NodesRecord struct { // Build es build hash Build *string `json:"build,omitempty"` // BulkAvgSizeInBytes average size in bytes of shard bulk BulkAvgSizeInBytes *string `json:"bulk.avg_size_in_bytes,omitempty"` // BulkAvgTime average time spend in shard bulk BulkAvgTime *string `json:"bulk.avg_time,omitempty"` // BulkTotalOperations number of bulk shard ops BulkTotalOperations *string `json:"bulk.total_operations,omitempty"` // BulkTotalSizeInBytes total size in bytes of shard bulk BulkTotalSizeInBytes *string `json:"bulk.total_size_in_bytes,omitempty"` // BulkTotalTime time spend in shard bulk BulkTotalTime *string `json:"bulk.total_time,omitempty"` // CompletionSize size of completion CompletionSize *string `json:"completion.size,omitempty"` // Cpu recent cpu usage Cpu *string `json:"cpu,omitempty"` // DiskAvail available disk space DiskAvail ByteSize `json:"disk.avail,omitempty"` // DiskTotal total disk space DiskTotal ByteSize `json:"disk.total,omitempty"` // DiskUsed used disk space DiskUsed ByteSize `json:"disk.used,omitempty"` // DiskUsedPercent used disk space percentage DiskUsedPercent Percentage `json:"disk.used_percent,omitempty"` // FielddataEvictions fielddata evictions FielddataEvictions *string `json:"fielddata.evictions,omitempty"` // FielddataMemorySize used fielddata cache FielddataMemorySize *string `json:"fielddata.memory_size,omitempty"` // FileDescCurrent used file descriptors FileDescCurrent *string `json:"file_desc.current,omitempty"` // FileDescMax max file descriptors FileDescMax *string `json:"file_desc.max,omitempty"` // FileDescPercent used file descriptor ratio FileDescPercent Percentage `json:"file_desc.percent,omitempty"` // Flavor es distribution flavor Flavor *string `json:"flavor,omitempty"` // FlushTotal number of flushes FlushTotal *string `json:"flush.total,omitempty"` // FlushTotalTime time spent in flush FlushTotalTime *string `json:"flush.total_time,omitempty"` // GetCurrent number of current get ops GetCurrent *string 
`json:"get.current,omitempty"` // GetExistsTime time spent in successful gets GetExistsTime *string `json:"get.exists_time,omitempty"` // GetExistsTotal number of successful gets GetExistsTotal *string `json:"get.exists_total,omitempty"` // GetMissingTime time spent in failed gets GetMissingTime *string `json:"get.missing_time,omitempty"` // GetMissingTotal number of failed gets GetMissingTotal *string `json:"get.missing_total,omitempty"` // GetTime time spent in get GetTime *string `json:"get.time,omitempty"` // GetTotal number of get ops GetTotal *string `json:"get.total,omitempty"` // HeapCurrent used heap HeapCurrent *string `json:"heap.current,omitempty"` // HeapMax max configured heap HeapMax *string `json:"heap.max,omitempty"` // HeapPercent used heap ratio HeapPercent Percentage `json:"heap.percent,omitempty"` // HttpAddress bound http address HttpAddress *string `json:"http_address,omitempty"` // Id unique node id Id *string `json:"id,omitempty"` // IndexingDeleteCurrent number of current deletions IndexingDeleteCurrent *string `json:"indexing.delete_current,omitempty"` // IndexingDeleteTime time spent in deletions IndexingDeleteTime *string `json:"indexing.delete_time,omitempty"` // IndexingDeleteTotal number of delete ops IndexingDeleteTotal *string `json:"indexing.delete_total,omitempty"` // IndexingIndexCurrent number of current indexing ops IndexingIndexCurrent *string `json:"indexing.index_current,omitempty"` // IndexingIndexFailed number of failed indexing ops IndexingIndexFailed *string `json:"indexing.index_failed,omitempty"` // IndexingIndexTime time spent in indexing IndexingIndexTime *string `json:"indexing.index_time,omitempty"` // IndexingIndexTotal number of indexing ops IndexingIndexTotal *string `json:"indexing.index_total,omitempty"` // Ip ip address Ip *string `json:"ip,omitempty"` // Jdk jdk version Jdk *string `json:"jdk,omitempty"` // Load15M 15m load avg Load15M *string `json:"load_15m,omitempty"` // Load1M 1m load avg Load1M *string 
`json:"load_1m,omitempty"` // Load5M 5m load avg Load5M *string `json:"load_5m,omitempty"` // Master *:current master Master *string `json:"master,omitempty"` // MergesCurrent number of current merges MergesCurrent *string `json:"merges.current,omitempty"` // MergesCurrentDocs number of current merging docs MergesCurrentDocs *string `json:"merges.current_docs,omitempty"` // MergesCurrentSize size of current merges MergesCurrentSize *string `json:"merges.current_size,omitempty"` // MergesTotal number of completed merge ops MergesTotal *string `json:"merges.total,omitempty"` // MergesTotalDocs docs merged MergesTotalDocs *string `json:"merges.total_docs,omitempty"` // MergesTotalSize size merged MergesTotalSize *string `json:"merges.total_size,omitempty"` // MergesTotalTime time spent in merges MergesTotalTime *string `json:"merges.total_time,omitempty"` // Name node name Name *string `json:"name,omitempty"` // NodeRole m:master eligible node, d:data node, i:ingest node, -:coordinating node only NodeRole *string `json:"node.role,omitempty"` // Pid process id Pid *string `json:"pid,omitempty"` // Port bound transport port Port *string `json:"port,omitempty"` // QueryCacheEvictions query cache evictions QueryCacheEvictions *string `json:"query_cache.evictions,omitempty"` // QueryCacheHitCount query cache hit counts QueryCacheHitCount *string `json:"query_cache.hit_count,omitempty"` // QueryCacheMemorySize used query cache QueryCacheMemorySize *string `json:"query_cache.memory_size,omitempty"` // QueryCacheMissCount query cache miss counts QueryCacheMissCount *string `json:"query_cache.miss_count,omitempty"` // RamCurrent used machine memory RamCurrent *string `json:"ram.current,omitempty"` // RamMax total machine memory RamMax *string `json:"ram.max,omitempty"` // RamPercent used machine memory ratio RamPercent Percentage `json:"ram.percent,omitempty"` // RefreshExternalTime time spent in external refreshes RefreshExternalTime *string 
`json:"refresh.external_time,omitempty"` // RefreshExternalTotal total external refreshes RefreshExternalTotal *string `json:"refresh.external_total,omitempty"` // RefreshListeners number of pending refresh listeners RefreshListeners *string `json:"refresh.listeners,omitempty"` // RefreshTime time spent in refreshes RefreshTime *string `json:"refresh.time,omitempty"` // RefreshTotal total refreshes RefreshTotal *string `json:"refresh.total,omitempty"` // RequestCacheEvictions request cache evictions RequestCacheEvictions *string `json:"request_cache.evictions,omitempty"` // RequestCacheHitCount request cache hit counts RequestCacheHitCount *string `json:"request_cache.hit_count,omitempty"` // RequestCacheMemorySize used request cache RequestCacheMemorySize *string `json:"request_cache.memory_size,omitempty"` // RequestCacheMissCount request cache miss counts RequestCacheMissCount *string `json:"request_cache.miss_count,omitempty"` // ScriptCacheEvictions script cache evictions ScriptCacheEvictions *string `json:"script.cache_evictions,omitempty"` // ScriptCompilationLimitTriggered script cache compilation limit triggered ScriptCompilationLimitTriggered *string `json:"script.compilation_limit_triggered,omitempty"` // ScriptCompilations script compilations ScriptCompilations *string `json:"script.compilations,omitempty"` // SearchFetchCurrent current fetch phase ops SearchFetchCurrent *string `json:"search.fetch_current,omitempty"` // SearchFetchTime time spent in fetch phase SearchFetchTime *string `json:"search.fetch_time,omitempty"` // SearchFetchTotal total fetch ops SearchFetchTotal *string `json:"search.fetch_total,omitempty"` // SearchOpenContexts open search contexts SearchOpenContexts *string `json:"search.open_contexts,omitempty"` // SearchQueryCurrent current query phase ops SearchQueryCurrent *string `json:"search.query_current,omitempty"` // SearchQueryTime time spent in query phase SearchQueryTime *string `json:"search.query_time,omitempty"` // 
SearchQueryTotal total query phase ops SearchQueryTotal *string `json:"search.query_total,omitempty"` // SearchScrollCurrent open scroll contexts SearchScrollCurrent *string `json:"search.scroll_current,omitempty"` // SearchScrollTime time scroll contexts held open SearchScrollTime *string `json:"search.scroll_time,omitempty"` // SearchScrollTotal completed scroll contexts SearchScrollTotal *string `json:"search.scroll_total,omitempty"` // SegmentsCount number of segments SegmentsCount *string `json:"segments.count,omitempty"` // SegmentsFixedBitsetMemory memory used by fixed bit sets for nested object field types and export type // filters for types referred in _parent fields SegmentsFixedBitsetMemory *string `json:"segments.fixed_bitset_memory,omitempty"` // SegmentsIndexWriterMemory memory used by index writer SegmentsIndexWriterMemory *string `json:"segments.index_writer_memory,omitempty"` // SegmentsMemory memory used by segments SegmentsMemory *string `json:"segments.memory,omitempty"` // SegmentsVersionMapMemory memory used by version map SegmentsVersionMapMemory *string `json:"segments.version_map_memory,omitempty"` // SuggestCurrent number of current suggest ops SuggestCurrent *string `json:"suggest.current,omitempty"` // SuggestTime time spend in suggest SuggestTime *string `json:"suggest.time,omitempty"` // SuggestTotal number of suggest ops SuggestTotal *string `json:"suggest.total,omitempty"` // Type es distribution type Type *string `json:"type,omitempty"` // Uptime node uptime Uptime *string `json:"uptime,omitempty"` // Version es version Version *string `json:"version,omitempty"` }
NodesRecord type.
type NoriAnalyzer ¶
type NoriAnalyzer struct { DecompoundMode *noridecompoundmode.NoriDecompoundMode `json:"decompound_mode,omitempty"` Type string `json:"type,omitempty"` UserDictionary *string `json:"user_dictionary,omitempty"` Version *string `json:"version,omitempty"` }
NoriAnalyzer type.
func NewNoriAnalyzer ¶
func NewNoriAnalyzer() *NoriAnalyzer
NewNoriAnalyzer returns a NoriAnalyzer.
type NoriPartOfSpeechTokenFilter ¶
type NoriPartOfSpeechTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
NoriPartOfSpeechTokenFilter type.
func NewNoriPartOfSpeechTokenFilter ¶
func NewNoriPartOfSpeechTokenFilter() *NoriPartOfSpeechTokenFilter
NewNoriPartOfSpeechTokenFilter returns a NoriPartOfSpeechTokenFilter.
type NoriTokenizer ¶
type NoriTokenizer struct { DecompoundMode *noridecompoundmode.NoriDecompoundMode `json:"decompound_mode,omitempty"` DiscardPunctuation *bool `json:"discard_punctuation,omitempty"` Type string `json:"type,omitempty"` UserDictionary *string `json:"user_dictionary,omitempty"` UserDictionaryRules []string `json:"user_dictionary_rules,omitempty"` Version *string `json:"version,omitempty"` }
NoriTokenizer type.
func NewNoriTokenizer ¶
func NewNoriTokenizer() *NoriTokenizer
NewNoriTokenizer returns a NoriTokenizer.
type NormalizeAggregation ¶
type NormalizeAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Method *normalizemethod.NormalizeMethod `json:"method,omitempty"` Name *string `json:"name,omitempty"` }
NormalizeAggregation type.
func NewNormalizeAggregation ¶
func NewNormalizeAggregation() *NormalizeAggregation
NewNormalizeAggregation returns a NormalizeAggregation.
func (*NormalizeAggregation) UnmarshalJSON ¶
func (s *NormalizeAggregation) UnmarshalJSON(data []byte) error
type Normalizer ¶
type Normalizer interface{}
Normalizer holds the union for the following types:
LowercaseNormalizer CustomNormalizer
type NumberRangeQuery ¶
type NumberRangeQuery struct { Boost *float32 `json:"boost,omitempty"` From Float64 `json:"from,omitempty"` Gt *Float64 `json:"gt,omitempty"` Gte *Float64 `json:"gte,omitempty"` Lt *Float64 `json:"lt,omitempty"` Lte *Float64 `json:"lte,omitempty"` QueryName_ *string `json:"_name,omitempty"` Relation *rangerelation.RangeRelation `json:"relation,omitempty"` To Float64 `json:"to,omitempty"` }
NumberRangeQuery type.
func NewNumberRangeQuery ¶
func NewNumberRangeQuery() *NumberRangeQuery
NewNumberRangeQuery returns a NumberRangeQuery.
type NumericDecayFunction ¶
type NumericDecayFunction struct { MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` NumericDecayFunction map[string]DecayPlacementdoubledouble `json:"-"` }
NumericDecayFunction type.
func NewNumericDecayFunction ¶
func NewNumericDecayFunction() *NumericDecayFunction
NewNumericDecayFunction returns a NumericDecayFunction.
func (NumericDecayFunction) MarshalJSON ¶
func (s NumericDecayFunction) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type NumericFielddata ¶
type NumericFielddata struct {
Format numericfielddataformat.NumericFielddataFormat `json:"format"`
}
NumericFielddata type.
func NewNumericFielddata ¶
func NewNumericFielddata() *NumericFielddata
NewNumericFielddata returns a NumericFielddata.
type ObjectProperty ¶
type ObjectProperty struct { CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Enabled *bool `json:"enabled,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
ObjectProperty type.
func NewObjectProperty ¶
func NewObjectProperty() *ObjectProperty
NewObjectProperty returns a ObjectProperty.
func (*ObjectProperty) UnmarshalJSON ¶
func (s *ObjectProperty) UnmarshalJSON(data []byte) error
type OneHotEncodingPreprocessor ¶
type OneHotEncodingPreprocessor struct { Field string `json:"field"` HotMap map[string]string `json:"hot_map"` }
OneHotEncodingPreprocessor type.
func NewOneHotEncodingPreprocessor ¶
func NewOneHotEncodingPreprocessor() *OneHotEncodingPreprocessor
NewOneHotEncodingPreprocessor returns a OneHotEncodingPreprocessor.
type OperatingSystem ¶
type OperatingSystem struct { Cgroup *Cgroup `json:"cgroup,omitempty"` Cpu *Cpu `json:"cpu,omitempty"` Mem *ExtendedMemoryStats `json:"mem,omitempty"` Swap *MemoryStats `json:"swap,omitempty"` Timestamp *int64 `json:"timestamp,omitempty"` }
OperatingSystem type.
func NewOperatingSystem ¶
func NewOperatingSystem() *OperatingSystem
NewOperatingSystem returns a OperatingSystem.
type OperatingSystemMemoryInfo ¶
type OperatingSystemMemoryInfo struct { AdjustedTotalInBytes *int64 `json:"adjusted_total_in_bytes,omitempty"` FreeInBytes int64 `json:"free_in_bytes"` FreePercent int `json:"free_percent"` TotalInBytes int64 `json:"total_in_bytes"` UsedInBytes int64 `json:"used_in_bytes"` UsedPercent int `json:"used_percent"` }
OperatingSystemMemoryInfo type.
func NewOperatingSystemMemoryInfo ¶
func NewOperatingSystemMemoryInfo() *OperatingSystemMemoryInfo
NewOperatingSystemMemoryInfo returns a OperatingSystemMemoryInfo.
type OutlierDetectionParameters ¶
type OutlierDetectionParameters struct { ComputeFeatureInfluence *bool `json:"compute_feature_influence,omitempty"` FeatureInfluenceThreshold *Float64 `json:"feature_influence_threshold,omitempty"` Method *string `json:"method,omitempty"` NNeighbors *int `json:"n_neighbors,omitempty"` OutlierFraction *Float64 `json:"outlier_fraction,omitempty"` StandardizationEnabled *bool `json:"standardization_enabled,omitempty"` }
OutlierDetectionParameters type.
func NewOutlierDetectionParameters ¶
func NewOutlierDetectionParameters() *OutlierDetectionParameters
NewOutlierDetectionParameters returns a OutlierDetectionParameters.
type OverallBucket ¶
type OverallBucket struct { // BucketSpan The length of the bucket in seconds. Matches the job with the longest // bucket_span value. BucketSpan int64 `json:"bucket_span"` // IsInterim If true, this is an interim result. In other words, the results are // calculated based on partial input data. IsInterim bool `json:"is_interim"` // Jobs An array of objects that contain the max_anomaly_score per job_id. Jobs []OverallBucketJob `json:"jobs"` // OverallScore The top_n average of the maximum bucket anomaly_score per job. OverallScore Float64 `json:"overall_score"` // ResultType Internal. This is always set to overall_bucket. ResultType string `json:"result_type"` // Timestamp The start time of the bucket for which these results were calculated. Timestamp int64 `json:"timestamp"` // TimestampString The start time of the bucket for which these results were calculated. TimestampString DateTime `json:"timestamp_string"` }
OverallBucket type.
func NewOverallBucket ¶
func NewOverallBucket() *OverallBucket
NewOverallBucket returns a OverallBucket.
type OverallBucketJob ¶
type OverallBucketJob struct { JobId string `json:"job_id"` MaxAnomalyScore Float64 `json:"max_anomaly_score"` }
OverallBucketJob type.
func NewOverallBucketJob ¶
func NewOverallBucketJob() *OverallBucketJob
NewOverallBucketJob returns a OverallBucketJob.
type Overlapping ¶
type Overlapping struct { IndexPatterns []string `json:"index_patterns"` Name string `json:"name"` }
Overlapping type.
type Page ¶
type Page struct { // From Skips the specified number of items. From *int `json:"from,omitempty"` // Size Specifies the maximum number of items to obtain. Size *int `json:"size,omitempty"` }
Page type.
type PagerDutyAction ¶
type PagerDutyAction struct { Account *string `json:"account,omitempty"` AttachPayload bool `json:"attach_payload"` Client *string `json:"client,omitempty"` ClientUrl *string `json:"client_url,omitempty"` Contexts []PagerDutyContext `json:"contexts,omitempty"` Description string `json:"description"` EventType *pagerdutyeventtype.PagerDutyEventType `json:"event_type,omitempty"` IncidentKey string `json:"incident_key"` Proxy *PagerDutyEventProxy `json:"proxy,omitempty"` }
PagerDutyAction type.
func NewPagerDutyAction ¶
func NewPagerDutyAction() *PagerDutyAction
NewPagerDutyAction returns a PagerDutyAction.
type PagerDutyContext ¶
type PagerDutyContext struct { Href *string `json:"href,omitempty"` Src *string `json:"src,omitempty"` Type pagerdutycontexttype.PagerDutyContextType `json:"type"` }
PagerDutyContext type.
func NewPagerDutyContext ¶
func NewPagerDutyContext() *PagerDutyContext
NewPagerDutyContext returns a PagerDutyContext.
type PagerDutyEvent ¶
type PagerDutyEvent struct { Account *string `json:"account,omitempty"` AttachPayload bool `json:"attach_payload"` Client *string `json:"client,omitempty"` ClientUrl *string `json:"client_url,omitempty"` Contexts []PagerDutyContext `json:"contexts,omitempty"` Description string `json:"description"` EventType *pagerdutyeventtype.PagerDutyEventType `json:"event_type,omitempty"` IncidentKey string `json:"incident_key"` Proxy *PagerDutyEventProxy `json:"proxy,omitempty"` }
PagerDutyEvent type.
func NewPagerDutyEvent ¶
func NewPagerDutyEvent() *PagerDutyEvent
NewPagerDutyEvent returns a PagerDutyEvent.
type PagerDutyEventProxy ¶
type PagerDutyEventProxy struct { Host *string `json:"host,omitempty"` Port *int `json:"port,omitempty"` }
PagerDutyEventProxy type.
func NewPagerDutyEventProxy ¶
func NewPagerDutyEventProxy() *PagerDutyEventProxy
NewPagerDutyEventProxy returns a PagerDutyEventProxy.
type PagerDutyResult ¶
type PagerDutyResult struct { Event PagerDutyEvent `json:"event"` Reason *string `json:"reason,omitempty"` Request *HttpInputRequestResult `json:"request,omitempty"` Response *HttpInputResponseResult `json:"response,omitempty"` }
PagerDutyResult type.
func NewPagerDutyResult ¶
func NewPagerDutyResult() *PagerDutyResult
NewPagerDutyResult returns a PagerDutyResult.
type PainlessContextSetup ¶
type PainlessContextSetup struct { Document json.RawMessage `json:"document,omitempty"` Index string `json:"index"` Query Query `json:"query"` }
PainlessContextSetup type.
func NewPainlessContextSetup ¶
func NewPainlessContextSetup() *PainlessContextSetup
NewPainlessContextSetup returns a PainlessContextSetup.
type ParentAggregate ¶
type ParentAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
ParentAggregate type.
func NewParentAggregate ¶
func NewParentAggregate() *ParentAggregate
NewParentAggregate returns a ParentAggregate.
func (ParentAggregate) MarshalJSON ¶
func (s ParentAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*ParentAggregate) UnmarshalJSON ¶
func (s *ParentAggregate) UnmarshalJSON(data []byte) error
type ParentAggregation ¶
type ParentAggregation struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Type *string `json:"type,omitempty"` }
ParentAggregation type.
func NewParentAggregation ¶
func NewParentAggregation() *ParentAggregation
NewParentAggregation returns a ParentAggregation.
type ParentIdQuery ¶
type ParentIdQuery struct { Boost *float32 `json:"boost,omitempty"` Id *string `json:"id,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` QueryName_ *string `json:"_name,omitempty"` Type *string `json:"type,omitempty"` }
ParentIdQuery type.
func NewParentIdQuery ¶
func NewParentIdQuery() *ParentIdQuery
NewParentIdQuery returns a ParentIdQuery.
type ParentTaskInfo ¶
type ParentTaskInfo struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` Cancelled *bool `json:"cancelled,omitempty"` Children []TaskInfo `json:"children,omitempty"` Description *string `json:"description,omitempty"` Headers map[string]string `json:"headers"` Id int64 `json:"id"` Node string `json:"node"` ParentTaskId TaskId `json:"parent_task_id,omitempty"` RunningTime Duration `json:"running_time,omitempty"` RunningTimeInNanos int64 `json:"running_time_in_nanos"` StartTimeInMillis int64 `json:"start_time_in_millis"` Status *TaskStatus `json:"status,omitempty"` Type string `json:"type"` }
ParentTaskInfo type.
func NewParentTaskInfo ¶
func NewParentTaskInfo() *ParentTaskInfo
NewParentTaskInfo returns a ParentTaskInfo.
type PassThroughInferenceOptions ¶
type PassThroughInferenceOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` Vocabulary *Vocabulary `json:"vocabulary,omitempty"` }
PassThroughInferenceOptions type.
func NewPassThroughInferenceOptions ¶
func NewPassThroughInferenceOptions() *PassThroughInferenceOptions
NewPassThroughInferenceOptions returns a PassThroughInferenceOptions.
type PassThroughInferenceUpdateOptions ¶
type PassThroughInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options to update when inferring Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` }
PassThroughInferenceUpdateOptions type.
func NewPassThroughInferenceUpdateOptions ¶
func NewPassThroughInferenceUpdateOptions() *PassThroughInferenceUpdateOptions
NewPassThroughInferenceUpdateOptions returns a PassThroughInferenceUpdateOptions.
type PathHierarchyTokenizer ¶
type PathHierarchyTokenizer struct { BufferSize int `json:"buffer_size"` Delimiter string `json:"delimiter"` Replacement string `json:"replacement"` Reverse bool `json:"reverse"` Skip int `json:"skip"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
PathHierarchyTokenizer type.
func NewPathHierarchyTokenizer ¶
func NewPathHierarchyTokenizer() *PathHierarchyTokenizer
NewPathHierarchyTokenizer returns a PathHierarchyTokenizer.
type PatternAnalyzer ¶
type PatternAnalyzer struct { Flags *string `json:"flags,omitempty"` Lowercase *bool `json:"lowercase,omitempty"` Pattern string `json:"pattern"` Stopwords []string `json:"stopwords,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
PatternAnalyzer type.
func NewPatternAnalyzer ¶
func NewPatternAnalyzer() *PatternAnalyzer
NewPatternAnalyzer returns a PatternAnalyzer.
type PatternCaptureTokenFilter ¶
type PatternCaptureTokenFilter struct { Patterns []string `json:"patterns"` PreserveOriginal *bool `json:"preserve_original,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
PatternCaptureTokenFilter type.
func NewPatternCaptureTokenFilter ¶
func NewPatternCaptureTokenFilter() *PatternCaptureTokenFilter
NewPatternCaptureTokenFilter returns a PatternCaptureTokenFilter.
type PatternReplaceCharFilter ¶
type PatternReplaceCharFilter struct { Flags *string `json:"flags,omitempty"` Pattern string `json:"pattern"` Replacement *string `json:"replacement,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
PatternReplaceCharFilter type.
func NewPatternReplaceCharFilter ¶
func NewPatternReplaceCharFilter() *PatternReplaceCharFilter
NewPatternReplaceCharFilter returns a PatternReplaceCharFilter.
type PatternReplaceTokenFilter ¶
type PatternReplaceTokenFilter struct { All *bool `json:"all,omitempty"` Flags *string `json:"flags,omitempty"` Pattern string `json:"pattern"` Replacement *string `json:"replacement,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
PatternReplaceTokenFilter type.
func NewPatternReplaceTokenFilter ¶
func NewPatternReplaceTokenFilter() *PatternReplaceTokenFilter
NewPatternReplaceTokenFilter returns a PatternReplaceTokenFilter.
type PatternTokenizer ¶
type PatternTokenizer struct { Flags string `json:"flags"` Group int `json:"group"` Pattern string `json:"pattern"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
PatternTokenizer type.
func NewPatternTokenizer ¶
func NewPatternTokenizer() *PatternTokenizer
NewPatternTokenizer returns a PatternTokenizer.
type PendingTask ¶
type PendingTask struct { Executing bool `json:"executing"` InsertOrder int `json:"insert_order"` Priority string `json:"priority"` Source string `json:"source"` TimeInQueue Duration `json:"time_in_queue,omitempty"` TimeInQueueMillis int64 `json:"time_in_queue_millis"` }
PendingTask type.
type PendingTasksRecord ¶
type PendingTasksRecord struct { // InsertOrder task insertion order InsertOrder *string `json:"insertOrder,omitempty"` // Priority task priority Priority *string `json:"priority,omitempty"` // Source task source Source *string `json:"source,omitempty"` // TimeInQueue how long task has been in queue TimeInQueue *string `json:"timeInQueue,omitempty"` }
PendingTasksRecord type.
func NewPendingTasksRecord ¶
func NewPendingTasksRecord() *PendingTasksRecord
NewPendingTasksRecord returns a PendingTasksRecord.
type PerPartitionCategorization ¶
type PerPartitionCategorization struct { // Enabled To enable this setting, you must also set the `partition_field_name` property // to the same value in every detector that uses the keyword `mlcategory`. // Otherwise, job creation fails. Enabled *bool `json:"enabled,omitempty"` // StopOnWarn This setting can be set to true only if per-partition categorization is // enabled. If true, both categorization and subsequent anomaly detection stops // for partitions where the categorization status changes to warn. This setting // makes it viable to have a job where it is expected that categorization works // well for some partitions but not others; you do not pay the cost of bad // categorization forever in the partitions where it works badly. StopOnWarn *bool `json:"stop_on_warn,omitempty"` }
PerPartitionCategorization type.
func NewPerPartitionCategorization ¶
func NewPerPartitionCategorization() *PerPartitionCategorization
NewPerPartitionCategorization returns a PerPartitionCategorization.
type Percentage ¶
type Percentage interface{}
Percentage holds the union for the following types:
string float32
type PercentageScoreHeuristic ¶
type PercentageScoreHeuristic struct { }
PercentageScoreHeuristic type.
func NewPercentageScoreHeuristic ¶
func NewPercentageScoreHeuristic() *PercentageScoreHeuristic
NewPercentageScoreHeuristic returns a PercentageScoreHeuristic.
type PercentileRanksAggregation ¶
type PercentileRanksAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Hdr *HdrMethod `json:"hdr,omitempty"` Keyed *bool `json:"keyed,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` Tdigest *TDigest `json:"tdigest,omitempty"` Values []Float64 `json:"values,omitempty"` }
PercentileRanksAggregation type.
func NewPercentileRanksAggregation ¶
func NewPercentileRanksAggregation() *PercentileRanksAggregation
NewPercentileRanksAggregation returns a PercentileRanksAggregation.
type Percentiles ¶
type Percentiles interface{}
Percentiles holds the union for the following types:
map[string]string []ArrayPercentilesItem
type PercentilesAggregation ¶
type PercentilesAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Hdr *HdrMethod `json:"hdr,omitempty"` Keyed *bool `json:"keyed,omitempty"` Missing Missing `json:"missing,omitempty"` Percents []Float64 `json:"percents,omitempty"` Script Script `json:"script,omitempty"` Tdigest *TDigest `json:"tdigest,omitempty"` }
PercentilesAggregation type.
func NewPercentilesAggregation ¶
func NewPercentilesAggregation() *PercentilesAggregation
NewPercentilesAggregation returns a PercentilesAggregation.
type PercentilesBucketAggregate ¶
type PercentilesBucketAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Values Percentiles `json:"values"` }
PercentilesBucketAggregate type.
func NewPercentilesBucketAggregate ¶
func NewPercentilesBucketAggregate() *PercentilesBucketAggregate
NewPercentilesBucketAggregate returns a PercentilesBucketAggregate.
type PercentilesBucketAggregation ¶
type PercentilesBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Percents []Float64 `json:"percents,omitempty"` }
PercentilesBucketAggregation type.
func NewPercentilesBucketAggregation ¶
func NewPercentilesBucketAggregation() *PercentilesBucketAggregation
NewPercentilesBucketAggregation returns a PercentilesBucketAggregation.
func (*PercentilesBucketAggregation) UnmarshalJSON ¶
func (s *PercentilesBucketAggregation) UnmarshalJSON(data []byte) error
type PercolateQuery ¶
type PercolateQuery struct { Boost *float32 `json:"boost,omitempty"` Document json.RawMessage `json:"document,omitempty"` Documents []json.RawMessage `json:"documents,omitempty"` Field string `json:"field"` Id *string `json:"id,omitempty"` Index *string `json:"index,omitempty"` Name *string `json:"name,omitempty"` Preference *string `json:"preference,omitempty"` QueryName_ *string `json:"_name,omitempty"` Routing *string `json:"routing,omitempty"` Version *int64 `json:"version,omitempty"` }
PercolateQuery type.
func NewPercolateQuery ¶
func NewPercolateQuery() *PercolateQuery
NewPercolateQuery returns a PercolateQuery.
type PercolatorProperty ¶
type PercolatorProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Type string `json:"type,omitempty"` }
PercolatorProperty type.
func NewPercolatorProperty ¶
func NewPercolatorProperty() *PercolatorProperty
NewPercolatorProperty returns a PercolatorProperty.
func (*PercolatorProperty) UnmarshalJSON ¶
func (s *PercolatorProperty) UnmarshalJSON(data []byte) error
type PersistentTaskStatus ¶
type PersistentTaskStatus struct {
Status shutdownstatus.ShutdownStatus `json:"status"`
}
PersistentTaskStatus type.
func NewPersistentTaskStatus ¶
func NewPersistentTaskStatus() *PersistentTaskStatus
NewPersistentTaskStatus returns a PersistentTaskStatus.
type Phase ¶
type Phase struct { Actions *IlmActions `json:"actions,omitempty"` Configurations *Configurations `json:"configurations,omitempty"` MinAge *Duration `json:"min_age,omitempty"` }
Phase type.
type Phases ¶
type Phases struct { Cold *Phase `json:"cold,omitempty"` Delete *Phase `json:"delete,omitempty"` Frozen *Phase `json:"frozen,omitempty"` Hot *Phase `json:"hot,omitempty"` Warm *Phase `json:"warm,omitempty"` }
Phases type.
type PhoneticTokenFilter ¶
type PhoneticTokenFilter struct { Encoder phoneticencoder.PhoneticEncoder `json:"encoder"` Languageset []phoneticlanguage.PhoneticLanguage `json:"languageset"` MaxCodeLen *int `json:"max_code_len,omitempty"` NameType phoneticnametype.PhoneticNameType `json:"name_type"` Replace *bool `json:"replace,omitempty"` RuleType phoneticruletype.PhoneticRuleType `json:"rule_type"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
PhoneticTokenFilter type.
func NewPhoneticTokenFilter ¶
func NewPhoneticTokenFilter() *PhoneticTokenFilter
NewPhoneticTokenFilter returns a PhoneticTokenFilter.
type PhraseSuggest ¶
type PhraseSuggest struct { Length int `json:"length"` Offset int `json:"offset"` Options []PhraseSuggestOption `json:"options"` Text string `json:"text"` }
PhraseSuggest type.
func NewPhraseSuggest ¶
func NewPhraseSuggest() *PhraseSuggest
NewPhraseSuggest returns a PhraseSuggest.
type PhraseSuggestCollate ¶
type PhraseSuggestCollate struct { Params map[string]json.RawMessage `json:"params,omitempty"` Prune *bool `json:"prune,omitempty"` Query PhraseSuggestCollateQuery `json:"query"` }
PhraseSuggestCollate type.
func NewPhraseSuggestCollate ¶
func NewPhraseSuggestCollate() *PhraseSuggestCollate
NewPhraseSuggestCollate returns a PhraseSuggestCollate.
type PhraseSuggestCollateQuery ¶
type PhraseSuggestCollateQuery struct { Id *string `json:"id,omitempty"` Source *string `json:"source,omitempty"` }
PhraseSuggestCollateQuery type.
func NewPhraseSuggestCollateQuery ¶
func NewPhraseSuggestCollateQuery() *PhraseSuggestCollateQuery
NewPhraseSuggestCollateQuery returns a PhraseSuggestCollateQuery.
type PhraseSuggestHighlight ¶
type PhraseSuggestHighlight struct { PostTag string `json:"post_tag"` PreTag string `json:"pre_tag"` }
PhraseSuggestHighlight type.
func NewPhraseSuggestHighlight ¶
func NewPhraseSuggestHighlight() *PhraseSuggestHighlight
NewPhraseSuggestHighlight returns a PhraseSuggestHighlight.
type PhraseSuggestOption ¶
type PhraseSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Highlighted *string `json:"highlighted,omitempty"` Score Float64 `json:"score"` Text string `json:"text"` }
PhraseSuggestOption type.
func NewPhraseSuggestOption ¶
func NewPhraseSuggestOption() *PhraseSuggestOption
NewPhraseSuggestOption returns a PhraseSuggestOption.
type PhraseSuggester ¶
type PhraseSuggester struct { Analyzer *string `json:"analyzer,omitempty"` Collate *PhraseSuggestCollate `json:"collate,omitempty"` Confidence *Float64 `json:"confidence,omitempty"` DirectGenerator []DirectGenerator `json:"direct_generator,omitempty"` Field string `json:"field"` ForceUnigrams *bool `json:"force_unigrams,omitempty"` GramSize *int `json:"gram_size,omitempty"` Highlight *PhraseSuggestHighlight `json:"highlight,omitempty"` MaxErrors *Float64 `json:"max_errors,omitempty"` RealWordErrorLikelihood *Float64 `json:"real_word_error_likelihood,omitempty"` Separator *string `json:"separator,omitempty"` ShardSize *int `json:"shard_size,omitempty"` Size *int `json:"size,omitempty"` Smoothing *SmoothingModelContainer `json:"smoothing,omitempty"` Text *string `json:"text,omitempty"` TokenLimit *int `json:"token_limit,omitempty"` }
PhraseSuggester type.
func NewPhraseSuggester ¶
func NewPhraseSuggester() *PhraseSuggester
NewPhraseSuggester returns a PhraseSuggester.
type PinnedQuery ¶
type PinnedQuery struct { Boost *float32 `json:"boost,omitempty"` Docs []PinnedDoc `json:"docs,omitempty"` Ids []string `json:"ids,omitempty"` Organic *Query `json:"organic,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
PinnedQuery type.
type PipelineConfig ¶
type PipelineConfig struct { Description *string `json:"description,omitempty"` Processors []ProcessorContainer `json:"processors"` Version *int64 `json:"version,omitempty"` }
PipelineConfig type.
func NewPipelineConfig ¶
func NewPipelineConfig() *PipelineConfig
NewPipelineConfig returns a PipelineConfig.
type PipelineMetadata ¶
PipelineMetadata type.
func NewPipelineMetadata ¶
func NewPipelineMetadata() *PipelineMetadata
NewPipelineMetadata returns a PipelineMetadata.
type PipelineProcessor ¶
type PipelineProcessor struct { Description *string `json:"description,omitempty"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissingPipeline *bool `json:"ignore_missing_pipeline,omitempty"` Name string `json:"name"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` }
PipelineProcessor type.
func NewPipelineProcessor ¶
func NewPipelineProcessor() *PipelineProcessor
NewPipelineProcessor returns a PipelineProcessor.
type PipelineSettings ¶
type PipelineSettings struct { PipelineBatchDelay int `json:"pipeline.batch.delay"` PipelineBatchSize int `json:"pipeline.batch.size"` PipelineWorkers int `json:"pipeline.workers"` QueueCheckpointWrites int `json:"queue.checkpoint.writes"` QueueMaxBytesNumber int `json:"queue.max_bytes.number"` QueueMaxBytesUnits string `json:"queue.max_bytes.units"` QueueType string `json:"queue.type"` }
PipelineSettings type.
func NewPipelineSettings ¶
func NewPipelineSettings() *PipelineSettings
NewPipelineSettings returns a PipelineSettings.
type PipelineSimulation ¶
type PipelineSimulation struct { Doc *DocumentSimulation `json:"doc,omitempty"` ProcessorResults []PipelineSimulation `json:"processor_results,omitempty"` ProcessorType *string `json:"processor_type,omitempty"` Status *actionstatusoptions.ActionStatusOptions `json:"status,omitempty"` Tag *string `json:"tag,omitempty"` }
PipelineSimulation type.
func NewPipelineSimulation ¶
func NewPipelineSimulation() *PipelineSimulation
NewPipelineSimulation returns a PipelineSimulation.
type Pivot ¶
type Pivot struct { // Aggregations Defines how to aggregate the grouped data. The following aggregations are // currently supported: average, bucket // script, bucket selector, cardinality, filter, geo bounds, geo centroid, geo // line, max, median absolute deviation, // min, missing, percentiles, rare terms, scripted metric, stats, sum, terms, // top metrics, value count, weighted // average. Aggregations map[string]Aggregations `json:"aggregations,omitempty"` // GroupBy Defines how to group the data. More than one grouping can be defined per // pivot. The following groupings are // currently supported: date histogram, geotile grid, histogram, terms. GroupBy map[string]PivotGroupByContainer `json:"group_by,omitempty"` }
Pivot type.
type PivotGroupByContainer ¶
type PivotGroupByContainer struct { DateHistogram *DateHistogramAggregation `json:"date_histogram,omitempty"` GeotileGrid *GeoTileGridAggregation `json:"geotile_grid,omitempty"` Histogram *HistogramAggregation `json:"histogram,omitempty"` Terms *TermsAggregation `json:"terms,omitempty"` }
PivotGroupByContainer type.
func NewPivotGroupByContainer ¶
func NewPivotGroupByContainer() *PivotGroupByContainer
NewPivotGroupByContainer returns a PivotGroupByContainer.
type PluginStats ¶
type PluginStats struct { Classname string `json:"classname"` Description string `json:"description"` ElasticsearchVersion string `json:"elasticsearch_version"` ExtendedPlugins []string `json:"extended_plugins"` HasNativeController bool `json:"has_native_controller"` JavaVersion string `json:"java_version"` Licensed bool `json:"licensed"` Name string `json:"name"` Version string `json:"version"` }
PluginStats type.
type PluginsRecord ¶
type PluginsRecord struct { // Component component Component *string `json:"component,omitempty"` // Description plugin details Description *string `json:"description,omitempty"` // Id unique node id Id *string `json:"id,omitempty"` // Name node name Name *string `json:"name,omitempty"` // Type plugin type Type *string `json:"type,omitempty"` // Version component version Version *string `json:"version,omitempty"` }
PluginsRecord type.
func NewPluginsRecord ¶
func NewPluginsRecord() *PluginsRecord
NewPluginsRecord returns a PluginsRecord.
type PluginsStatus ¶
type PluginsStatus struct {
Status shutdownstatus.ShutdownStatus `json:"status"`
}
PluginsStatus type.
func NewPluginsStatus ¶
func NewPluginsStatus() *PluginsStatus
NewPluginsStatus returns a PluginsStatus.
type PointInTimeReference ¶
type PointInTimeReference struct { Id string `json:"id"` KeepAlive Duration `json:"keep_alive,omitempty"` }
PointInTimeReference type.
func NewPointInTimeReference ¶
func NewPointInTimeReference() *PointInTimeReference
NewPointInTimeReference returns a PointInTimeReference.
type PointProperty ¶
type PointProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` IgnoreZValue *bool `json:"ignore_z_value,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *string `json:"null_value,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
PointProperty type.
func NewPointProperty ¶
func NewPointProperty() *PointProperty
NewPointProperty returns a PointProperty.
func (*PointProperty) UnmarshalJSON ¶
func (s *PointProperty) UnmarshalJSON(data []byte) error
type Pool ¶
type Pool struct { MaxInBytes *int64 `json:"max_in_bytes,omitempty"` PeakMaxInBytes *int64 `json:"peak_max_in_bytes,omitempty"` PeakUsedInBytes *int64 `json:"peak_used_in_bytes,omitempty"` UsedInBytes *int64 `json:"used_in_bytes,omitempty"` }
Pool type.
type PorterStemTokenFilter ¶
type PorterStemTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
PorterStemTokenFilter type.
func NewPorterStemTokenFilter ¶
func NewPorterStemTokenFilter() *PorterStemTokenFilter
NewPorterStemTokenFilter returns a PorterStemTokenFilter.
type PostMigrationFeature ¶
type PostMigrationFeature struct {
FeatureName string `json:"feature_name"`
}
PostMigrationFeature type.
func NewPostMigrationFeature ¶
func NewPostMigrationFeature() *PostMigrationFeature
NewPostMigrationFeature returns a PostMigrationFeature.
type PredicateTokenFilter ¶
type PredicateTokenFilter struct { Script Script `json:"script"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
PredicateTokenFilter type.
func NewPredicateTokenFilter ¶
func NewPredicateTokenFilter() *PredicateTokenFilter
NewPredicateTokenFilter returns a PredicateTokenFilter.
type PredictedValue ¶
type PredictedValue interface{}
PredictedValue holds the union for the following types:
string Float64 bool int
type PrefixQuery ¶
type PrefixQuery struct { Boost *float32 `json:"boost,omitempty"` CaseInsensitive *bool `json:"case_insensitive,omitempty"` QueryName_ *string `json:"_name,omitempty"` Rewrite *string `json:"rewrite,omitempty"` Value string `json:"value"` }
PrefixQuery type.
type Preprocessor ¶
type Preprocessor struct { FrequencyEncoding *FrequencyEncodingPreprocessor `json:"frequency_encoding,omitempty"` OneHotEncoding *OneHotEncodingPreprocessor `json:"one_hot_encoding,omitempty"` TargetMeanEncoding *TargetMeanEncodingPreprocessor `json:"target_mean_encoding,omitempty"` }
Preprocessor type.
func NewPreprocessor ¶
func NewPreprocessor() *Preprocessor
NewPreprocessor returns a Preprocessor.
type PressureMemory ¶
type PressureMemory struct { All ByteSize `json:"all,omitempty"` AllInBytes *int64 `json:"all_in_bytes,omitempty"` CombinedCoordinatingAndPrimary ByteSize `json:"combined_coordinating_and_primary,omitempty"` CombinedCoordinatingAndPrimaryInBytes *int64 `json:"combined_coordinating_and_primary_in_bytes,omitempty"` Coordinating ByteSize `json:"coordinating,omitempty"` CoordinatingInBytes *int64 `json:"coordinating_in_bytes,omitempty"` CoordinatingRejections *int64 `json:"coordinating_rejections,omitempty"` Primary ByteSize `json:"primary,omitempty"` PrimaryInBytes *int64 `json:"primary_in_bytes,omitempty"` PrimaryRejections *int64 `json:"primary_rejections,omitempty"` Replica ByteSize `json:"replica,omitempty"` ReplicaInBytes *int64 `json:"replica_in_bytes,omitempty"` ReplicaRejections *int64 `json:"replica_rejections,omitempty"` }
PressureMemory type.
func NewPressureMemory ¶
func NewPressureMemory() *PressureMemory
NewPressureMemory returns a PressureMemory.
type Privileges ¶
Privileges type alias.
type PrivilegesActions ¶
type PrivilegesActions struct { Actions []string `json:"actions"` Application *string `json:"application,omitempty"` Metadata map[string]json.RawMessage `json:"metadata,omitempty"` Name *string `json:"name,omitempty"` }
PrivilegesActions type.
func NewPrivilegesActions ¶
func NewPrivilegesActions() *PrivilegesActions
NewPrivilegesActions returns a PrivilegesActions.
type PrivilegesCheck ¶
type PrivilegesCheck struct { Application []ApplicationPrivilegesCheck `json:"application,omitempty"` // Cluster A list of the cluster privileges that you want to check. Cluster []clusterprivilege.ClusterPrivilege `json:"cluster,omitempty"` Index []IndexPrivilegesCheck `json:"index,omitempty"` }
PrivilegesCheck type.
func NewPrivilegesCheck ¶
func NewPrivilegesCheck() *PrivilegesCheck
NewPrivilegesCheck returns a PrivilegesCheck.
type Process ¶
type Process struct { Cpu *Cpu `json:"cpu,omitempty"` MaxFileDescriptors *int `json:"max_file_descriptors,omitempty"` Mem *MemoryStats `json:"mem,omitempty"` OpenFileDescriptors *int `json:"open_file_descriptors,omitempty"` Timestamp *int64 `json:"timestamp,omitempty"` }
Process type.
type Processor ¶
type Processor struct { Count *int64 `json:"count,omitempty"` Current *int64 `json:"current,omitempty"` Failed *int64 `json:"failed,omitempty"` TimeInMillis *int64 `json:"time_in_millis,omitempty"` }
Processor type.
type ProcessorContainer ¶
type ProcessorContainer struct { Append *AppendProcessor `json:"append,omitempty"` Attachment *AttachmentProcessor `json:"attachment,omitempty"` Bytes *BytesProcessor `json:"bytes,omitempty"` Circle *CircleProcessor `json:"circle,omitempty"` Convert *ConvertProcessor `json:"convert,omitempty"` Csv *CsvProcessor `json:"csv,omitempty"` Date *DateProcessor `json:"date,omitempty"` DateIndexName *DateIndexNameProcessor `json:"date_index_name,omitempty"` Dissect *DissectProcessor `json:"dissect,omitempty"` DotExpander *DotExpanderProcessor `json:"dot_expander,omitempty"` Drop *DropProcessor `json:"drop,omitempty"` Enrich *EnrichProcessor `json:"enrich,omitempty"` Fail *FailProcessor `json:"fail,omitempty"` Foreach *ForeachProcessor `json:"foreach,omitempty"` Geoip *GeoIpProcessor `json:"geoip,omitempty"` Grok *GrokProcessor `json:"grok,omitempty"` Gsub *GsubProcessor `json:"gsub,omitempty"` Inference *InferenceProcessor `json:"inference,omitempty"` Join *JoinProcessor `json:"join,omitempty"` Json *JsonProcessor `json:"json,omitempty"` Kv *KeyValueProcessor `json:"kv,omitempty"` Lowercase *LowercaseProcessor `json:"lowercase,omitempty"` Pipeline *PipelineProcessor `json:"pipeline,omitempty"` Remove *RemoveProcessor `json:"remove,omitempty"` Rename *RenameProcessor `json:"rename,omitempty"` Script Script `json:"script,omitempty"` Set *SetProcessor `json:"set,omitempty"` SetSecurityUser *SetSecurityUserProcessor `json:"set_security_user,omitempty"` Sort *SortProcessor `json:"sort,omitempty"` Split *SplitProcessor `json:"split,omitempty"` Trim *TrimProcessor `json:"trim,omitempty"` Uppercase *UppercaseProcessor `json:"uppercase,omitempty"` Urldecode *UrlDecodeProcessor `json:"urldecode,omitempty"` UserAgent *UserAgentProcessor `json:"user_agent,omitempty"` }
ProcessorContainer type.
func NewProcessorContainer ¶
func NewProcessorContainer() *ProcessorContainer
NewProcessorContainer returns a ProcessorContainer.
type Property ¶
type Property interface{}
Property holds the union for the following types:
BinaryProperty BooleanProperty DynamicProperty JoinProperty KeywordProperty MatchOnlyTextProperty PercolatorProperty RankFeatureProperty RankFeaturesProperty SearchAsYouTypeProperty TextProperty VersionProperty WildcardProperty DateNanosProperty DateProperty AggregateMetricDoubleProperty DenseVectorProperty FlattenedProperty NestedProperty ObjectProperty CompletionProperty ConstantKeywordProperty FieldAliasProperty HistogramProperty IpProperty Murmur3HashProperty TokenCountProperty GeoPointProperty GeoShapeProperty PointProperty ShapeProperty ByteNumberProperty DoubleNumberProperty FloatNumberProperty HalfFloatNumberProperty IntegerNumberProperty LongNumberProperty ScaledFloatNumberProperty ShortNumberProperty UnsignedLongNumberProperty DateRangeProperty DoubleRangeProperty FloatRangeProperty IntegerRangeProperty IpRangeProperty LongRangeProperty
type PublishedClusterStates ¶
type PublishedClusterStates struct { CompatibleDiffs *int64 `json:"compatible_diffs,omitempty"` FullStates *int64 `json:"full_states,omitempty"` IncompatibleDiffs *int64 `json:"incompatible_diffs,omitempty"` }
PublishedClusterStates type.
func NewPublishedClusterStates ¶
func NewPublishedClusterStates() *PublishedClusterStates
NewPublishedClusterStates returns a PublishedClusterStates.
type Query ¶
type Query struct { Bool *BoolQuery `json:"bool,omitempty"` Boosting *BoostingQuery `json:"boosting,omitempty"` CombinedFields *CombinedFieldsQuery `json:"combined_fields,omitempty"` Common map[string]CommonTermsQuery `json:"common,omitempty"` ConstantScore *ConstantScoreQuery `json:"constant_score,omitempty"` DisMax *DisMaxQuery `json:"dis_max,omitempty"` DistanceFeature DistanceFeatureQuery `json:"distance_feature,omitempty"` Exists *ExistsQuery `json:"exists,omitempty"` FieldMaskingSpan *SpanFieldMaskingQuery `json:"field_masking_span,omitempty"` FunctionScore *FunctionScoreQuery `json:"function_score,omitempty"` Fuzzy map[string]FuzzyQuery `json:"fuzzy,omitempty"` GeoBoundingBox *GeoBoundingBoxQuery `json:"geo_bounding_box,omitempty"` GeoDistance *GeoDistanceQuery `json:"geo_distance,omitempty"` GeoPolygon *GeoPolygonQuery `json:"geo_polygon,omitempty"` GeoShape *GeoShapeQuery `json:"geo_shape,omitempty"` HasChild *HasChildQuery `json:"has_child,omitempty"` HasParent *HasParentQuery `json:"has_parent,omitempty"` Ids *IdsQuery `json:"ids,omitempty"` Intervals map[string]IntervalsQuery `json:"intervals,omitempty"` Match map[string]MatchQuery `json:"match,omitempty"` MatchAll *MatchAllQuery `json:"match_all,omitempty"` MatchBoolPrefix map[string]MatchBoolPrefixQuery `json:"match_bool_prefix,omitempty"` MatchNone *MatchNoneQuery `json:"match_none,omitempty"` MatchPhrase map[string]MatchPhraseQuery `json:"match_phrase,omitempty"` MatchPhrasePrefix map[string]MatchPhrasePrefixQuery `json:"match_phrase_prefix,omitempty"` MoreLikeThis *MoreLikeThisQuery `json:"more_like_this,omitempty"` MultiMatch *MultiMatchQuery `json:"multi_match,omitempty"` Nested *NestedQuery `json:"nested,omitempty"` ParentId *ParentIdQuery `json:"parent_id,omitempty"` Percolate *PercolateQuery `json:"percolate,omitempty"` Pinned *PinnedQuery `json:"pinned,omitempty"` Prefix map[string]PrefixQuery `json:"prefix,omitempty"` QueryString *QueryStringQuery `json:"query_string,omitempty"` Range map[string]RangeQuery `json:"range,omitempty"` RankFeature *RankFeatureQuery `json:"rank_feature,omitempty"` Regexp map[string]RegexpQuery `json:"regexp,omitempty"` Script *ScriptQuery `json:"script,omitempty"` ScriptScore *ScriptScoreQuery `json:"script_score,omitempty"` Shape *ShapeQuery `json:"shape,omitempty"` SimpleQueryString *SimpleQueryStringQuery `json:"simple_query_string,omitempty"` SpanContaining *SpanContainingQuery `json:"span_containing,omitempty"` SpanFirst *SpanFirstQuery `json:"span_first,omitempty"` SpanMulti *SpanMultiTermQuery `json:"span_multi,omitempty"` SpanNear *SpanNearQuery `json:"span_near,omitempty"` SpanNot *SpanNotQuery `json:"span_not,omitempty"` SpanOr *SpanOrQuery `json:"span_or,omitempty"` SpanTerm map[string]SpanTermQuery `json:"span_term,omitempty"` SpanWithin *SpanWithinQuery `json:"span_within,omitempty"` Term map[string]TermQuery `json:"term,omitempty"` Terms *TermsQuery `json:"terms,omitempty"` TermsSet map[string]TermsSetQuery `json:"terms_set,omitempty"` Type *TypeQuery `json:"type,omitempty"` Wildcard map[string]WildcardQuery `json:"wildcard,omitempty"` Wrapper *WrapperQuery `json:"wrapper,omitempty"` }
Query type.
type QueryBreakdown ¶
type QueryBreakdown struct { Advance int64 `json:"advance"` AdvanceCount int64 `json:"advance_count"` BuildScorer int64 `json:"build_scorer"` BuildScorerCount int64 `json:"build_scorer_count"` ComputeMaxScore int64 `json:"compute_max_score"` ComputeMaxScoreCount int64 `json:"compute_max_score_count"` CreateWeight int64 `json:"create_weight"` CreateWeightCount int64 `json:"create_weight_count"` Match int64 `json:"match"` MatchCount int64 `json:"match_count"` NextDoc int64 `json:"next_doc"` NextDocCount int64 `json:"next_doc_count"` Score int64 `json:"score"` ScoreCount int64 `json:"score_count"` SetMinCompetitiveScore int64 `json:"set_min_competitive_score"` SetMinCompetitiveScoreCount int64 `json:"set_min_competitive_score_count"` ShallowAdvance int64 `json:"shallow_advance"` ShallowAdvanceCount int64 `json:"shallow_advance_count"` }
QueryBreakdown type.
func NewQueryBreakdown ¶
func NewQueryBreakdown() *QueryBreakdown
NewQueryBreakdown returns a QueryBreakdown.
type QueryCacheStats ¶
type QueryCacheStats struct { CacheCount int `json:"cache_count"` CacheSize int `json:"cache_size"` Evictions int `json:"evictions"` HitCount int `json:"hit_count"` MemorySize ByteSize `json:"memory_size,omitempty"` MemorySizeInBytes int `json:"memory_size_in_bytes"` MissCount int `json:"miss_count"` TotalCount int `json:"total_count"` }
QueryCacheStats type.
func NewQueryCacheStats ¶
func NewQueryCacheStats() *QueryCacheStats
NewQueryCacheStats returns a QueryCacheStats.
type QueryProfile ¶
type QueryProfile struct { Breakdown QueryBreakdown `json:"breakdown"` Children []QueryProfile `json:"children,omitempty"` Description string `json:"description"` TimeInNanos int64 `json:"time_in_nanos"` Type string `json:"type"` }
QueryProfile type.
func NewQueryProfile ¶
func NewQueryProfile() *QueryProfile
NewQueryProfile returns a QueryProfile.
type QueryStringQuery ¶
type QueryStringQuery struct { AllowLeadingWildcard *bool `json:"allow_leading_wildcard,omitempty"` AnalyzeWildcard *bool `json:"analyze_wildcard,omitempty"` Analyzer *string `json:"analyzer,omitempty"` AutoGenerateSynonymsPhraseQuery *bool `json:"auto_generate_synonyms_phrase_query,omitempty"` Boost *float32 `json:"boost,omitempty"` DefaultField *string `json:"default_field,omitempty"` DefaultOperator *operator.Operator `json:"default_operator,omitempty"` EnablePositionIncrements *bool `json:"enable_position_increments,omitempty"` Escape *bool `json:"escape,omitempty"` Fields []string `json:"fields,omitempty"` Fuzziness Fuzziness `json:"fuzziness,omitempty"` FuzzyMaxExpansions *int `json:"fuzzy_max_expansions,omitempty"` FuzzyPrefixLength *int `json:"fuzzy_prefix_length,omitempty"` FuzzyRewrite *string `json:"fuzzy_rewrite,omitempty"` FuzzyTranspositions *bool `json:"fuzzy_transpositions,omitempty"` Lenient *bool `json:"lenient,omitempty"` MaxDeterminizedStates *int `json:"max_determinized_states,omitempty"` MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` PhraseSlop *Float64 `json:"phrase_slop,omitempty"` Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` QuoteAnalyzer *string `json:"quote_analyzer,omitempty"` QuoteFieldSuffix *string `json:"quote_field_suffix,omitempty"` Rewrite *string `json:"rewrite,omitempty"` TieBreaker *Float64 `json:"tie_breaker,omitempty"` TimeZone *string `json:"time_zone,omitempty"` Type *textquerytype.TextQueryType `json:"type,omitempty"` }
QueryStringQuery type.
func NewQueryStringQuery ¶
func NewQueryStringQuery() *QueryStringQuery
NewQueryStringQuery returns a QueryStringQuery.
type QueryVectorBuilder ¶
type QueryVectorBuilder struct {
TextEmbedding *TextEmbedding `json:"text_embedding,omitempty"`
}
QueryVectorBuilder type.
func NewQueryVectorBuilder ¶
func NewQueryVectorBuilder() *QueryVectorBuilder
NewQueryVectorBuilder returns a QueryVectorBuilder.
type QueryWatch ¶
type QueryWatch struct { Id_ string `json:"_id"` PrimaryTerm_ *int `json:"_primary_term,omitempty"` SeqNo_ *int64 `json:"_seq_no,omitempty"` Status *WatchStatus `json:"status,omitempty"` Watch *Watch `json:"watch,omitempty"` }
QueryWatch type.
type QuestionAnsweringInferenceOptions ¶
type QuestionAnsweringInferenceOptions struct { // MaxAnswerLength The maximum answer length to consider MaxAnswerLength *int `json:"max_answer_length,omitempty"` // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options to update when inferring Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` }
QuestionAnsweringInferenceOptions type.
func NewQuestionAnsweringInferenceOptions ¶
func NewQuestionAnsweringInferenceOptions() *QuestionAnsweringInferenceOptions
NewQuestionAnsweringInferenceOptions returns a QuestionAnsweringInferenceOptions.
type QuestionAnsweringInferenceUpdateOptions ¶
type QuestionAnsweringInferenceUpdateOptions struct { // MaxAnswerLength The maximum answer length to consider for extraction MaxAnswerLength *int `json:"max_answer_length,omitempty"` // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` // Question The question to answer given the inference context Question string `json:"question"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options to update when inferring Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` }
QuestionAnsweringInferenceUpdateOptions type.
func NewQuestionAnsweringInferenceUpdateOptions ¶
func NewQuestionAnsweringInferenceUpdateOptions() *QuestionAnsweringInferenceUpdateOptions
NewQuestionAnsweringInferenceUpdateOptions returns a QuestionAnsweringInferenceUpdateOptions.
type RandomScoreFunction ¶
type RandomScoreFunction struct { Field *string `json:"field,omitempty"` Seed string `json:"seed,omitempty"` }
RandomScoreFunction type.
func NewRandomScoreFunction ¶
func NewRandomScoreFunction() *RandomScoreFunction
NewRandomScoreFunction returns a RandomScoreFunction.
type RangeAggregate ¶
type RangeAggregate struct { Buckets BucketsRangeBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
RangeAggregate type.
func NewRangeAggregate ¶
func NewRangeAggregate() *RangeAggregate
NewRangeAggregate returns a RangeAggregate.
func (*RangeAggregate) UnmarshalJSON ¶
func (s *RangeAggregate) UnmarshalJSON(data []byte) error
type RangeAggregation ¶
type RangeAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Keyed *bool `json:"keyed,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Missing *int `json:"missing,omitempty"` Name *string `json:"name,omitempty"` Ranges []AggregationRange `json:"ranges,omitempty"` Script Script `json:"script,omitempty"` }
RangeAggregation type.
func NewRangeAggregation ¶
func NewRangeAggregation() *RangeAggregation
NewRangeAggregation returns a RangeAggregation.
type RangeBucket ¶
type RangeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` From *Float64 `json:"from,omitempty"` FromAsString *string `json:"from_as_string,omitempty"` // Key The bucket key. Present if the aggregation is _not_ keyed Key *string `json:"key,omitempty"` To *Float64 `json:"to,omitempty"` ToAsString *string `json:"to_as_string,omitempty"` }
RangeBucket type.
func (RangeBucket) MarshalJSON ¶
func (s RangeBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*RangeBucket) UnmarshalJSON ¶
func (s *RangeBucket) UnmarshalJSON(data []byte) error
type RangeQuery ¶
type RangeQuery interface{}
RangeQuery holds the union for the following types:
DateRangeQuery NumberRangeQuery
type RankEvalHit ¶
type RankEvalHit struct { Id_ string `json:"_id"` Index_ string `json:"_index"` Score_ Float64 `json:"_score"` }
RankEvalHit type.
type RankEvalHitItem ¶
type RankEvalHitItem struct { Hit RankEvalHit `json:"hit"` Rating Float64 `json:"rating,omitempty"` }
RankEvalHitItem type.
func NewRankEvalHitItem ¶
func NewRankEvalHitItem() *RankEvalHitItem
NewRankEvalHitItem returns a RankEvalHitItem.
type RankEvalMetric ¶
type RankEvalMetric struct { Dcg *RankEvalMetricDiscountedCumulativeGain `json:"dcg,omitempty"` ExpectedReciprocalRank *RankEvalMetricExpectedReciprocalRank `json:"expected_reciprocal_rank,omitempty"` MeanReciprocalRank *RankEvalMetricMeanReciprocalRank `json:"mean_reciprocal_rank,omitempty"` Precision *RankEvalMetricPrecision `json:"precision,omitempty"` Recall *RankEvalMetricRecall `json:"recall,omitempty"` }
RankEvalMetric type.
func NewRankEvalMetric ¶
func NewRankEvalMetric() *RankEvalMetric
NewRankEvalMetric returns a RankEvalMetric.
type RankEvalMetricDetail ¶
type RankEvalMetricDetail struct { // Hits The hits section shows a grouping of the search results with their supplied // ratings Hits []RankEvalHitItem `json:"hits"` // MetricDetails The metric_details give additional information about the calculated quality // metric (e.g. how many of the retrieved documents were relevant). The content // varies for each metric but allows for better interpretation of the results MetricDetails map[string]map[string]json.RawMessage `json:"metric_details"` // MetricScore The metric_score in the details section shows the contribution of this query // to the global quality metric score MetricScore Float64 `json:"metric_score"` // UnratedDocs The unrated_docs section contains an _index and _id entry for each document // in the search result for this query that didn’t have a ratings value. This // can be used to ask the user to supply ratings for these documents UnratedDocs []UnratedDocument `json:"unrated_docs"` }
RankEvalMetricDetail type.
func NewRankEvalMetricDetail ¶
func NewRankEvalMetricDetail() *RankEvalMetricDetail
NewRankEvalMetricDetail returns a RankEvalMetricDetail.
type RankEvalMetricDiscountedCumulativeGain ¶
type RankEvalMetricDiscountedCumulativeGain struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. K *int `json:"k,omitempty"` // Normalize If set to true, this metric will calculate the Normalized DCG. Normalize *bool `json:"normalize,omitempty"` }
RankEvalMetricDiscountedCumulativeGain type.
func NewRankEvalMetricDiscountedCumulativeGain ¶
func NewRankEvalMetricDiscountedCumulativeGain() *RankEvalMetricDiscountedCumulativeGain
NewRankEvalMetricDiscountedCumulativeGain returns a RankEvalMetricDiscountedCumulativeGain.
type RankEvalMetricExpectedReciprocalRank ¶
type RankEvalMetricExpectedReciprocalRank struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. K *int `json:"k,omitempty"` // MaximumRelevance The highest relevance grade used in the user-supplied relevance judgments. MaximumRelevance int `json:"maximum_relevance"` }
RankEvalMetricExpectedReciprocalRank type.
func NewRankEvalMetricExpectedReciprocalRank ¶
func NewRankEvalMetricExpectedReciprocalRank() *RankEvalMetricExpectedReciprocalRank
NewRankEvalMetricExpectedReciprocalRank returns a RankEvalMetricExpectedReciprocalRank.
type RankEvalMetricMeanReciprocalRank ¶
type RankEvalMetricMeanReciprocalRank struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. K *int `json:"k,omitempty"` // RelevantRatingThreshold Sets the rating threshold above which documents are considered to be // "relevant". RelevantRatingThreshold *int `json:"relevant_rating_threshold,omitempty"` }
RankEvalMetricMeanReciprocalRank type.
func NewRankEvalMetricMeanReciprocalRank ¶
func NewRankEvalMetricMeanReciprocalRank() *RankEvalMetricMeanReciprocalRank
NewRankEvalMetricMeanReciprocalRank returns a RankEvalMetricMeanReciprocalRank.
type RankEvalMetricPrecision ¶
type RankEvalMetricPrecision struct { // IgnoreUnlabeled Controls how unlabeled documents in the search results are counted. If set to // true, unlabeled documents are ignored and neither count as relevant nor // irrelevant. If set to false (the default), they are treated as irrelevant. IgnoreUnlabeled *bool `json:"ignore_unlabeled,omitempty"` // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. K *int `json:"k,omitempty"` // RelevantRatingThreshold Sets the rating threshold above which documents are considered to be // "relevant". RelevantRatingThreshold *int `json:"relevant_rating_threshold,omitempty"` }
RankEvalMetricPrecision type.
func NewRankEvalMetricPrecision ¶
func NewRankEvalMetricPrecision() *RankEvalMetricPrecision
NewRankEvalMetricPrecision returns a RankEvalMetricPrecision.
type RankEvalMetricRatingTreshold ¶
type RankEvalMetricRatingTreshold struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. K *int `json:"k,omitempty"` // RelevantRatingThreshold Sets the rating threshold above which documents are considered to be // "relevant". RelevantRatingThreshold *int `json:"relevant_rating_threshold,omitempty"` }
RankEvalMetricRatingTreshold type.
func NewRankEvalMetricRatingTreshold ¶
func NewRankEvalMetricRatingTreshold() *RankEvalMetricRatingTreshold
NewRankEvalMetricRatingTreshold returns a RankEvalMetricRatingTreshold.
type RankEvalMetricRecall ¶
type RankEvalMetricRecall struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. K *int `json:"k,omitempty"` // RelevantRatingThreshold Sets the rating threshold above which documents are considered to be // "relevant". RelevantRatingThreshold *int `json:"relevant_rating_threshold,omitempty"` }
RankEvalMetricRecall type.
func NewRankEvalMetricRecall ¶
func NewRankEvalMetricRecall() *RankEvalMetricRecall
NewRankEvalMetricRecall returns a RankEvalMetricRecall.
type RankEvalQuery ¶
RankEvalQuery type.
func NewRankEvalQuery ¶
func NewRankEvalQuery() *RankEvalQuery
NewRankEvalQuery returns a RankEvalQuery.
type RankEvalRequestItem ¶
type RankEvalRequestItem struct { // Id The search request’s ID, used to group result details later. Id string `json:"id"` // Params The search template parameters. Params map[string]json.RawMessage `json:"params,omitempty"` // Ratings List of document ratings Ratings []DocumentRating `json:"ratings"` // Request The query being evaluated. Request *RankEvalQuery `json:"request,omitempty"` // TemplateId The search template Id TemplateId *string `json:"template_id,omitempty"` }
RankEvalRequestItem type.
func NewRankEvalRequestItem ¶
func NewRankEvalRequestItem() *RankEvalRequestItem
NewRankEvalRequestItem returns a RankEvalRequestItem.
type RankFeatureFunction ¶
type RankFeatureFunction struct { }
RankFeatureFunction type.
func NewRankFeatureFunction ¶
func NewRankFeatureFunction() *RankFeatureFunction
NewRankFeatureFunction returns a RankFeatureFunction.
type RankFeatureFunctionLinear ¶
type RankFeatureFunctionLinear struct { }
RankFeatureFunctionLinear type.
func NewRankFeatureFunctionLinear ¶
func NewRankFeatureFunctionLinear() *RankFeatureFunctionLinear
NewRankFeatureFunctionLinear returns a RankFeatureFunctionLinear.
type RankFeatureFunctionLogarithm ¶
type RankFeatureFunctionLogarithm struct {
ScalingFactor float32 `json:"scaling_factor"`
}
RankFeatureFunctionLogarithm type.
func NewRankFeatureFunctionLogarithm ¶
func NewRankFeatureFunctionLogarithm() *RankFeatureFunctionLogarithm
NewRankFeatureFunctionLogarithm returns a RankFeatureFunctionLogarithm.
type RankFeatureFunctionSaturation ¶
type RankFeatureFunctionSaturation struct {
Pivot *float32 `json:"pivot,omitempty"`
}
RankFeatureFunctionSaturation type.
func NewRankFeatureFunctionSaturation ¶
func NewRankFeatureFunctionSaturation() *RankFeatureFunctionSaturation
NewRankFeatureFunctionSaturation returns a RankFeatureFunctionSaturation.
type RankFeatureFunctionSigmoid ¶
type RankFeatureFunctionSigmoid struct { Exponent float32 `json:"exponent"` Pivot float32 `json:"pivot"` }
RankFeatureFunctionSigmoid type.
func NewRankFeatureFunctionSigmoid ¶
func NewRankFeatureFunctionSigmoid() *RankFeatureFunctionSigmoid
NewRankFeatureFunctionSigmoid returns a RankFeatureFunctionSigmoid.
type RankFeatureProperty ¶
type RankFeatureProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` PositiveScoreImpact *bool `json:"positive_score_impact,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Type string `json:"type,omitempty"` }
RankFeatureProperty type.
func NewRankFeatureProperty ¶
func NewRankFeatureProperty() *RankFeatureProperty
NewRankFeatureProperty returns a RankFeatureProperty.
func (*RankFeatureProperty) UnmarshalJSON ¶
func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error
type RankFeatureQuery ¶
type RankFeatureQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` Linear *RankFeatureFunctionLinear `json:"linear,omitempty"` Log *RankFeatureFunctionLogarithm `json:"log,omitempty"` QueryName_ *string `json:"_name,omitempty"` Saturation *RankFeatureFunctionSaturation `json:"saturation,omitempty"` Sigmoid *RankFeatureFunctionSigmoid `json:"sigmoid,omitempty"` }
RankFeatureQuery type.
func NewRankFeatureQuery ¶
func NewRankFeatureQuery() *RankFeatureQuery
NewRankFeatureQuery returns a RankFeatureQuery.
type RankFeaturesProperty ¶
type RankFeaturesProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Type string `json:"type,omitempty"` }
RankFeaturesProperty type.
func NewRankFeaturesProperty ¶
func NewRankFeaturesProperty() *RankFeaturesProperty
NewRankFeaturesProperty returns a RankFeaturesProperty.
func (*RankFeaturesProperty) UnmarshalJSON ¶
func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error
type RareTermsAggregation ¶
type RareTermsAggregation struct { Exclude []string `json:"exclude,omitempty"` Field *string `json:"field,omitempty"` Include TermsInclude `json:"include,omitempty"` MaxDocCount *int64 `json:"max_doc_count,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Missing Missing `json:"missing,omitempty"` Name *string `json:"name,omitempty"` Precision *Float64 `json:"precision,omitempty"` ValueType *string `json:"value_type,omitempty"` }
RareTermsAggregation type.
func NewRareTermsAggregation ¶
func NewRareTermsAggregation() *RareTermsAggregation
NewRareTermsAggregation returns a RareTermsAggregation.
type RateAggregate ¶
type RateAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Value Float64 `json:"value"` ValueAsString *string `json:"value_as_string,omitempty"` }
RateAggregate type.
func NewRateAggregate ¶
func NewRateAggregate() *RateAggregate
NewRateAggregate returns a RateAggregate.
type RateAggregation ¶
type RateAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Mode *ratemode.RateMode `json:"mode,omitempty"` Script Script `json:"script,omitempty"` Unit *calendarinterval.CalendarInterval `json:"unit,omitempty"` }
RateAggregation type.
func NewRateAggregation ¶
func NewRateAggregation() *RateAggregation
NewRateAggregation returns a RateAggregation.
type ReadException ¶
type ReadException struct { Exception ErrorCause `json:"exception"` FromSeqNo int64 `json:"from_seq_no"` Retries int `json:"retries"` }
ReadException type.
func NewReadException ¶
func NewReadException() *ReadException
NewReadException returns a ReadException.
type Recording ¶
type Recording struct { CumulativeExecutionCount *int64 `json:"cumulative_execution_count,omitempty"` CumulativeExecutionTime Duration `json:"cumulative_execution_time,omitempty"` CumulativeExecutionTimeMillis *int64 `json:"cumulative_execution_time_millis,omitempty"` Name *string `json:"name,omitempty"` }
Recording type.
type RecoveryBytes ¶
type RecoveryBytes struct { Percent Percentage `json:"percent"` Recovered ByteSize `json:"recovered,omitempty"` RecoveredFromSnapshot ByteSize `json:"recovered_from_snapshot,omitempty"` RecoveredFromSnapshotInBytes ByteSize `json:"recovered_from_snapshot_in_bytes,omitempty"` RecoveredInBytes ByteSize `json:"recovered_in_bytes"` Reused ByteSize `json:"reused,omitempty"` ReusedInBytes ByteSize `json:"reused_in_bytes"` Total ByteSize `json:"total,omitempty"` TotalInBytes ByteSize `json:"total_in_bytes"` }
RecoveryBytes type.
func NewRecoveryBytes ¶
func NewRecoveryBytes() *RecoveryBytes
NewRecoveryBytes returns a RecoveryBytes.
type RecoveryFiles ¶
type RecoveryFiles struct { Details []FileDetails `json:"details,omitempty"` Percent Percentage `json:"percent"` Recovered int64 `json:"recovered"` Reused int64 `json:"reused"` Total int64 `json:"total"` }
RecoveryFiles type.
func NewRecoveryFiles ¶
func NewRecoveryFiles() *RecoveryFiles
NewRecoveryFiles returns a RecoveryFiles.
type RecoveryIndexStatus ¶
type RecoveryIndexStatus struct { Bytes *RecoveryBytes `json:"bytes,omitempty"` Files RecoveryFiles `json:"files"` Size RecoveryBytes `json:"size"` SourceThrottleTime Duration `json:"source_throttle_time,omitempty"` SourceThrottleTimeInMillis int64 `json:"source_throttle_time_in_millis"` TargetThrottleTime Duration `json:"target_throttle_time,omitempty"` TargetThrottleTimeInMillis int64 `json:"target_throttle_time_in_millis"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` }
RecoveryIndexStatus type.
func NewRecoveryIndexStatus ¶
func NewRecoveryIndexStatus() *RecoveryIndexStatus
NewRecoveryIndexStatus returns a RecoveryIndexStatus.
type RecoveryOrigin ¶
type RecoveryOrigin struct { BootstrapNewHistoryUuid *bool `json:"bootstrap_new_history_uuid,omitempty"` Host *string `json:"host,omitempty"` Hostname *string `json:"hostname,omitempty"` Id *string `json:"id,omitempty"` Index *string `json:"index,omitempty"` Ip *string `json:"ip,omitempty"` Name *string `json:"name,omitempty"` Repository *string `json:"repository,omitempty"` RestoreUUID *string `json:"restoreUUID,omitempty"` Snapshot *string `json:"snapshot,omitempty"` TransportAddress *string `json:"transport_address,omitempty"` Version *string `json:"version,omitempty"` }
RecoveryOrigin type.
func NewRecoveryOrigin ¶
func NewRecoveryOrigin() *RecoveryOrigin
NewRecoveryOrigin returns a RecoveryOrigin.
type RecoveryRecord ¶
type RecoveryRecord struct { // Bytes number of bytes to recover Bytes *string `json:"bytes,omitempty"` // BytesPercent percent of bytes recovered BytesPercent Percentage `json:"bytes_percent,omitempty"` // BytesRecovered bytes recovered BytesRecovered *string `json:"bytes_recovered,omitempty"` // BytesTotal total number of bytes BytesTotal *string `json:"bytes_total,omitempty"` // Files number of files to recover Files *string `json:"files,omitempty"` // FilesPercent percent of files recovered FilesPercent Percentage `json:"files_percent,omitempty"` // FilesRecovered files recovered FilesRecovered *string `json:"files_recovered,omitempty"` // FilesTotal total number of files FilesTotal *string `json:"files_total,omitempty"` // Index index name Index *string `json:"index,omitempty"` // Repository repository Repository *string `json:"repository,omitempty"` // Shard shard name Shard *string `json:"shard,omitempty"` // Snapshot snapshot Snapshot *string `json:"snapshot,omitempty"` // SourceHost source host SourceHost *string `json:"source_host,omitempty"` // SourceNode source node name SourceNode *string `json:"source_node,omitempty"` // Stage recovery stage Stage *string `json:"stage,omitempty"` // StartTime recovery start time StartTime DateTime `json:"start_time,omitempty"` // StartTimeMillis recovery start time in epoch milliseconds StartTimeMillis *int64 `json:"start_time_millis,omitempty"` // StopTime recovery stop time StopTime DateTime `json:"stop_time,omitempty"` // StopTimeMillis recovery stop time in epoch milliseconds StopTimeMillis *int64 `json:"stop_time_millis,omitempty"` // TargetHost target host TargetHost *string `json:"target_host,omitempty"` // TargetNode target node name TargetNode *string `json:"target_node,omitempty"` // Time recovery time Time Duration `json:"time,omitempty"` // TranslogOps number of translog ops to recover TranslogOps *string `json:"translog_ops,omitempty"` // TranslogOpsPercent percent of translog ops recovered TranslogOpsPercent Percentage `json:"translog_ops_percent,omitempty"` // TranslogOpsRecovered translog ops recovered TranslogOpsRecovered *string `json:"translog_ops_recovered,omitempty"` // Type recovery type Type *string `json:"type,omitempty"` }
RecoveryRecord type.
func NewRecoveryRecord ¶
func NewRecoveryRecord() *RecoveryRecord
NewRecoveryRecord returns a RecoveryRecord.
type RecoveryStartStatus ¶
type RecoveryStartStatus struct { CheckIndexTime Duration `json:"check_index_time,omitempty"` CheckIndexTimeInMillis int64 `json:"check_index_time_in_millis"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` }
RecoveryStartStatus type.
func NewRecoveryStartStatus ¶
func NewRecoveryStartStatus() *RecoveryStartStatus
NewRecoveryStartStatus returns a RecoveryStartStatus.
type RecoveryStats ¶
type RecoveryStats struct { CurrentAsSource int64 `json:"current_as_source"` CurrentAsTarget int64 `json:"current_as_target"` ThrottleTime Duration `json:"throttle_time,omitempty"` ThrottleTimeInMillis int64 `json:"throttle_time_in_millis"` }
RecoveryStats type.
func NewRecoveryStats ¶
func NewRecoveryStats() *RecoveryStats
NewRecoveryStats returns a RecoveryStats.
type RecoveryStatus ¶
type RecoveryStatus struct {
Shards []ShardRecovery `json:"shards"`
}
RecoveryStatus type.
func NewRecoveryStatus ¶
func NewRecoveryStatus() *RecoveryStatus
NewRecoveryStatus returns a RecoveryStatus.
type RefreshStats ¶
type RefreshStats struct { ExternalTotal int64 `json:"external_total"` ExternalTotalTimeInMillis int64 `json:"external_total_time_in_millis"` Listeners int64 `json:"listeners"` Total int64 `json:"total"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` }
RefreshStats type.
func NewRefreshStats ¶
func NewRefreshStats() *RefreshStats
NewRefreshStats returns a RefreshStats.
type RegexpQuery ¶
type RegexpQuery struct { Boost *float32 `json:"boost,omitempty"` CaseInsensitive *bool `json:"case_insensitive,omitempty"` Flags *string `json:"flags,omitempty"` MaxDeterminizedStates *int `json:"max_determinized_states,omitempty"` QueryName_ *string `json:"_name,omitempty"` Rewrite *string `json:"rewrite,omitempty"` Value string `json:"value"` }
RegexpQuery type.
type RegressionInferenceOptions ¶
type RegressionInferenceOptions struct { // NumTopFeatureImportanceValues Specifies the maximum number of feature importance values per document. NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` }
RegressionInferenceOptions type.
func NewRegressionInferenceOptions ¶
func NewRegressionInferenceOptions() *RegressionInferenceOptions
NewRegressionInferenceOptions returns a RegressionInferenceOptions.
type ReindexDestination ¶
type ReindexDestination struct { Index string `json:"index"` OpType *optype.OpType `json:"op_type,omitempty"` Pipeline *string `json:"pipeline,omitempty"` Routing *string `json:"routing,omitempty"` VersionType *versiontype.VersionType `json:"version_type,omitempty"` }
ReindexDestination type.
func NewReindexDestination ¶
func NewReindexDestination() *ReindexDestination
NewReindexDestination returns a ReindexDestination.
type ReindexNode ¶
type ReindexNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` Ip string `json:"ip"` Name string `json:"name"` Roles []noderole.NodeRole `json:"roles,omitempty"` Tasks map[TaskId]ReindexTask `json:"tasks"` TransportAddress string `json:"transport_address"` }
ReindexNode type.
type ReindexSource ¶
type ReindexSource struct { Index []string `json:"index"` Query *Query `json:"query,omitempty"` Remote *RemoteSource `json:"remote,omitempty"` RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` Size *int `json:"size,omitempty"` Slice *SlicedScroll `json:"slice,omitempty"` Sort []SortCombinations `json:"sort,omitempty"` SourceFields_ []string `json:"_source,omitempty"` }
ReindexSource type.
func NewReindexSource ¶
func NewReindexSource() *ReindexSource
NewReindexSource returns a ReindexSource.
type ReindexStatus ¶
type ReindexStatus struct { Batches int64 `json:"batches"` Created int64 `json:"created"` Deleted int64 `json:"deleted"` Noops int64 `json:"noops"` RequestsPerSecond float32 `json:"requests_per_second"` Retries Retries `json:"retries"` Throttled Duration `json:"throttled,omitempty"` ThrottledMillis int64 `json:"throttled_millis"` ThrottledUntil Duration `json:"throttled_until,omitempty"` ThrottledUntilMillis int64 `json:"throttled_until_millis"` Total int64 `json:"total"` Updated int64 `json:"updated"` VersionConflicts int64 `json:"version_conflicts"` }
ReindexStatus type.
func NewReindexStatus ¶
func NewReindexStatus() *ReindexStatus
NewReindexStatus returns a ReindexStatus.
type ReindexTask ¶
type ReindexTask struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` Description string `json:"description"` Headers map[string][]string `json:"headers"` Id int64 `json:"id"` Node string `json:"node"` RunningTimeInNanos int64 `json:"running_time_in_nanos"` StartTimeInMillis int64 `json:"start_time_in_millis"` Status ReindexStatus `json:"status"` Type string `json:"type"` }
ReindexTask type.
type ReloadDetails ¶
type ReloadDetails struct { Index string `json:"index"` ReloadedAnalyzers []string `json:"reloaded_analyzers"` ReloadedNodeIds []string `json:"reloaded_node_ids"` }
ReloadDetails type.
func NewReloadDetails ¶
func NewReloadDetails() *ReloadDetails
NewReloadDetails returns a ReloadDetails.
type RelocationFailureInfo ¶
type RelocationFailureInfo struct {
FailedAttempts int `json:"failed_attempts"`
}
RelocationFailureInfo type.
func NewRelocationFailureInfo ¶
func NewRelocationFailureInfo() *RelocationFailureInfo
NewRelocationFailureInfo returns a RelocationFailureInfo.
type RemoteSource ¶
type RemoteSource struct { ConnectTimeout Duration `json:"connect_timeout,omitempty"` Headers map[string]string `json:"headers,omitempty"` Host string `json:"host"` Password *string `json:"password,omitempty"` SocketTimeout Duration `json:"socket_timeout,omitempty"` Username *string `json:"username,omitempty"` }
RemoteSource type.
func NewRemoteSource ¶
func NewRemoteSource() *RemoteSource
NewRemoteSource returns a RemoteSource.
type RemoveAction ¶
type RemoveAction struct { Alias *string `json:"alias,omitempty"` Aliases []string `json:"aliases,omitempty"` Index *string `json:"index,omitempty"` Indices []string `json:"indices,omitempty"` MustExist *bool `json:"must_exist,omitempty"` }
RemoveAction type.
func NewRemoveAction ¶
func NewRemoveAction() *RemoveAction
NewRemoveAction returns a RemoveAction.
type RemoveDuplicatesTokenFilter ¶
type RemoveDuplicatesTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
RemoveDuplicatesTokenFilter type.
func NewRemoveDuplicatesTokenFilter ¶
func NewRemoveDuplicatesTokenFilter() *RemoveDuplicatesTokenFilter
NewRemoveDuplicatesTokenFilter returns a RemoveDuplicatesTokenFilter.
type RemoveIndexAction ¶
type RemoveIndexAction struct { Index *string `json:"index,omitempty"` Indices []string `json:"indices,omitempty"` MustExist *bool `json:"must_exist,omitempty"` }
RemoveIndexAction type.
func NewRemoveIndexAction ¶
func NewRemoveIndexAction() *RemoveIndexAction
NewRemoveIndexAction returns a RemoveIndexAction.
type RemoveProcessor ¶
type RemoveProcessor struct { Description *string `json:"description,omitempty"` Field []string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` }
RemoveProcessor type.
func NewRemoveProcessor ¶
func NewRemoveProcessor() *RemoveProcessor
NewRemoveProcessor returns a RemoveProcessor.
type RenameProcessor ¶
type RenameProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField string `json:"target_field"` }
RenameProcessor type.
func NewRenameProcessor ¶
func NewRenameProcessor() *RenameProcessor
NewRenameProcessor returns a RenameProcessor.
type ReportingEmailAttachment ¶
type ReportingEmailAttachment struct { Inline *bool `json:"inline,omitempty"` Interval Duration `json:"interval,omitempty"` Request *HttpInputRequestDefinition `json:"request,omitempty"` Retries *int `json:"retries,omitempty"` Url string `json:"url"` }
ReportingEmailAttachment type.
func NewReportingEmailAttachment ¶
func NewReportingEmailAttachment() *ReportingEmailAttachment
NewReportingEmailAttachment returns a ReportingEmailAttachment.
type RepositoriesRecord ¶
type RepositoriesRecord struct { // Id unique repository id Id *string `json:"id,omitempty"` // Type repository type Type *string `json:"type,omitempty"` }
RepositoriesRecord type.
func NewRepositoriesRecord ¶
func NewRepositoriesRecord() *RepositoriesRecord
NewRepositoriesRecord returns a RepositoriesRecord.
type Repository ¶
type Repository struct { Settings RepositorySettings `json:"settings"` Type string `json:"type"` Uuid *string `json:"uuid,omitempty"` }
Repository type.
type RepositoryLocation ¶
type RepositoryLocation struct { BasePath string `json:"base_path"` // Bucket Bucket name (GCP, S3) Bucket *string `json:"bucket,omitempty"` // Container Container name (Azure) Container *string `json:"container,omitempty"` }
RepositoryLocation type.
func NewRepositoryLocation ¶
func NewRepositoryLocation() *RepositoryLocation
NewRepositoryLocation returns a RepositoryLocation.
type RepositoryMeteringInformation ¶
type RepositoryMeteringInformation struct { // Archived A flag that tells whether or not this object has been archived. When a // repository is closed or updated the // repository metering information is archived and kept for a certain period of // time. This allows retrieving the // repository metering information of previous repository instantiations. Archived bool `json:"archived"` // ClusterVersion The cluster state version when this object was archived, this field can be // used as a logical timestamp to delete // all the archived metrics up to an observed version. This field is only // present for archived repository metering // information objects. The main purpose of this field is to avoid possible race // conditions during repository metering // information deletions, i.e. deleting archived repositories metering // information that we haven’t observed yet. ClusterVersion *int64 `json:"cluster_version,omitempty"` // RepositoryEphemeralId An identifier that changes every time the repository is updated. RepositoryEphemeralId string `json:"repository_ephemeral_id"` // RepositoryLocation Represents a unique location within the repository. RepositoryLocation RepositoryLocation `json:"repository_location"` // RepositoryName Repository name. RepositoryName string `json:"repository_name"` // RepositoryStartedAt Time the repository was created or updated. Recorded in milliseconds since // the Unix Epoch. RepositoryStartedAt int64 `json:"repository_started_at"` // RepositoryStoppedAt Time the repository was deleted or updated. Recorded in milliseconds since // the Unix Epoch. RepositoryStoppedAt *int64 `json:"repository_stopped_at,omitempty"` // RepositoryType Repository type. RepositoryType string `json:"repository_type"` // RequestCounts An object with the number of requests performed against the repository grouped // by request type. RequestCounts RequestCounts `json:"request_counts"` }
RepositoryMeteringInformation type.
func NewRepositoryMeteringInformation ¶
func NewRepositoryMeteringInformation() *RepositoryMeteringInformation
NewRepositoryMeteringInformation returns a RepositoryMeteringInformation.
type RepositorySettings ¶
type RepositorySettings struct { ChunkSize *string `json:"chunk_size,omitempty"` Compress string `json:"compress,omitempty"` ConcurrentStreams string `json:"concurrent_streams,omitempty"` Location string `json:"location"` ReadOnly string `json:"read_only,omitempty"` }
RepositorySettings type.
func NewRepositorySettings ¶
func NewRepositorySettings() *RepositorySettings
NewRepositorySettings returns a RepositorySettings.
type RequestCacheStats ¶
type RequestCacheStats struct { Evictions int64 `json:"evictions"` HitCount int64 `json:"hit_count"` MemorySize *string `json:"memory_size,omitempty"` MemorySizeInBytes int64 `json:"memory_size_in_bytes"` MissCount int64 `json:"miss_count"` }
RequestCacheStats type.
func NewRequestCacheStats ¶
func NewRequestCacheStats() *RequestCacheStats
NewRequestCacheStats returns a RequestCacheStats.
type RequestCounts ¶
type RequestCounts struct { // GetBlob Number of Get Blob requests (Azure) GetBlob *int64 `json:"GetBlob,omitempty"` // GetBlobProperties Number of Get Blob Properties requests (Azure) GetBlobProperties *int64 `json:"GetBlobProperties,omitempty"` // GetObject Number of get object requests (GCP, S3) GetObject *int64 `json:"GetObject,omitempty"` // InsertObject Number of insert object requests, including simple, multipart and resumable // uploads. Resumable uploads // can perform multiple http requests to insert a single object but they are // considered as a single request // since they are billed as an individual operation. (GCP) InsertObject *int64 `json:"InsertObject,omitempty"` // ListBlobs Number of List Blobs requests (Azure) ListBlobs *int64 `json:"ListBlobs,omitempty"` // ListObjects Number of list objects requests (GCP, S3) ListObjects *int64 `json:"ListObjects,omitempty"` // PutBlob Number of Put Blob requests (Azure) PutBlob *int64 `json:"PutBlob,omitempty"` // PutBlock Number of Put Block (Azure) PutBlock *int64 `json:"PutBlock,omitempty"` // PutBlockList Number of Put Block List requests PutBlockList *int64 `json:"PutBlockList,omitempty"` // PutMultipartObject Number of Multipart requests, including CreateMultipartUpload, UploadPart and // CompleteMultipartUpload requests (S3) PutMultipartObject *int64 `json:"PutMultipartObject,omitempty"` // PutObject Number of PutObject requests (S3) PutObject *int64 `json:"PutObject,omitempty"` }
RequestCounts type.
func NewRequestCounts ¶
func NewRequestCounts() *RequestCounts
NewRequestCounts returns a RequestCounts.
type RerouteDecision ¶
type RerouteDecision struct { Decider string `json:"decider"` Decision string `json:"decision"` Explanation string `json:"explanation"` }
RerouteDecision type.
func NewRerouteDecision ¶
func NewRerouteDecision() *RerouteDecision
NewRerouteDecision returns a RerouteDecision.
type RerouteExplanation ¶
type RerouteExplanation struct { Command string `json:"command"` Decisions []RerouteDecision `json:"decisions"` Parameters RerouteParameters `json:"parameters"` }
RerouteExplanation type.
func NewRerouteExplanation ¶
func NewRerouteExplanation() *RerouteExplanation
NewRerouteExplanation returns a RerouteExplanation.
type RerouteParameters ¶
type RerouteParameters struct { AllowPrimary bool `json:"allow_primary"` FromNode *string `json:"from_node,omitempty"` Index string `json:"index"` Node string `json:"node"` Shard int `json:"shard"` ToNode *string `json:"to_node,omitempty"` }
RerouteParameters type.
func NewRerouteParameters ¶
func NewRerouteParameters() *RerouteParameters
NewRerouteParameters returns a RerouteParameters.
type Rescore ¶
type Rescore struct { Query RescoreQuery `json:"query"` WindowSize *int `json:"window_size,omitempty"` }
Rescore type.
type RescoreQuery ¶
type RescoreQuery struct { Query Query `json:"rescore_query"` QueryWeight *Float64 `json:"query_weight,omitempty"` RescoreQueryWeight *Float64 `json:"rescore_query_weight,omitempty"` ScoreMode *scoremode.ScoreMode `json:"score_mode,omitempty"` }
RescoreQuery type.
func NewRescoreQuery ¶
func NewRescoreQuery() *RescoreQuery
NewRescoreQuery returns a RescoreQuery.
type ReservedSize ¶
type ReservedSize struct { NodeId string `json:"node_id"` Path string `json:"path"` Shards []string `json:"shards"` Total int64 `json:"total"` }
ReservedSize type.
func NewReservedSize ¶
func NewReservedSize() *ReservedSize
NewReservedSize returns a ReservedSize.
type ResolveIndexAliasItem ¶
ResolveIndexAliasItem type.
func NewResolveIndexAliasItem ¶
func NewResolveIndexAliasItem() *ResolveIndexAliasItem
NewResolveIndexAliasItem returns a ResolveIndexAliasItem.
type ResolveIndexDataStreamsItem ¶
type ResolveIndexDataStreamsItem struct { BackingIndices []string `json:"backing_indices"` Name string `json:"name"` TimestampField string `json:"timestamp_field"` }
ResolveIndexDataStreamsItem type.
func NewResolveIndexDataStreamsItem ¶
func NewResolveIndexDataStreamsItem() *ResolveIndexDataStreamsItem
NewResolveIndexDataStreamsItem returns a ResolveIndexDataStreamsItem.
type ResolveIndexItem ¶
type ResolveIndexItem struct { Aliases []string `json:"aliases,omitempty"` Attributes []string `json:"attributes"` DataStream *string `json:"data_stream,omitempty"` Name string `json:"name"` }
ResolveIndexItem type.
func NewResolveIndexItem ¶
func NewResolveIndexItem() *ResolveIndexItem
NewResolveIndexItem returns a ResolveIndexItem.
type ResourcePrivileges ¶
ResourcePrivileges type alias.
type ResponseBody ¶
type ResponseBody struct { Aggregations map[string]Aggregate `json:"aggregations,omitempty"` Clusters_ *ClusterStatistics `json:"_clusters,omitempty"` Fields map[string]json.RawMessage `json:"fields,omitempty"` Hits HitsMetadata `json:"hits"` MaxScore *Float64 `json:"max_score,omitempty"` NumReducePhases *int64 `json:"num_reduce_phases,omitempty"` PitId *string `json:"pit_id,omitempty"` Profile *Profile `json:"profile,omitempty"` ScrollId_ *string `json:"_scroll_id,omitempty"` Shards_ ShardStatistics `json:"_shards"` Suggest map[string][]Suggest `json:"suggest,omitempty"` TerminatedEarly *bool `json:"terminated_early,omitempty"` TimedOut bool `json:"timed_out"` Took int64 `json:"took"` }
ResponseBody type.
func NewResponseBody ¶
func NewResponseBody() *ResponseBody
NewResponseBody returns a ResponseBody.
func (*ResponseBody) UnmarshalJSON ¶
func (s *ResponseBody) UnmarshalJSON(data []byte) error
type ResponseItem ¶
type ResponseItem interface{}
ResponseItem holds the union for the following types:
GetResult MultiGetError
type Retention ¶
type Retention struct { // ExpireAfter Time period after which a snapshot is considered expired and eligible for // deletion. SLM deletes expired snapshots based on the slm.retention_schedule. ExpireAfter Duration `json:"expire_after"` // MaxCount Maximum number of snapshots to retain, even if the snapshots have not yet // expired. If the number of snapshots in the repository exceeds this limit, the // policy retains the most recent snapshots and deletes older snapshots. MaxCount int `json:"max_count"` // MinCount Minimum number of snapshots to retain, even if the snapshots have expired. MinCount int `json:"min_count"` }
Retention type.
type RetentionLease ¶
type RetentionLease struct {
Period Duration `json:"period"`
}
RetentionLease type.
func NewRetentionLease ¶
func NewRetentionLease() *RetentionLease
NewRetentionLease returns a RetentionLease.
type RetentionPolicy ¶
type RetentionPolicy struct { // Field The date field that is used to calculate the age of the document. Field string `json:"field"` // MaxAge Specifies the maximum age of a document in the destination index. Documents // that are older than the configured // value are removed from the destination index. MaxAge Duration `json:"max_age"` }
RetentionPolicy type.
func NewRetentionPolicy ¶
func NewRetentionPolicy() *RetentionPolicy
NewRetentionPolicy returns a RetentionPolicy.
type RetentionPolicyContainer ¶
type RetentionPolicyContainer struct { // Time Specifies that the transform uses a time field to set the retention policy. Time *RetentionPolicy `json:"time,omitempty"` }
RetentionPolicyContainer type.
func NewRetentionPolicyContainer ¶
func NewRetentionPolicyContainer() *RetentionPolicyContainer
NewRetentionPolicyContainer returns a RetentionPolicyContainer.
type ReverseNestedAggregate ¶
type ReverseNestedAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
ReverseNestedAggregate type.
func NewReverseNestedAggregate ¶
func NewReverseNestedAggregate() *ReverseNestedAggregate
NewReverseNestedAggregate returns a ReverseNestedAggregate.
func (ReverseNestedAggregate) MarshalJSON ¶
func (s ReverseNestedAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*ReverseNestedAggregate) UnmarshalJSON ¶
func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error
type ReverseNestedAggregation ¶
type ReverseNestedAggregation struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Path *string `json:"path,omitempty"` }
ReverseNestedAggregation type.
func NewReverseNestedAggregation ¶
func NewReverseNestedAggregation() *ReverseNestedAggregation
NewReverseNestedAggregation returns a ReverseNestedAggregation.
type ReverseTokenFilter ¶
type ReverseTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
ReverseTokenFilter type.
func NewReverseTokenFilter ¶
func NewReverseTokenFilter() *ReverseTokenFilter
NewReverseTokenFilter returns a ReverseTokenFilter.
type Role ¶
type Role struct { Applications []ApplicationPrivileges `json:"applications"` Cluster []string `json:"cluster"` Global map[string]map[string]map[string][]string `json:"global,omitempty"` Indices []IndicesPrivileges `json:"indices"` Metadata map[string]json.RawMessage `json:"metadata"` RoleTemplates []RoleTemplate `json:"role_templates,omitempty"` RunAs []string `json:"run_as"` TransientMetadata TransientMetadataConfig `json:"transient_metadata"` }
Role type.
type RoleDescriptor ¶
type RoleDescriptor struct { Applications []ApplicationPrivileges `json:"applications,omitempty"` Cluster []string `json:"cluster,omitempty"` Global []GlobalPrivilege `json:"global,omitempty"` Indices []IndicesPrivileges `json:"indices,omitempty"` Metadata map[string]json.RawMessage `json:"metadata,omitempty"` RunAs []string `json:"run_as,omitempty"` TransientMetadata *TransientMetadataConfig `json:"transient_metadata,omitempty"` }
RoleDescriptor type.
func NewRoleDescriptor ¶
func NewRoleDescriptor() *RoleDescriptor
NewRoleDescriptor returns a RoleDescriptor.
type RoleDescriptorRead ¶
type RoleDescriptorRead struct { Applications []ApplicationPrivileges `json:"applications,omitempty"` Cluster []string `json:"cluster"` Global []GlobalPrivilege `json:"global,omitempty"` Indices []IndicesPrivileges `json:"indices"` Metadata map[string]json.RawMessage `json:"metadata,omitempty"` RunAs []string `json:"run_as,omitempty"` TransientMetadata *TransientMetadataConfig `json:"transient_metadata,omitempty"` }
RoleDescriptorRead type.
func NewRoleDescriptorRead ¶
func NewRoleDescriptorRead() *RoleDescriptorRead
NewRoleDescriptorRead returns a RoleDescriptorRead.
type RoleDescriptorWrapper ¶
type RoleDescriptorWrapper struct {
RoleDescriptor RoleDescriptorRead `json:"role_descriptor"`
}
RoleDescriptorWrapper type.
func NewRoleDescriptorWrapper ¶
func NewRoleDescriptorWrapper() *RoleDescriptorWrapper
NewRoleDescriptorWrapper returns a RoleDescriptorWrapper.
type RoleMappingRule ¶
type RoleMappingRule struct { All []RoleMappingRule `json:"all,omitempty"` Any []RoleMappingRule `json:"any,omitempty"` Except *RoleMappingRule `json:"except,omitempty"` Field *FieldRule `json:"field,omitempty"` }
RoleMappingRule type.
func NewRoleMappingRule ¶
func NewRoleMappingRule() *RoleMappingRule
NewRoleMappingRule returns a RoleMappingRule.
type RoleTemplate ¶
type RoleTemplate struct { Format *templateformat.TemplateFormat `json:"format,omitempty"` Template Script `json:"template"` }
RoleTemplate type.
func NewRoleTemplate ¶
func NewRoleTemplate() *RoleTemplate
NewRoleTemplate returns a RoleTemplate.
type RoleTemplateInlineQuery ¶
type RoleTemplateInlineQuery interface{}
RoleTemplateInlineQuery holds the union for the following types:
string Query
type RoleTemplateInlineScript ¶
type RoleTemplateInlineScript struct { Lang *scriptlanguage.ScriptLanguage `json:"lang,omitempty"` Options map[string]string `json:"options,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` Source RoleTemplateInlineQuery `json:"source"` }
RoleTemplateInlineScript type.
func NewRoleTemplateInlineScript ¶
func NewRoleTemplateInlineScript() *RoleTemplateInlineScript
NewRoleTemplateInlineScript returns a RoleTemplateInlineScript.
func (*RoleTemplateInlineScript) UnmarshalJSON ¶
func (s *RoleTemplateInlineScript) UnmarshalJSON(data []byte) error
type RoleTemplateQuery ¶
type RoleTemplateQuery struct { // Template When you create a role, you can specify a query that defines the document // level security permissions. You can optionally // use Mustache templates in the role query to insert the username of the // current authenticated user into the role. // Like other places in Elasticsearch that support templating or scripting, you // can specify inline, stored, or file-based // templates and define custom parameters. You access the details for the // current authenticated user through the _user parameter. Template RoleTemplateScript `json:"template,omitempty"` }
RoleTemplateQuery type.
func NewRoleTemplateQuery ¶
func NewRoleTemplateQuery() *RoleTemplateQuery
NewRoleTemplateQuery returns a RoleTemplateQuery.
type RoleTemplateScript ¶
type RoleTemplateScript interface{}
RoleTemplateScript holds the union for the following types:
RoleTemplateInlineScript StoredScriptId
type RolloverConditions ¶
type RolloverConditions struct { MaxAge Duration `json:"max_age,omitempty"` MaxAgeMillis *int64 `json:"max_age_millis,omitempty"` MaxDocs *int64 `json:"max_docs,omitempty"` MaxPrimaryShardDocs *int64 `json:"max_primary_shard_docs,omitempty"` MaxPrimaryShardSize ByteSize `json:"max_primary_shard_size,omitempty"` MaxPrimaryShardSizeBytes *int64 `json:"max_primary_shard_size_bytes,omitempty"` MaxSize ByteSize `json:"max_size,omitempty"` MaxSizeBytes *int64 `json:"max_size_bytes,omitempty"` MinAge Duration `json:"min_age,omitempty"` MinDocs *int64 `json:"min_docs,omitempty"` MinPrimaryShardDocs *int64 `json:"min_primary_shard_docs,omitempty"` MinPrimaryShardSize ByteSize `json:"min_primary_shard_size,omitempty"` MinPrimaryShardSizeBytes *int64 `json:"min_primary_shard_size_bytes,omitempty"` MinSize ByteSize `json:"min_size,omitempty"` MinSizeBytes *int64 `json:"min_size_bytes,omitempty"` }
RolloverConditions type.
func NewRolloverConditions ¶
func NewRolloverConditions() *RolloverConditions
NewRolloverConditions returns a RolloverConditions.
type RollupCapabilities ¶
type RollupCapabilities struct {
RollupJobs []RollupCapabilitySummary `json:"rollup_jobs"`
}
RollupCapabilities type.
func NewRollupCapabilities ¶
func NewRollupCapabilities() *RollupCapabilities
NewRollupCapabilities returns a RollupCapabilities.
type RollupCapabilitySummary ¶
type RollupCapabilitySummary struct { Fields map[string][]RollupFieldSummary `json:"fields"` IndexPattern string `json:"index_pattern"` JobId string `json:"job_id"` RollupIndex string `json:"rollup_index"` }
RollupCapabilitySummary type.
func NewRollupCapabilitySummary ¶
func NewRollupCapabilitySummary() *RollupCapabilitySummary
NewRollupCapabilitySummary returns a RollupCapabilitySummary.
type RollupFieldSummary ¶
type RollupFieldSummary struct { Agg string `json:"agg"` CalendarInterval Duration `json:"calendar_interval,omitempty"` TimeZone *string `json:"time_zone,omitempty"` }
RollupFieldSummary type.
func NewRollupFieldSummary ¶
func NewRollupFieldSummary() *RollupFieldSummary
NewRollupFieldSummary returns a RollupFieldSummary.
type RollupJob ¶
type RollupJob struct { Config RollupJobConfiguration `json:"config"` Stats RollupJobStats `json:"stats"` Status RollupJobStatus `json:"status"` }
RollupJob type.
type RollupJobConfiguration ¶
type RollupJobConfiguration struct { Cron string `json:"cron"` Groups Groupings `json:"groups"` Id string `json:"id"` IndexPattern string `json:"index_pattern"` Metrics []FieldMetric `json:"metrics"` PageSize int64 `json:"page_size"` RollupIndex string `json:"rollup_index"` Timeout Duration `json:"timeout"` }
RollupJobConfiguration type.
func NewRollupJobConfiguration ¶
func NewRollupJobConfiguration() *RollupJobConfiguration
NewRollupJobConfiguration returns a RollupJobConfiguration.
type RollupJobStats ¶
type RollupJobStats struct { DocumentsProcessed int64 `json:"documents_processed"` IndexFailures int64 `json:"index_failures"` IndexTimeInMs int64 `json:"index_time_in_ms"` IndexTotal int64 `json:"index_total"` PagesProcessed int64 `json:"pages_processed"` ProcessingTimeInMs int64 `json:"processing_time_in_ms"` ProcessingTotal int64 `json:"processing_total"` RollupsIndexed int64 `json:"rollups_indexed"` SearchFailures int64 `json:"search_failures"` SearchTimeInMs int64 `json:"search_time_in_ms"` SearchTotal int64 `json:"search_total"` TriggerCount int64 `json:"trigger_count"` }
RollupJobStats type.
func NewRollupJobStats ¶
func NewRollupJobStats() *RollupJobStats
NewRollupJobStats returns a RollupJobStats.
type RollupJobStatus ¶
type RollupJobStatus struct { CurrentPosition map[string]json.RawMessage `json:"current_position,omitempty"` JobState indexingjobstate.IndexingJobState `json:"job_state"` UpgradedDocId *bool `json:"upgraded_doc_id,omitempty"` }
RollupJobStatus type.
func NewRollupJobStatus ¶
func NewRollupJobStatus() *RollupJobStatus
NewRollupJobStatus returns a RollupJobStatus.
type RollupJobSummary ¶
type RollupJobSummary struct { Fields map[string][]RollupJobSummaryField `json:"fields"` IndexPattern string `json:"index_pattern"` JobId string `json:"job_id"` RollupIndex string `json:"rollup_index"` }
RollupJobSummary type.
func NewRollupJobSummary ¶
func NewRollupJobSummary() *RollupJobSummary
NewRollupJobSummary returns a RollupJobSummary.
type RollupJobSummaryField ¶
type RollupJobSummaryField struct { Agg string `json:"agg"` CalendarInterval Duration `json:"calendar_interval,omitempty"` TimeZone *string `json:"time_zone,omitempty"` }
RollupJobSummaryField type.
func NewRollupJobSummaryField ¶
func NewRollupJobSummaryField() *RollupJobSummaryField
NewRollupJobSummaryField returns a RollupJobSummaryField.
type RoutingField ¶
type RoutingField struct {
Required bool `json:"required"`
}
RoutingField type.
func NewRoutingField ¶
func NewRoutingField() *RoutingField
NewRoutingField returns a RoutingField.
type RuleCondition ¶
type RuleCondition struct { // AppliesTo Specifies the result property to which the condition applies. If your // detector uses `lat_long`, `metric`, `rare`, or `freq_rare` functions, you can // only specify conditions that apply to time. AppliesTo appliesto.AppliesTo `json:"applies_to"` // Operator Specifies the condition operator. The available options are greater than, // greater than or equals, less than, and less than or equals. Operator conditionoperator.ConditionOperator `json:"operator"` // Value The value that is compared against the `applies_to` field using the operator. Value Float64 `json:"value"` }
RuleCondition type.
func NewRuleCondition ¶
func NewRuleCondition() *RuleCondition
NewRuleCondition returns a RuleCondition.
type RunningStateSearchInterval ¶
type RunningStateSearchInterval struct { End Duration `json:"end,omitempty"` EndMs int64 `json:"end_ms"` Start Duration `json:"start,omitempty"` StartMs int64 `json:"start_ms"` }
RunningStateSearchInterval type.
func NewRunningStateSearchInterval ¶
func NewRunningStateSearchInterval() *RunningStateSearchInterval
NewRunningStateSearchInterval returns a RunningStateSearchInterval.
type RuntimeField ¶
type RuntimeField struct { // FetchFields For type `lookup` FetchFields []RuntimeFieldFetchFields `json:"fetch_fields,omitempty"` Format *string `json:"format,omitempty"` // InputField For type `lookup` InputField *string `json:"input_field,omitempty"` Script Script `json:"script,omitempty"` // TargetField For type `lookup` TargetField *string `json:"target_field,omitempty"` // TargetIndex For type `lookup` TargetIndex *string `json:"target_index,omitempty"` Type runtimefieldtype.RuntimeFieldType `json:"type"` }
RuntimeField type.
func NewRuntimeField ¶
func NewRuntimeField() *RuntimeField
NewRuntimeField returns a RuntimeField.
type RuntimeFieldFetchFields ¶
type RuntimeFieldFetchFields struct { Field string `json:"field"` Format *string `json:"format,omitempty"` }
RuntimeFieldFetchFields type.
func NewRuntimeFieldFetchFields ¶
func NewRuntimeFieldFetchFields() *RuntimeFieldFetchFields
NewRuntimeFieldFetchFields returns a RuntimeFieldFetchFields.
type RuntimeFieldsType ¶
type RuntimeFieldsType struct { CharsMax int64 `json:"chars_max"` CharsTotal int64 `json:"chars_total"` Count int64 `json:"count"` DocMax int64 `json:"doc_max"` DocTotal int64 `json:"doc_total"` IndexCount int64 `json:"index_count"` Lang []string `json:"lang"` LinesMax int64 `json:"lines_max"` LinesTotal int64 `json:"lines_total"` Name string `json:"name"` ScriptlessCount int64 `json:"scriptless_count"` ShadowedCount int64 `json:"shadowed_count"` SourceMax int64 `json:"source_max"` SourceTotal int64 `json:"source_total"` }
RuntimeFieldsType type.
func NewRuntimeFieldsType ¶
func NewRuntimeFieldsType() *RuntimeFieldsType
NewRuntimeFieldsType returns a RuntimeFieldsType.
type SLMPolicy ¶
type SLMPolicy struct { Config *Configuration `json:"config,omitempty"` Name string `json:"name"` Repository string `json:"repository"` Retention *Retention `json:"retention,omitempty"` Schedule string `json:"schedule"` }
SLMPolicy type.
type SampleDiversity ¶
type SampleDiversity struct { Field string `json:"field"` MaxDocsPerValue int `json:"max_docs_per_value"` }
SampleDiversity type.
func NewSampleDiversity ¶
func NewSampleDiversity() *SampleDiversity
NewSampleDiversity returns a SampleDiversity.
type SamplerAggregate ¶
type SamplerAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
SamplerAggregate type.
func NewSamplerAggregate ¶
func NewSamplerAggregate() *SamplerAggregate
NewSamplerAggregate returns a SamplerAggregate.
func (SamplerAggregate) MarshalJSON ¶
func (s SamplerAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*SamplerAggregate) UnmarshalJSON ¶
func (s *SamplerAggregate) UnmarshalJSON(data []byte) error
type SamplerAggregation ¶
type SamplerAggregation struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` ShardSize *int `json:"shard_size,omitempty"` }
SamplerAggregation type.
func NewSamplerAggregation ¶
func NewSamplerAggregation() *SamplerAggregation
NewSamplerAggregation returns a SamplerAggregation.
type ScaledFloatNumberProperty ¶
type ScaledFloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *Float64 `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` ScalingFactor *Float64 `json:"scaling_factor,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric For internal use by Elastic only. Marks the field as a time series metric. // Defaults to false. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
ScaledFloatNumberProperty type.
func NewScaledFloatNumberProperty ¶
func NewScaledFloatNumberProperty() *ScaledFloatNumberProperty
NewScaledFloatNumberProperty returns a ScaledFloatNumberProperty.
func (*ScaledFloatNumberProperty) UnmarshalJSON ¶
func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error
type ScheduleContainer ¶
type ScheduleContainer struct { Cron *string `json:"cron,omitempty"` Daily *DailySchedule `json:"daily,omitempty"` Hourly *HourlySchedule `json:"hourly,omitempty"` Interval Duration `json:"interval,omitempty"` Monthly []TimeOfMonth `json:"monthly,omitempty"` Weekly []TimeOfWeek `json:"weekly,omitempty"` Yearly []TimeOfYear `json:"yearly,omitempty"` }
ScheduleContainer type.
func NewScheduleContainer ¶
func NewScheduleContainer() *ScheduleContainer
NewScheduleContainer returns a ScheduleContainer.
type ScheduleTimeOfDay ¶
type ScheduleTimeOfDay interface{}
ScheduleTimeOfDay holds the union for the following types:
string HourAndMinute
type ScheduleTriggerEvent ¶
type ScheduleTriggerEvent struct { ScheduledTime DateTime `json:"scheduled_time"` TriggeredTime DateTime `json:"triggered_time,omitempty"` }
ScheduleTriggerEvent type.
func NewScheduleTriggerEvent ¶
func NewScheduleTriggerEvent() *ScheduleTriggerEvent
NewScheduleTriggerEvent returns a ScheduleTriggerEvent.
type Script ¶
type Script interface{}
Script holds the union for the following types:
InlineScript StoredScriptId
type ScriptCache ¶
type ScriptCache struct { CacheEvictions *int64 `json:"cache_evictions,omitempty"` CompilationLimitTriggered *int64 `json:"compilation_limit_triggered,omitempty"` Compilations *int64 `json:"compilations,omitempty"` Context *string `json:"context,omitempty"` }
ScriptCache type.
type ScriptCondition ¶
type ScriptCondition struct { Id *string `json:"id,omitempty"` Lang *string `json:"lang,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` Source *string `json:"source,omitempty"` }
ScriptCondition type.
func NewScriptCondition ¶
func NewScriptCondition() *ScriptCondition
NewScriptCondition returns a ScriptCondition.
type ScriptField ¶
type ScriptField struct { IgnoreFailure *bool `json:"ignore_failure,omitempty"` Script Script `json:"script"` }
ScriptField type.
type ScriptQuery ¶
type ScriptQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` Script Script `json:"script"` }
ScriptQuery type.
type ScriptScoreFunction ¶
type ScriptScoreFunction struct {
Script Script `json:"script"`
}
ScriptScoreFunction type.
func NewScriptScoreFunction ¶
func NewScriptScoreFunction() *ScriptScoreFunction
NewScriptScoreFunction returns a ScriptScoreFunction.
type ScriptScoreQuery ¶
type ScriptScoreQuery struct { Boost *float32 `json:"boost,omitempty"` MinScore *float32 `json:"min_score,omitempty"` Query *Query `json:"query,omitempty"` QueryName_ *string `json:"_name,omitempty"` Script Script `json:"script"` }
ScriptScoreQuery type.
func NewScriptScoreQuery ¶
func NewScriptScoreQuery() *ScriptScoreQuery
NewScriptScoreQuery returns a ScriptScoreQuery.
type ScriptSort ¶
type ScriptSort struct { Mode *sortmode.SortMode `json:"mode,omitempty"` Nested *NestedSortValue `json:"nested,omitempty"` Order *sortorder.SortOrder `json:"order,omitempty"` Script Script `json:"script"` Type *scriptsorttype.ScriptSortType `json:"type,omitempty"` }
ScriptSort type.
type ScriptTransform ¶
type ScriptTransform struct { Id *string `json:"id,omitempty"` Lang *string `json:"lang,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` Source *string `json:"source,omitempty"` }
ScriptTransform type.
func NewScriptTransform ¶
func NewScriptTransform() *ScriptTransform
NewScriptTransform returns a ScriptTransform.
type ScriptedHeuristic ¶
type ScriptedHeuristic struct {
Script Script `json:"script"`
}
ScriptedHeuristic type.
func NewScriptedHeuristic ¶
func NewScriptedHeuristic() *ScriptedHeuristic
NewScriptedHeuristic returns a ScriptedHeuristic.
type ScriptedMetricAggregate ¶
type ScriptedMetricAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Value json.RawMessage `json:"value,omitempty"` }
ScriptedMetricAggregate type.
func NewScriptedMetricAggregate ¶
func NewScriptedMetricAggregate() *ScriptedMetricAggregate
NewScriptedMetricAggregate returns a ScriptedMetricAggregate.
type ScriptedMetricAggregation ¶
type ScriptedMetricAggregation struct { CombineScript Script `json:"combine_script,omitempty"` Field *string `json:"field,omitempty"` InitScript Script `json:"init_script,omitempty"` MapScript Script `json:"map_script,omitempty"` Missing Missing `json:"missing,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` ReduceScript Script `json:"reduce_script,omitempty"` Script Script `json:"script,omitempty"` }
ScriptedMetricAggregation type.
func NewScriptedMetricAggregation ¶
func NewScriptedMetricAggregation() *ScriptedMetricAggregation
NewScriptedMetricAggregation returns a ScriptedMetricAggregation.
type Scripting ¶
type Scripting struct { CacheEvictions *int64 `json:"cache_evictions,omitempty"` CompilationLimitTriggered *int64 `json:"compilation_limit_triggered,omitempty"` Compilations *int64 `json:"compilations,omitempty"` CompilationsHistory map[string]int64 `json:"compilations_history,omitempty"` Contexts []NodesContext `json:"contexts,omitempty"` }
Scripting type.
type SearchAsYouTypeProperty ¶
type SearchAsYouTypeProperty struct { Analyzer *string `json:"analyzer,omitempty"` CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` IndexOptions *indexoptions.IndexOptions `json:"index_options,omitempty"` MaxShingleSize *int `json:"max_shingle_size,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Norms *bool `json:"norms,omitempty"` Properties map[string]Property `json:"properties,omitempty"` SearchAnalyzer *string `json:"search_analyzer,omitempty"` SearchQuoteAnalyzer *string `json:"search_quote_analyzer,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` TermVector *termvectoroption.TermVectorOption `json:"term_vector,omitempty"` Type string `json:"type,omitempty"` }
SearchAsYouTypeProperty type.
func NewSearchAsYouTypeProperty ¶
func NewSearchAsYouTypeProperty() *SearchAsYouTypeProperty
NewSearchAsYouTypeProperty returns a SearchAsYouTypeProperty.
func (*SearchAsYouTypeProperty) UnmarshalJSON ¶
func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error
type SearchIdle ¶
type SearchIdle struct {
After Duration `json:"after,omitempty"`
}
SearchIdle type.
type SearchInput ¶
type SearchInput struct { Extract []string `json:"extract,omitempty"` Request SearchInputRequestDefinition `json:"request"` Timeout Duration `json:"timeout,omitempty"` }
SearchInput type.
type SearchInputRequestBody ¶
type SearchInputRequestBody struct {
Query Query `json:"query"`
}
SearchInputRequestBody type.
func NewSearchInputRequestBody ¶
func NewSearchInputRequestBody() *SearchInputRequestBody
NewSearchInputRequestBody returns a SearchInputRequestBody.
type SearchInputRequestDefinition ¶
type SearchInputRequestDefinition struct { Body *SearchInputRequestBody `json:"body,omitempty"` Indices []string `json:"indices,omitempty"` IndicesOptions *IndicesOptions `json:"indices_options,omitempty"` RestTotalHitsAsInt *bool `json:"rest_total_hits_as_int,omitempty"` SearchType *searchtype.SearchType `json:"search_type,omitempty"` Template *SearchTemplateRequestBody `json:"template,omitempty"` }
SearchInputRequestDefinition type.
func NewSearchInputRequestDefinition ¶
func NewSearchInputRequestDefinition() *SearchInputRequestDefinition
NewSearchInputRequestDefinition returns a SearchInputRequestDefinition.
type SearchProfile ¶
type SearchProfile struct { Collector []Collector `json:"collector"` Query []QueryProfile `json:"query"` RewriteTime int64 `json:"rewrite_time"` }
SearchProfile type.
func NewSearchProfile ¶
func NewSearchProfile() *SearchProfile
NewSearchProfile returns a SearchProfile.
type SearchStats ¶
type SearchStats struct { FetchCurrent int64 `json:"fetch_current"` FetchTime Duration `json:"fetch_time,omitempty"` FetchTimeInMillis int64 `json:"fetch_time_in_millis"` FetchTotal int64 `json:"fetch_total"` Groups map[string]SearchStats `json:"groups,omitempty"` OpenContexts *int64 `json:"open_contexts,omitempty"` QueryCurrent int64 `json:"query_current"` QueryTime Duration `json:"query_time,omitempty"` QueryTimeInMillis int64 `json:"query_time_in_millis"` QueryTotal int64 `json:"query_total"` ScrollCurrent int64 `json:"scroll_current"` ScrollTime Duration `json:"scroll_time,omitempty"` ScrollTimeInMillis int64 `json:"scroll_time_in_millis"` ScrollTotal int64 `json:"scroll_total"` SuggestCurrent int64 `json:"suggest_current"` SuggestTime Duration `json:"suggest_time,omitempty"` SuggestTimeInMillis int64 `json:"suggest_time_in_millis"` SuggestTotal int64 `json:"suggest_total"` }
SearchStats type.
type SearchTemplateRequestBody ¶
type SearchTemplateRequestBody struct { Explain *bool `json:"explain,omitempty"` // Id ID of the search template to use. If no source is specified, // this parameter is required. Id *string `json:"id,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` Profile *bool `json:"profile,omitempty"` // Source An inline search template. Supports the same parameters as the search API's // request body. Also supports Mustache variables. If no id is specified, this // parameter is required. Source *string `json:"source,omitempty"` }
SearchTemplateRequestBody type.
func NewSearchTemplateRequestBody ¶
func NewSearchTemplateRequestBody() *SearchTemplateRequestBody
NewSearchTemplateRequestBody returns a SearchTemplateRequestBody.
type SearchTransform ¶
type SearchTransform struct { Request SearchInputRequestDefinition `json:"request"` Timeout Duration `json:"timeout"` }
SearchTransform type.
func NewSearchTransform ¶
func NewSearchTransform() *SearchTransform
NewSearchTransform returns a SearchTransform.
type SearchableSnapshots ¶
type SearchableSnapshots struct { Available bool `json:"available"` Enabled bool `json:"enabled"` FullCopyIndicesCount *int `json:"full_copy_indices_count,omitempty"` IndicesCount int `json:"indices_count"` }
SearchableSnapshots type.
func NewSearchableSnapshots ¶
func NewSearchableSnapshots() *SearchableSnapshots
NewSearchableSnapshots returns a SearchableSnapshots.
type Security ¶
type Security struct { Anonymous FeatureToggle `json:"anonymous"` ApiKeyService FeatureToggle `json:"api_key_service"` Audit Audit `json:"audit"` Available bool `json:"available"` Enabled bool `json:"enabled"` Fips140 FeatureToggle `json:"fips_140"` Ipfilter IpFilter `json:"ipfilter"` OperatorPrivileges Base `json:"operator_privileges"` Realms map[string]XpackRealm `json:"realms"` RoleMapping map[string]XpackRoleMapping `json:"role_mapping"` Roles SecurityRoles `json:"roles"` Ssl Ssl `json:"ssl"` SystemKey *FeatureToggle `json:"system_key,omitempty"` TokenService FeatureToggle `json:"token_service"` }
Security type.
type SecurityRealm ¶
type SecurityRealm struct {
Name string `json:"name"`
}
SecurityRealm type.
func NewSecurityRealm ¶
func NewSecurityRealm() *SecurityRealm
NewSecurityRealm returns a SecurityRealm.
type SecurityRoleMapping ¶
type SecurityRoleMapping struct { Enabled bool `json:"enabled"` Metadata map[string]json.RawMessage `json:"metadata"` RoleTemplates []RoleTemplate `json:"role_templates,omitempty"` Roles []string `json:"roles"` Rules RoleMappingRule `json:"rules"` }
SecurityRoleMapping type.
func NewSecurityRoleMapping ¶
func NewSecurityRoleMapping() *SecurityRoleMapping
NewSecurityRoleMapping returns a SecurityRoleMapping.
type SecurityRoles ¶
type SecurityRoles struct { Dls SecurityRolesDls `json:"dls"` File SecurityRolesFile `json:"file"` Native SecurityRolesNative `json:"native"` }
SecurityRoles type.
func NewSecurityRoles ¶
func NewSecurityRoles() *SecurityRoles
NewSecurityRoles returns a SecurityRoles.
type SecurityRolesDls ¶
type SecurityRolesDls struct {
BitSetCache SecurityRolesDlsBitSetCache `json:"bit_set_cache"`
}
SecurityRolesDls type.
func NewSecurityRolesDls ¶
func NewSecurityRolesDls() *SecurityRolesDls
NewSecurityRolesDls returns a SecurityRolesDls.
type SecurityRolesDlsBitSetCache ¶
type SecurityRolesDlsBitSetCache struct { Count int `json:"count"` Memory ByteSize `json:"memory,omitempty"` MemoryInBytes uint64 `json:"memory_in_bytes"` }
SecurityRolesDlsBitSetCache type.
func NewSecurityRolesDlsBitSetCache ¶
func NewSecurityRolesDlsBitSetCache() *SecurityRolesDlsBitSetCache
NewSecurityRolesDlsBitSetCache returns a SecurityRolesDlsBitSetCache.
type SecurityRolesFile ¶
type SecurityRolesFile struct { Dls bool `json:"dls"` Fls bool `json:"fls"` Size int64 `json:"size"` }
SecurityRolesFile type.
func NewSecurityRolesFile ¶
func NewSecurityRolesFile() *SecurityRolesFile
NewSecurityRolesFile returns a SecurityRolesFile.
type SecurityRolesNative ¶
type SecurityRolesNative struct { Dls bool `json:"dls"` Fls bool `json:"fls"` Size int64 `json:"size"` }
SecurityRolesNative type.
func NewSecurityRolesNative ¶
func NewSecurityRolesNative() *SecurityRolesNative
NewSecurityRolesNative returns a SecurityRolesNative.
type Segment ¶
type Segment struct { Attributes map[string]string `json:"attributes"` Committed bool `json:"committed"` Compound bool `json:"compound"` DeletedDocs int64 `json:"deleted_docs"` Generation int `json:"generation"` NumDocs int64 `json:"num_docs"` Search bool `json:"search"` SizeInBytes Float64 `json:"size_in_bytes"` Version string `json:"version"` }
Segment type.
type SegmentsRecord ¶
type SegmentsRecord struct { // Committed is segment committed Committed *string `json:"committed,omitempty"` // Compound is segment compound Compound *string `json:"compound,omitempty"` // DocsCount number of docs in segment DocsCount *string `json:"docs.count,omitempty"` // DocsDeleted number of deleted docs in segment DocsDeleted *string `json:"docs.deleted,omitempty"` // Generation segment generation Generation *string `json:"generation,omitempty"` // Id unique id of node where it lives Id *string `json:"id,omitempty"` // Index index name Index *string `json:"index,omitempty"` // Ip ip of node where it lives Ip *string `json:"ip,omitempty"` // Prirep primary or replica Prirep *string `json:"prirep,omitempty"` // Searchable is segment searched Searchable *string `json:"searchable,omitempty"` // Segment segment name Segment *string `json:"segment,omitempty"` // Shard shard name Shard *string `json:"shard,omitempty"` // Size segment size in bytes Size ByteSize `json:"size,omitempty"` // SizeMemory segment memory in bytes SizeMemory ByteSize `json:"size.memory,omitempty"` // Version version Version *string `json:"version,omitempty"` }
SegmentsRecord type.
func NewSegmentsRecord ¶
func NewSegmentsRecord() *SegmentsRecord
NewSegmentsRecord returns a SegmentsRecord.
type SegmentsStats ¶
type SegmentsStats struct { Count int `json:"count"` DocValuesMemory ByteSize `json:"doc_values_memory,omitempty"` DocValuesMemoryInBytes int `json:"doc_values_memory_in_bytes"` FileSizes map[string]ShardFileSizeInfo `json:"file_sizes"` FixedBitSet ByteSize `json:"fixed_bit_set,omitempty"` FixedBitSetMemoryInBytes int `json:"fixed_bit_set_memory_in_bytes"` IndexWriterMaxMemoryInBytes *int `json:"index_writer_max_memory_in_bytes,omitempty"` IndexWriterMemory ByteSize `json:"index_writer_memory,omitempty"` IndexWriterMemoryInBytes int `json:"index_writer_memory_in_bytes"` MaxUnsafeAutoIdTimestamp int64 `json:"max_unsafe_auto_id_timestamp"` Memory ByteSize `json:"memory,omitempty"` MemoryInBytes int `json:"memory_in_bytes"` NormsMemory ByteSize `json:"norms_memory,omitempty"` NormsMemoryInBytes int `json:"norms_memory_in_bytes"` PointsMemory ByteSize `json:"points_memory,omitempty"` PointsMemoryInBytes int `json:"points_memory_in_bytes"` StoredFieldsMemoryInBytes int `json:"stored_fields_memory_in_bytes"` StoredMemory ByteSize `json:"stored_memory,omitempty"` TermVectorsMemoryInBytes int `json:"term_vectors_memory_in_bytes"` TermVectoryMemory ByteSize `json:"term_vectory_memory,omitempty"` TermsMemory ByteSize `json:"terms_memory,omitempty"` TermsMemoryInBytes int `json:"terms_memory_in_bytes"` VersionMapMemory ByteSize `json:"version_map_memory,omitempty"` VersionMapMemoryInBytes int `json:"version_map_memory_in_bytes"` }
SegmentsStats type.
func NewSegmentsStats ¶
func NewSegmentsStats() *SegmentsStats
NewSegmentsStats returns a SegmentsStats.
type SerialDifferencingAggregation ¶
type SerialDifferencingAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Lag *int `json:"lag,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
SerialDifferencingAggregation type.
func NewSerialDifferencingAggregation ¶
func NewSerialDifferencingAggregation() *SerialDifferencingAggregation
NewSerialDifferencingAggregation returns a SerialDifferencingAggregation.
func (*SerialDifferencingAggregation) UnmarshalJSON ¶
func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error
type SerializedClusterState ¶
type SerializedClusterState struct { Diffs *SerializedClusterStateDetail `json:"diffs,omitempty"` FullStates *SerializedClusterStateDetail `json:"full_states,omitempty"` }
SerializedClusterState type.
func NewSerializedClusterState ¶
func NewSerializedClusterState() *SerializedClusterState
NewSerializedClusterState returns a SerializedClusterState.
type SerializedClusterStateDetail ¶
type SerializedClusterStateDetail struct { CompressedSize *string `json:"compressed_size,omitempty"` CompressedSizeInBytes *int64 `json:"compressed_size_in_bytes,omitempty"` Count *int64 `json:"count,omitempty"` UncompressedSize *string `json:"uncompressed_size,omitempty"` UncompressedSizeInBytes *int64 `json:"uncompressed_size_in_bytes,omitempty"` }
SerializedClusterStateDetail type.
func NewSerializedClusterStateDetail ¶
func NewSerializedClusterStateDetail() *SerializedClusterStateDetail
NewSerializedClusterStateDetail returns a SerializedClusterStateDetail.
type ServiceToken ¶
ServiceToken type.
func NewServiceToken ¶
func NewServiceToken() *ServiceToken
NewServiceToken returns a ServiceToken.
type SetProcessor ¶
type SetProcessor struct { CopyFrom *string `json:"copy_from,omitempty"` Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreEmptyValue *bool `json:"ignore_empty_value,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` MediaType *string `json:"media_type,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Override *bool `json:"override,omitempty"` Tag *string `json:"tag,omitempty"` Value json.RawMessage `json:"value,omitempty"` }
SetProcessor type.
func NewSetProcessor ¶
func NewSetProcessor() *SetProcessor
NewSetProcessor returns a SetProcessor.
type SetSecurityUserProcessor ¶
type SetSecurityUserProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Properties []string `json:"properties,omitempty"` Tag *string `json:"tag,omitempty"` }
SetSecurityUserProcessor type.
func NewSetSecurityUserProcessor ¶
func NewSetSecurityUserProcessor() *SetSecurityUserProcessor
NewSetSecurityUserProcessor returns a SetSecurityUserProcessor.
type Settings ¶
type Settings struct { // AlignCheckpoints Specifies whether the transform checkpoint ranges should be optimized for // performance. Such optimization can align // checkpoint ranges with the date histogram interval when date histogram is // specified as a group source in the // transform config. As a result, fewer document updates in the destination index // will be performed thus improving // overall performance. AlignCheckpoints *bool `json:"align_checkpoints,omitempty"` // DatesAsEpochMillis Defines if dates in the output should be written as ISO formatted string or as // millis since epoch. epoch_millis was // the default for transforms created before version 7.11. For compatible output // set this value to `true`. DatesAsEpochMillis *bool `json:"dates_as_epoch_millis,omitempty"` // DeduceMappings Specifies whether the transform should deduce the destination index mappings // from the transform configuration. DeduceMappings *bool `json:"deduce_mappings,omitempty"` // DocsPerSecond Specifies a limit on the number of input documents per second. This setting // throttles the transform by adding a // wait time between search requests. The default value is null, which disables // throttling. DocsPerSecond *float32 `json:"docs_per_second,omitempty"` // MaxPageSearchSize Defines the initial page size to use for the composite aggregation for each // checkpoint. If circuit breaker // exceptions occur, the page size is dynamically adjusted to a lower value. The // minimum value is `10` and the // maximum is `65,536`. MaxPageSearchSize *int `json:"max_page_search_size,omitempty"` // Unattended If `true`, the transform runs in unattended mode. In unattended mode, the // transform retries indefinitely in case // of an error which means the transform never fails. Setting the number of // retries other than infinite fails in // validation. Unattended *bool `json:"unattended,omitempty"` }
Settings type.
type SettingsAnalyze ¶
type SettingsAnalyze struct {
MaxTokenCount *int `json:"max_token_count,omitempty"`
}
SettingsAnalyze type.
func NewSettingsAnalyze ¶
func NewSettingsAnalyze() *SettingsAnalyze
NewSettingsAnalyze returns a SettingsAnalyze.
type SettingsHighlight ¶
type SettingsHighlight struct {
MaxAnalyzedOffset *int `json:"max_analyzed_offset,omitempty"`
}
SettingsHighlight type.
func NewSettingsHighlight ¶
func NewSettingsHighlight() *SettingsHighlight
NewSettingsHighlight returns a SettingsHighlight.
type SettingsQueryString ¶
type SettingsQueryString struct {
Lenient bool `json:"lenient"`
}
SettingsQueryString type.
func NewSettingsQueryString ¶
func NewSettingsQueryString() *SettingsQueryString
NewSettingsQueryString returns a SettingsQueryString.
type SettingsSearch ¶
type SettingsSearch struct { Idle *SearchIdle `json:"idle,omitempty"` Slowlog *SlowlogSettings `json:"slowlog,omitempty"` }
SettingsSearch type.
func NewSettingsSearch ¶
func NewSettingsSearch() *SettingsSearch
NewSettingsSearch returns a SettingsSearch.
type SettingsSimilarity ¶
type SettingsSimilarity struct { Bm25 *SettingsSimilarityBm25 `json:"bm25,omitempty"` Dfi *SettingsSimilarityDfi `json:"dfi,omitempty"` Dfr *SettingsSimilarityDfr `json:"dfr,omitempty"` Ib *SettingsSimilarityIb `json:"ib,omitempty"` Lmd *SettingsSimilarityLmd `json:"lmd,omitempty"` Lmj *SettingsSimilarityLmj `json:"lmj,omitempty"` ScriptedTfidf *SettingsSimilarityScriptedTfidf `json:"scripted_tfidf,omitempty"` }
SettingsSimilarity type.
func NewSettingsSimilarity ¶
func NewSettingsSimilarity() *SettingsSimilarity
NewSettingsSimilarity returns a SettingsSimilarity.
type SettingsSimilarityBm25 ¶
type SettingsSimilarityBm25 struct { B Float64 `json:"b"` DiscountOverlaps bool `json:"discount_overlaps"` K1 Float64 `json:"k1"` Type string `json:"type,omitempty"` }
SettingsSimilarityBm25 type.
func NewSettingsSimilarityBm25 ¶
func NewSettingsSimilarityBm25() *SettingsSimilarityBm25
NewSettingsSimilarityBm25 returns a SettingsSimilarityBm25.
type SettingsSimilarityDfi ¶
type SettingsSimilarityDfi struct { IndependenceMeasure dfiindependencemeasure.DFIIndependenceMeasure `json:"independence_measure"` Type string `json:"type,omitempty"` }
SettingsSimilarityDfi type.
func NewSettingsSimilarityDfi ¶
func NewSettingsSimilarityDfi() *SettingsSimilarityDfi
NewSettingsSimilarityDfi returns a SettingsSimilarityDfi.
type SettingsSimilarityDfr ¶
type SettingsSimilarityDfr struct { AfterEffect dfraftereffect.DFRAfterEffect `json:"after_effect"` BasicModel dfrbasicmodel.DFRBasicModel `json:"basic_model"` Normalization normalization.Normalization `json:"normalization"` Type string `json:"type,omitempty"` }
SettingsSimilarityDfr type.
func NewSettingsSimilarityDfr ¶
func NewSettingsSimilarityDfr() *SettingsSimilarityDfr
NewSettingsSimilarityDfr returns a SettingsSimilarityDfr.
type SettingsSimilarityIb ¶
type SettingsSimilarityIb struct { Distribution ibdistribution.IBDistribution `json:"distribution"` Lambda iblambda.IBLambda `json:"lambda"` Normalization normalization.Normalization `json:"normalization"` Type string `json:"type,omitempty"` }
SettingsSimilarityIb type.
func NewSettingsSimilarityIb ¶
func NewSettingsSimilarityIb() *SettingsSimilarityIb
NewSettingsSimilarityIb returns a SettingsSimilarityIb.
type SettingsSimilarityLmd ¶
SettingsSimilarityLmd type.
func NewSettingsSimilarityLmd ¶
func NewSettingsSimilarityLmd() *SettingsSimilarityLmd
NewSettingsSimilarityLmd returns a SettingsSimilarityLmd.
type SettingsSimilarityLmj ¶
type SettingsSimilarityLmj struct { Lambda Float64 `json:"lambda"` Type string `json:"type,omitempty"` }
SettingsSimilarityLmj type.
func NewSettingsSimilarityLmj ¶
func NewSettingsSimilarityLmj() *SettingsSimilarityLmj
NewSettingsSimilarityLmj returns a SettingsSimilarityLmj.
type SettingsSimilarityScriptedTfidf ¶
type SettingsSimilarityScriptedTfidf struct { Script Script `json:"script"` Type string `json:"type,omitempty"` }
SettingsSimilarityScriptedTfidf type.
func NewSettingsSimilarityScriptedTfidf ¶
func NewSettingsSimilarityScriptedTfidf() *SettingsSimilarityScriptedTfidf
NewSettingsSimilarityScriptedTfidf returns a SettingsSimilarityScriptedTfidf.
type ShapeFieldQuery ¶
type ShapeFieldQuery struct { IndexedShape *FieldLookup `json:"indexed_shape,omitempty"` Relation *geoshaperelation.GeoShapeRelation `json:"relation,omitempty"` Shape json.RawMessage `json:"shape,omitempty"` }
ShapeFieldQuery type.
func NewShapeFieldQuery ¶
func NewShapeFieldQuery() *ShapeFieldQuery
NewShapeFieldQuery returns a ShapeFieldQuery.
type ShapeProperty ¶
type ShapeProperty struct { Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` IgnoreZValue *bool `json:"ignore_z_value,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Orientation *geoorientation.GeoOrientation `json:"orientation,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
ShapeProperty type.
func NewShapeProperty ¶
func NewShapeProperty() *ShapeProperty
NewShapeProperty returns a ShapeProperty.
func (*ShapeProperty) UnmarshalJSON ¶
func (s *ShapeProperty) UnmarshalJSON(data []byte) error
type ShapeQuery ¶
type ShapeQuery struct { Boost *float32 `json:"boost,omitempty"` IgnoreUnmapped *bool `json:"ignore_unmapped,omitempty"` QueryName_ *string `json:"_name,omitempty"` ShapeQuery map[string]ShapeFieldQuery `json:"-"` }
ShapeQuery type.
func (ShapeQuery) MarshalJSON ¶
func (s ShapeQuery) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type ShardCommit ¶
type ShardCommit struct { Generation int `json:"generation"` Id string `json:"id"` NumDocs int64 `json:"num_docs"` UserData map[string]string `json:"user_data"` }
ShardCommit type.
type ShardFailure ¶
type ShardFailure struct { Index *string `json:"index,omitempty"` Node *string `json:"node,omitempty"` Reason ErrorCause `json:"reason"` Shard int `json:"shard"` Status *string `json:"status,omitempty"` }
ShardFailure type.
func NewShardFailure ¶
func NewShardFailure() *ShardFailure
NewShardFailure returns a ShardFailure.
type ShardFileSizeInfo ¶
type ShardFileSizeInfo struct { AverageSizeInBytes *int64 `json:"average_size_in_bytes,omitempty"` Count *int64 `json:"count,omitempty"` Description string `json:"description"` MaxSizeInBytes *int64 `json:"max_size_in_bytes,omitempty"` MinSizeInBytes *int64 `json:"min_size_in_bytes,omitempty"` SizeInBytes int64 `json:"size_in_bytes"` }
ShardFileSizeInfo type.
func NewShardFileSizeInfo ¶
func NewShardFileSizeInfo() *ShardFileSizeInfo
NewShardFileSizeInfo returns a ShardFileSizeInfo.
type ShardHealthStats ¶
type ShardHealthStats struct { ActiveShards int `json:"active_shards"` InitializingShards int `json:"initializing_shards"` PrimaryActive bool `json:"primary_active"` RelocatingShards int `json:"relocating_shards"` Status healthstatus.HealthStatus `json:"status"` UnassignedShards int `json:"unassigned_shards"` }
ShardHealthStats type.
func NewShardHealthStats ¶
func NewShardHealthStats() *ShardHealthStats
NewShardHealthStats returns a ShardHealthStats.
type ShardLease ¶
type ShardLease struct { Id string `json:"id"` RetainingSeqNo int64 `json:"retaining_seq_no"` Source string `json:"source"` Timestamp int64 `json:"timestamp"` }
ShardLease type.
type ShardMigrationStatus ¶
type ShardMigrationStatus struct {
Status shutdownstatus.ShutdownStatus `json:"status"`
}
ShardMigrationStatus type.
func NewShardMigrationStatus ¶
func NewShardMigrationStatus() *ShardMigrationStatus
NewShardMigrationStatus returns a ShardMigrationStatus.
type ShardPath ¶
type ShardPath struct { DataPath string `json:"data_path"` IsCustomDataPath bool `json:"is_custom_data_path"` StatePath string `json:"state_path"` }
ShardPath type.
type ShardProfile ¶
type ShardProfile struct { Aggregations []AggregationProfile `json:"aggregations"` Fetch *FetchProfile `json:"fetch,omitempty"` Id string `json:"id"` Searches []SearchProfile `json:"searches"` }
ShardProfile type.
func NewShardProfile ¶
func NewShardProfile() *ShardProfile
NewShardProfile returns a ShardProfile.
type ShardQueryCache ¶
type ShardQueryCache struct { CacheCount int64 `json:"cache_count"` CacheSize int64 `json:"cache_size"` Evictions int64 `json:"evictions"` HitCount int64 `json:"hit_count"` MemorySizeInBytes int64 `json:"memory_size_in_bytes"` MissCount int64 `json:"miss_count"` TotalCount int64 `json:"total_count"` }
ShardQueryCache type.
func NewShardQueryCache ¶
func NewShardQueryCache() *ShardQueryCache
NewShardQueryCache returns a ShardQueryCache.
type ShardRecovery ¶
type ShardRecovery struct { Id int64 `json:"id"` Index RecoveryIndexStatus `json:"index"` Primary bool `json:"primary"` Source RecoveryOrigin `json:"source"` Stage string `json:"stage"` Start *RecoveryStartStatus `json:"start,omitempty"` StartTime DateTime `json:"start_time,omitempty"` StartTimeInMillis int64 `json:"start_time_in_millis"` StopTime DateTime `json:"stop_time,omitempty"` StopTimeInMillis *int64 `json:"stop_time_in_millis,omitempty"` Target RecoveryOrigin `json:"target"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` Translog TranslogStatus `json:"translog"` Type string `json:"type"` VerifyIndex VerifyIndex `json:"verify_index"` }
ShardRecovery type.
func NewShardRecovery ¶
func NewShardRecovery() *ShardRecovery
NewShardRecovery returns a ShardRecovery.
type ShardRetentionLeases ¶
type ShardRetentionLeases struct { Leases []ShardLease `json:"leases"` PrimaryTerm int64 `json:"primary_term"` Version int64 `json:"version"` }
ShardRetentionLeases type.
func NewShardRetentionLeases ¶
func NewShardRetentionLeases() *ShardRetentionLeases
NewShardRetentionLeases returns a ShardRetentionLeases.
type ShardRouting ¶
type ShardRouting struct { Node string `json:"node"` Primary bool `json:"primary"` RelocatingNode string `json:"relocating_node,omitempty"` State shardroutingstate.ShardRoutingState `json:"state"` }
ShardRouting type.
func NewShardRouting ¶
func NewShardRouting() *ShardRouting
NewShardRouting returns a ShardRouting.
type ShardSegmentRouting ¶
type ShardSegmentRouting struct { Node string `json:"node"` Primary bool `json:"primary"` State string `json:"state"` }
ShardSegmentRouting type.
func NewShardSegmentRouting ¶
func NewShardSegmentRouting() *ShardSegmentRouting
NewShardSegmentRouting returns a ShardSegmentRouting.
type ShardSequenceNumber ¶
type ShardSequenceNumber struct { GlobalCheckpoint int64 `json:"global_checkpoint"` LocalCheckpoint int64 `json:"local_checkpoint"` MaxSeqNo int64 `json:"max_seq_no"` }
ShardSequenceNumber type.
func NewShardSequenceNumber ¶
func NewShardSequenceNumber() *ShardSequenceNumber
NewShardSequenceNumber returns a ShardSequenceNumber.
type ShardStatistics ¶
type ShardStatistics struct { Failed uint `json:"failed"` Failures []ShardFailure `json:"failures,omitempty"` Skipped *uint `json:"skipped,omitempty"` Successful uint `json:"successful"` Total uint `json:"total"` }
ShardStatistics type.
func NewShardStatistics ¶
func NewShardStatistics() *ShardStatistics
NewShardStatistics returns a ShardStatistics.
type ShardStore ¶
type ShardStore struct { Allocation shardstoreallocation.ShardStoreAllocation `json:"allocation"` AllocationId *string `json:"allocation_id,omitempty"` ShardStore map[string]ShardStoreNode `json:"-"` StoreException *ShardStoreException `json:"store_exception,omitempty"` }
ShardStore type.
func (ShardStore) MarshalJSON ¶
func (s ShardStore) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type ShardStoreException ¶
ShardStoreException type.
func NewShardStoreException ¶
func NewShardStoreException() *ShardStoreException
NewShardStoreException returns a ShardStoreException.
type ShardStoreIndex ¶
type ShardStoreIndex struct { Aliases []string `json:"aliases,omitempty"` Filter *Query `json:"filter,omitempty"` }
ShardStoreIndex type.
func NewShardStoreIndex ¶
func NewShardStoreIndex() *ShardStoreIndex
NewShardStoreIndex returns a ShardStoreIndex.
type ShardStoreNode ¶
type ShardStoreNode struct { Attributes map[string]string `json:"attributes"` EphemeralId *string `json:"ephemeral_id,omitempty"` ExternalId *string `json:"external_id,omitempty"` Name string `json:"name"` Roles []string `json:"roles"` TransportAddress string `json:"transport_address"` }
ShardStoreNode type.
func NewShardStoreNode ¶
func NewShardStoreNode() *ShardStoreNode
NewShardStoreNode returns a ShardStoreNode.
type ShardStoreWrapper ¶
type ShardStoreWrapper struct {
Stores []ShardStore `json:"stores"`
}
ShardStoreWrapper type.
func NewShardStoreWrapper ¶
func NewShardStoreWrapper() *ShardStoreWrapper
NewShardStoreWrapper returns a ShardStoreWrapper.
type ShardsRecord ¶
type ShardsRecord struct { // BulkAvgSizeInBytes avg size in bytes of shard bulk BulkAvgSizeInBytes *string `json:"bulk.avg_size_in_bytes,omitempty"` // BulkAvgTime average time spent in shard bulk BulkAvgTime *string `json:"bulk.avg_time,omitempty"` // BulkTotalOperations number of bulk shard ops BulkTotalOperations *string `json:"bulk.total_operations,omitempty"` // BulkTotalSizeInBytes total size in bytes of shard bulk BulkTotalSizeInBytes *string `json:"bulk.total_size_in_bytes,omitempty"` // BulkTotalTime time spent in shard bulk BulkTotalTime *string `json:"bulk.total_time,omitempty"` // CompletionSize size of completion CompletionSize *string `json:"completion.size,omitempty"` // Docs number of docs in shard Docs string `json:"docs,omitempty"` // FielddataEvictions fielddata evictions FielddataEvictions *string `json:"fielddata.evictions,omitempty"` // FielddataMemorySize used fielddata cache FielddataMemorySize *string `json:"fielddata.memory_size,omitempty"` // FlushTotal number of flushes FlushTotal *string `json:"flush.total,omitempty"` // FlushTotalTime time spent in flush FlushTotalTime *string `json:"flush.total_time,omitempty"` // GetCurrent number of current get ops GetCurrent *string `json:"get.current,omitempty"` // GetExistsTime time spent in successful gets GetExistsTime *string `json:"get.exists_time,omitempty"` // GetExistsTotal number of successful gets GetExistsTotal *string `json:"get.exists_total,omitempty"` // GetMissingTime time spent in failed gets GetMissingTime *string `json:"get.missing_time,omitempty"` // GetMissingTotal number of failed gets GetMissingTotal *string `json:"get.missing_total,omitempty"` // GetTime time spent in get GetTime *string `json:"get.time,omitempty"` // GetTotal number of get ops GetTotal *string `json:"get.total,omitempty"` // Id unique id of node where it lives Id *string `json:"id,omitempty"` // Index index name Index *string `json:"index,omitempty"` // IndexingDeleteCurrent number of current deletions 
IndexingDeleteCurrent *string `json:"indexing.delete_current,omitempty"` // IndexingDeleteTime time spent in deletions IndexingDeleteTime *string `json:"indexing.delete_time,omitempty"` // IndexingDeleteTotal number of delete ops IndexingDeleteTotal *string `json:"indexing.delete_total,omitempty"` // IndexingIndexCurrent number of current indexing ops IndexingIndexCurrent *string `json:"indexing.index_current,omitempty"` // IndexingIndexFailed number of failed indexing ops IndexingIndexFailed *string `json:"indexing.index_failed,omitempty"` // IndexingIndexTime time spent in indexing IndexingIndexTime *string `json:"indexing.index_time,omitempty"` // IndexingIndexTotal number of indexing ops IndexingIndexTotal *string `json:"indexing.index_total,omitempty"` // Ip ip of node where it lives Ip string `json:"ip,omitempty"` // MergesCurrent number of current merges MergesCurrent *string `json:"merges.current,omitempty"` // MergesCurrentDocs number of current merging docs MergesCurrentDocs *string `json:"merges.current_docs,omitempty"` // MergesCurrentSize size of current merges MergesCurrentSize *string `json:"merges.current_size,omitempty"` // MergesTotal number of completed merge ops MergesTotal *string `json:"merges.total,omitempty"` // MergesTotalDocs docs merged MergesTotalDocs *string `json:"merges.total_docs,omitempty"` // MergesTotalSize size merged MergesTotalSize *string `json:"merges.total_size,omitempty"` // MergesTotalTime time spent in merges MergesTotalTime *string `json:"merges.total_time,omitempty"` // Node name of node where it lives Node string `json:"node,omitempty"` // PathData shard data path PathData *string `json:"path.data,omitempty"` // PathState shard state path PathState *string `json:"path.state,omitempty"` // Prirep primary or replica Prirep *string `json:"prirep,omitempty"` // QueryCacheEvictions query cache evictions QueryCacheEvictions *string `json:"query_cache.evictions,omitempty"` // QueryCacheMemorySize used query cache 
QueryCacheMemorySize *string `json:"query_cache.memory_size,omitempty"` // RecoverysourceType recovery source type RecoverysourceType *string `json:"recoverysource.type,omitempty"` // RefreshExternalTime time spent in external refreshes RefreshExternalTime *string `json:"refresh.external_time,omitempty"` // RefreshExternalTotal total external refreshes RefreshExternalTotal *string `json:"refresh.external_total,omitempty"` // RefreshListeners number of pending refresh listeners RefreshListeners *string `json:"refresh.listeners,omitempty"` // RefreshTime time spent in refreshes RefreshTime *string `json:"refresh.time,omitempty"` // RefreshTotal total refreshes RefreshTotal *string `json:"refresh.total,omitempty"` // SearchFetchCurrent current fetch phase ops SearchFetchCurrent *string `json:"search.fetch_current,omitempty"` // SearchFetchTime time spent in fetch phase SearchFetchTime *string `json:"search.fetch_time,omitempty"` // SearchFetchTotal total fetch ops SearchFetchTotal *string `json:"search.fetch_total,omitempty"` // SearchOpenContexts open search contexts SearchOpenContexts *string `json:"search.open_contexts,omitempty"` // SearchQueryCurrent current query phase ops SearchQueryCurrent *string `json:"search.query_current,omitempty"` // SearchQueryTime time spent in query phase SearchQueryTime *string `json:"search.query_time,omitempty"` // SearchQueryTotal total query phase ops SearchQueryTotal *string `json:"search.query_total,omitempty"` // SearchScrollCurrent open scroll contexts SearchScrollCurrent *string `json:"search.scroll_current,omitempty"` // SearchScrollTime time scroll contexts held open SearchScrollTime *string `json:"search.scroll_time,omitempty"` // SearchScrollTotal completed scroll contexts SearchScrollTotal *string `json:"search.scroll_total,omitempty"` // SegmentsCount number of segments SegmentsCount *string `json:"segments.count,omitempty"` // SegmentsFixedBitsetMemory memory used by fixed bit sets for nested object field types and 
export type // filters for types referred in _parent fields SegmentsFixedBitsetMemory *string `json:"segments.fixed_bitset_memory,omitempty"` // SegmentsIndexWriterMemory memory used by index writer SegmentsIndexWriterMemory *string `json:"segments.index_writer_memory,omitempty"` // SegmentsMemory memory used by segments SegmentsMemory *string `json:"segments.memory,omitempty"` // SegmentsVersionMapMemory memory used by version map SegmentsVersionMapMemory *string `json:"segments.version_map_memory,omitempty"` // SeqNoGlobalCheckpoint global checkpoint SeqNoGlobalCheckpoint *string `json:"seq_no.global_checkpoint,omitempty"` // SeqNoLocalCheckpoint local checkpoint SeqNoLocalCheckpoint *string `json:"seq_no.local_checkpoint,omitempty"` // SeqNoMax max sequence number SeqNoMax *string `json:"seq_no.max,omitempty"` // Shard shard name Shard *string `json:"shard,omitempty"` // State shard state State *string `json:"state,omitempty"` // Store store size of shard (how much disk it uses) Store string `json:"store,omitempty"` // SyncId sync id SyncId *string `json:"sync_id,omitempty"` // UnassignedAt time shard became unassigned (UTC) UnassignedAt *string `json:"unassigned.at,omitempty"` // UnassignedDetails additional details as to why the shard became unassigned UnassignedDetails *string `json:"unassigned.details,omitempty"` // UnassignedFor time has been unassigned UnassignedFor *string `json:"unassigned.for,omitempty"` // UnassignedReason reason shard is unassigned UnassignedReason *string `json:"unassigned.reason,omitempty"` // WarmerCurrent current warmer ops WarmerCurrent *string `json:"warmer.current,omitempty"` // WarmerTotal total warmer ops WarmerTotal *string `json:"warmer.total,omitempty"` // WarmerTotalTime time spent in warmers WarmerTotalTime *string `json:"warmer.total_time,omitempty"` }
ShardsRecord type.
func NewShardsRecord ¶
func NewShardsRecord() *ShardsRecord
NewShardsRecord returns a ShardsRecord.
type ShardsSegment ¶
type ShardsSegment struct { NumCommittedSegments int `json:"num_committed_segments"` NumSearchSegments int `json:"num_search_segments"` Routing ShardSegmentRouting `json:"routing"` Segments map[string]Segment `json:"segments"` }
ShardsSegment type.
func NewShardsSegment ¶
func NewShardsSegment() *ShardsSegment
NewShardsSegment returns a ShardsSegment.
type ShardsStatsSummary ¶
type ShardsStatsSummary struct { Incremental ShardsStatsSummaryItem `json:"incremental"` StartTimeInMillis int64 `json:"start_time_in_millis"` Time Duration `json:"time,omitempty"` TimeInMillis int64 `json:"time_in_millis"` Total ShardsStatsSummaryItem `json:"total"` }
ShardsStatsSummary type.
func NewShardsStatsSummary ¶
func NewShardsStatsSummary() *ShardsStatsSummary
NewShardsStatsSummary returns a ShardsStatsSummary.
type ShardsStatsSummaryItem ¶
type ShardsStatsSummaryItem struct { FileCount int64 `json:"file_count"` SizeInBytes int64 `json:"size_in_bytes"` }
ShardsStatsSummaryItem type.
func NewShardsStatsSummaryItem ¶
func NewShardsStatsSummaryItem() *ShardsStatsSummaryItem
NewShardsStatsSummaryItem returns a ShardsStatsSummaryItem.
type ShardsTotalStats ¶
type ShardsTotalStats struct {
TotalCount int64 `json:"total_count"`
}
ShardsTotalStats type.
func NewShardsTotalStats ¶
func NewShardsTotalStats() *ShardsTotalStats
NewShardsTotalStats returns a ShardsTotalStats.
type ShingleTokenFilter ¶
type ShingleTokenFilter struct { FillerToken *string `json:"filler_token,omitempty"` MaxShingleSize string `json:"max_shingle_size,omitempty"` MinShingleSize string `json:"min_shingle_size,omitempty"` OutputUnigrams *bool `json:"output_unigrams,omitempty"` OutputUnigramsIfNoShingles *bool `json:"output_unigrams_if_no_shingles,omitempty"` TokenSeparator *string `json:"token_separator,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
ShingleTokenFilter type.
func NewShingleTokenFilter ¶
func NewShingleTokenFilter() *ShingleTokenFilter
NewShingleTokenFilter returns a ShingleTokenFilter.
type ShortNumberProperty ¶
type ShortNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *int `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
ShortNumberProperty type.
func NewShortNumberProperty ¶
func NewShortNumberProperty() *ShortNumberProperty
NewShortNumberProperty returns a ShortNumberProperty.
func (*ShortNumberProperty) UnmarshalJSON ¶
func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error
type ShrinkConfiguration ¶
type ShrinkConfiguration struct {
NumberOfShards int `json:"number_of_shards"`
}
ShrinkConfiguration type.
func NewShrinkConfiguration ¶
func NewShrinkConfiguration() *ShrinkConfiguration
NewShrinkConfiguration returns a ShrinkConfiguration.
type SignificantLongTermsAggregate ¶
type SignificantLongTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantLongTermsBucket `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
SignificantLongTermsAggregate type.
func NewSignificantLongTermsAggregate ¶
func NewSignificantLongTermsAggregate() *SignificantLongTermsAggregate
NewSignificantLongTermsAggregate returns a SignificantLongTermsAggregate.
func (*SignificantLongTermsAggregate) UnmarshalJSON ¶
func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error
type SignificantLongTermsBucket ¶
type SignificantLongTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` BgCount int64 `json:"bg_count"` DocCount int64 `json:"doc_count"` Key int64 `json:"key"` KeyAsString *string `json:"key_as_string,omitempty"` Score Float64 `json:"score"` }
SignificantLongTermsBucket type.
func NewSignificantLongTermsBucket ¶
func NewSignificantLongTermsBucket() *SignificantLongTermsBucket
NewSignificantLongTermsBucket returns a SignificantLongTermsBucket.
func (SignificantLongTermsBucket) MarshalJSON ¶
func (s SignificantLongTermsBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*SignificantLongTermsBucket) UnmarshalJSON ¶
func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error
type SignificantStringTermsAggregate ¶
type SignificantStringTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantStringTermsBucket `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
SignificantStringTermsAggregate type.
func NewSignificantStringTermsAggregate ¶
func NewSignificantStringTermsAggregate() *SignificantStringTermsAggregate
NewSignificantStringTermsAggregate returns a SignificantStringTermsAggregate.
func (*SignificantStringTermsAggregate) UnmarshalJSON ¶
func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error
type SignificantStringTermsBucket ¶
type SignificantStringTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` BgCount int64 `json:"bg_count"` DocCount int64 `json:"doc_count"` Key string `json:"key"` Score Float64 `json:"score"` }
SignificantStringTermsBucket type.
func NewSignificantStringTermsBucket ¶
func NewSignificantStringTermsBucket() *SignificantStringTermsBucket
NewSignificantStringTermsBucket returns a SignificantStringTermsBucket.
func (SignificantStringTermsBucket) MarshalJSON ¶
func (s SignificantStringTermsBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*SignificantStringTermsBucket) UnmarshalJSON ¶
func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error
type SignificantTermsAggregateBaseSignificantLongTermsBucket ¶
type SignificantTermsAggregateBaseSignificantLongTermsBucket struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantLongTermsBucket `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
SignificantTermsAggregateBaseSignificantLongTermsBucket type.
func NewSignificantTermsAggregateBaseSignificantLongTermsBucket ¶
func NewSignificantTermsAggregateBaseSignificantLongTermsBucket() *SignificantTermsAggregateBaseSignificantLongTermsBucket
NewSignificantTermsAggregateBaseSignificantLongTermsBucket returns a SignificantTermsAggregateBaseSignificantLongTermsBucket.
func (*SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON ¶
func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON(data []byte) error
type SignificantTermsAggregateBaseSignificantStringTermsBucket ¶
type SignificantTermsAggregateBaseSignificantStringTermsBucket struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantStringTermsBucket `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
SignificantTermsAggregateBaseSignificantStringTermsBucket type.
func NewSignificantTermsAggregateBaseSignificantStringTermsBucket ¶
func NewSignificantTermsAggregateBaseSignificantStringTermsBucket() *SignificantTermsAggregateBaseSignificantStringTermsBucket
NewSignificantTermsAggregateBaseSignificantStringTermsBucket returns a SignificantTermsAggregateBaseSignificantStringTermsBucket.
func (*SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSON ¶
func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSON(data []byte) error
type SignificantTermsAggregateBaseVoid ¶
type SignificantTermsAggregateBaseVoid struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsVoid `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
SignificantTermsAggregateBaseVoid type.
func NewSignificantTermsAggregateBaseVoid ¶
func NewSignificantTermsAggregateBaseVoid() *SignificantTermsAggregateBaseVoid
NewSignificantTermsAggregateBaseVoid returns a SignificantTermsAggregateBaseVoid.
func (*SignificantTermsAggregateBaseVoid) UnmarshalJSON ¶
func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error
type SignificantTermsAggregation ¶
type SignificantTermsAggregation struct { BackgroundFilter *Query `json:"background_filter,omitempty"` ChiSquare *ChiSquareHeuristic `json:"chi_square,omitempty"` Exclude []string `json:"exclude,omitempty"` ExecutionHint *termsaggregationexecutionhint.TermsAggregationExecutionHint `json:"execution_hint,omitempty"` Field *string `json:"field,omitempty"` Gnd *GoogleNormalizedDistanceHeuristic `json:"gnd,omitempty"` Include TermsInclude `json:"include,omitempty"` Jlh *EmptyObject `json:"jlh,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` MinDocCount *int64 `json:"min_doc_count,omitempty"` MutualInformation *MutualInformationHeuristic `json:"mutual_information,omitempty"` Name *string `json:"name,omitempty"` Percentage *PercentageScoreHeuristic `json:"percentage,omitempty"` ScriptHeuristic *ScriptedHeuristic `json:"script_heuristic,omitempty"` ShardMinDocCount *int64 `json:"shard_min_doc_count,omitempty"` ShardSize *int `json:"shard_size,omitempty"` Size *int `json:"size,omitempty"` }
SignificantTermsAggregation type.
func NewSignificantTermsAggregation ¶
func NewSignificantTermsAggregation() *SignificantTermsAggregation
NewSignificantTermsAggregation returns a SignificantTermsAggregation.
type SignificantTextAggregation ¶
type SignificantTextAggregation struct { BackgroundFilter *Query `json:"background_filter,omitempty"` ChiSquare *ChiSquareHeuristic `json:"chi_square,omitempty"` Exclude []string `json:"exclude,omitempty"` ExecutionHint *termsaggregationexecutionhint.TermsAggregationExecutionHint `json:"execution_hint,omitempty"` Field *string `json:"field,omitempty"` FilterDuplicateText *bool `json:"filter_duplicate_text,omitempty"` Gnd *GoogleNormalizedDistanceHeuristic `json:"gnd,omitempty"` Include []string `json:"include,omitempty"` Jlh *EmptyObject `json:"jlh,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` MinDocCount *int64 `json:"min_doc_count,omitempty"` MutualInformation *MutualInformationHeuristic `json:"mutual_information,omitempty"` Name *string `json:"name,omitempty"` Percentage *PercentageScoreHeuristic `json:"percentage,omitempty"` ScriptHeuristic *ScriptedHeuristic `json:"script_heuristic,omitempty"` ShardMinDocCount *int64 `json:"shard_min_doc_count,omitempty"` ShardSize *int `json:"shard_size,omitempty"` Size *int `json:"size,omitempty"` SourceFields []string `json:"source_fields,omitempty"` }
SignificantTextAggregation type.
func NewSignificantTextAggregation ¶
func NewSignificantTextAggregation() *SignificantTextAggregation
NewSignificantTextAggregation returns a SignificantTextAggregation.
type SimpleAnalyzer ¶
type SimpleAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
SimpleAnalyzer type.
func NewSimpleAnalyzer ¶
func NewSimpleAnalyzer() *SimpleAnalyzer
NewSimpleAnalyzer returns a SimpleAnalyzer.
type SimpleMovingAverageAggregation ¶
type SimpleMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings EmptyObject `json:"settings"` Window *int `json:"window,omitempty"` }
SimpleMovingAverageAggregation type.
func NewSimpleMovingAverageAggregation ¶
func NewSimpleMovingAverageAggregation() *SimpleMovingAverageAggregation
NewSimpleMovingAverageAggregation returns a SimpleMovingAverageAggregation.
func (*SimpleMovingAverageAggregation) UnmarshalJSON ¶
func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error
type SimpleQueryStringFlags ¶
type SimpleQueryStringFlags interface{}
SimpleQueryStringFlags holds the union for the following types:
simplequerystringflag.SimpleQueryStringFlag string
type SimpleQueryStringQuery ¶
type SimpleQueryStringQuery struct { AnalyzeWildcard *bool `json:"analyze_wildcard,omitempty"` Analyzer *string `json:"analyzer,omitempty"` AutoGenerateSynonymsPhraseQuery *bool `json:"auto_generate_synonyms_phrase_query,omitempty"` Boost *float32 `json:"boost,omitempty"` DefaultOperator *operator.Operator `json:"default_operator,omitempty"` Fields []string `json:"fields,omitempty"` Flags SimpleQueryStringFlags `json:"flags,omitempty"` FuzzyMaxExpansions *int `json:"fuzzy_max_expansions,omitempty"` FuzzyPrefixLength *int `json:"fuzzy_prefix_length,omitempty"` FuzzyTranspositions *bool `json:"fuzzy_transpositions,omitempty"` Lenient *bool `json:"lenient,omitempty"` MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` QuoteFieldSuffix *string `json:"quote_field_suffix,omitempty"` }
SimpleQueryStringQuery type.
func NewSimpleQueryStringQuery ¶
func NewSimpleQueryStringQuery() *SimpleQueryStringQuery
NewSimpleQueryStringQuery returns a SimpleQueryStringQuery.
func (*SimpleQueryStringQuery) UnmarshalJSON ¶
func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error
type SimpleValueAggregate ¶
type SimpleValueAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
SimpleValueAggregate type.
func NewSimpleValueAggregate ¶
func NewSimpleValueAggregate() *SimpleValueAggregate
NewSimpleValueAggregate returns a SimpleValueAggregate.
type SimulateIngest ¶
type SimulateIngest struct { Pipeline *string `json:"pipeline,omitempty"` Timestamp DateTime `json:"timestamp"` }
SimulateIngest type.
func NewSimulateIngest ¶
func NewSimulateIngest() *SimulateIngest
NewSimulateIngest returns a SimulateIngest.
type SimulatedActions ¶
type SimulatedActions struct { Actions []string `json:"actions"` All *SimulatedActions `json:"all,omitempty"` UseAll bool `json:"use_all"` }
SimulatedActions type.
func NewSimulatedActions ¶
func NewSimulatedActions() *SimulatedActions
NewSimulatedActions returns a SimulatedActions.
type SlackAction ¶
type SlackAction struct { Account *string `json:"account,omitempty"` Message SlackMessage `json:"message"` }
SlackAction type.
type SlackAttachment ¶
type SlackAttachment struct { AuthorIcon *string `json:"author_icon,omitempty"` AuthorLink *string `json:"author_link,omitempty"` AuthorName string `json:"author_name"` Color *string `json:"color,omitempty"` Fallback *string `json:"fallback,omitempty"` Fields []SlackAttachmentField `json:"fields,omitempty"` ImageUrl *string `json:"image_url,omitempty"` Pretext *string `json:"pretext,omitempty"` Text *string `json:"text,omitempty"` ThumbUrl *string `json:"thumb_url,omitempty"` Title string `json:"title"` TitleLink *string `json:"title_link,omitempty"` Ts *int64 `json:"ts,omitempty"` }
SlackAttachment type.
func NewSlackAttachment ¶
func NewSlackAttachment() *SlackAttachment
NewSlackAttachment returns a SlackAttachment.
type SlackAttachmentField ¶
type SlackAttachmentField struct { Int bool `json:"short"` Title string `json:"title"` Value string `json:"value"` }
SlackAttachmentField type.
func NewSlackAttachmentField ¶
func NewSlackAttachmentField() *SlackAttachmentField
NewSlackAttachmentField returns a SlackAttachmentField.
type SlackDynamicAttachment ¶
type SlackDynamicAttachment struct { AttachmentTemplate SlackAttachment `json:"attachment_template"` ListPath string `json:"list_path"` }
SlackDynamicAttachment type.
func NewSlackDynamicAttachment ¶
func NewSlackDynamicAttachment() *SlackDynamicAttachment
NewSlackDynamicAttachment returns a SlackDynamicAttachment.
type SlackMessage ¶
type SlackMessage struct { Attachments []SlackAttachment `json:"attachments"` DynamicAttachments *SlackDynamicAttachment `json:"dynamic_attachments,omitempty"` From string `json:"from"` Icon *string `json:"icon,omitempty"` Text string `json:"text"` To []string `json:"to"` }
SlackMessage type.
func NewSlackMessage ¶
func NewSlackMessage() *SlackMessage
NewSlackMessage returns a SlackMessage.
type SlackResult ¶
type SlackResult struct { Account *string `json:"account,omitempty"` Message SlackMessage `json:"message"` }
SlackResult type.
type SlicedScroll ¶
type SlicedScroll struct { Field *string `json:"field,omitempty"` Id string `json:"id"` Max int `json:"max"` }
SlicedScroll type.
func NewSlicedScroll ¶
func NewSlicedScroll() *SlicedScroll
NewSlicedScroll returns a SlicedScroll.
type Slices ¶
type Slices interface{}
Slices holds the union for the following types:
int slicescalculation.SlicesCalculation
type Slm ¶
type Slm struct { Available bool `json:"available"` Enabled bool `json:"enabled"` PolicyCount *int `json:"policy_count,omitempty"` PolicyStats *Statistics `json:"policy_stats,omitempty"` }
Slm type.
type SlowlogSettings ¶
type SlowlogSettings struct { Level *string `json:"level,omitempty"` Reformat *bool `json:"reformat,omitempty"` Source *int `json:"source,omitempty"` Threshold *SlowlogTresholds `json:"threshold,omitempty"` }
SlowlogSettings type.
func NewSlowlogSettings ¶
func NewSlowlogSettings() *SlowlogSettings
NewSlowlogSettings returns a SlowlogSettings.
type SlowlogTresholdLevels ¶
type SlowlogTresholdLevels struct { Debug Duration `json:"debug,omitempty"` Info Duration `json:"info,omitempty"` Trace Duration `json:"trace,omitempty"` Warn Duration `json:"warn,omitempty"` }
SlowlogTresholdLevels type.
func NewSlowlogTresholdLevels ¶
func NewSlowlogTresholdLevels() *SlowlogTresholdLevels
NewSlowlogTresholdLevels returns a SlowlogTresholdLevels.
type SlowlogTresholds ¶
type SlowlogTresholds struct { Fetch *SlowlogTresholdLevels `json:"fetch,omitempty"` // Index The indexing slow log, similar in functionality to the search slow log. The // log file name ends with `_index_indexing_slowlog.json`. // Log and the thresholds are configured in the same way as the search slowlog. Index *SlowlogTresholdLevels `json:"index,omitempty"` Query *SlowlogTresholdLevels `json:"query,omitempty"` }
SlowlogTresholds type.
func NewSlowlogTresholds ¶
func NewSlowlogTresholds() *SlowlogTresholds
NewSlowlogTresholds returns a SlowlogTresholds.
type SmoothingModelContainer ¶
type SmoothingModelContainer struct { Laplace *LaplaceSmoothingModel `json:"laplace,omitempty"` LinearInterpolation *LinearInterpolationSmoothingModel `json:"linear_interpolation,omitempty"` StupidBackoff *StupidBackoffSmoothingModel `json:"stupid_backoff,omitempty"` }
SmoothingModelContainer type.
func NewSmoothingModelContainer ¶
func NewSmoothingModelContainer() *SmoothingModelContainer
NewSmoothingModelContainer returns a SmoothingModelContainer.
type SnapshotIndexStats ¶
type SnapshotIndexStats struct { Shards map[string]SnapshotShardsStatus `json:"shards"` ShardsStats SnapshotShardsStats `json:"shards_stats"` Stats SnapshotStats `json:"stats"` }
SnapshotIndexStats type.
func NewSnapshotIndexStats ¶
func NewSnapshotIndexStats() *SnapshotIndexStats
NewSnapshotIndexStats returns a SnapshotIndexStats.
type SnapshotInfo ¶
type SnapshotInfo struct { DataStreams []string `json:"data_streams"` Duration Duration `json:"duration,omitempty"` DurationInMillis *int64 `json:"duration_in_millis,omitempty"` EndTime DateTime `json:"end_time,omitempty"` EndTimeInMillis *int64 `json:"end_time_in_millis,omitempty"` Failures []SnapshotShardFailure `json:"failures,omitempty"` FeatureStates []InfoFeatureState `json:"feature_states,omitempty"` IncludeGlobalState *bool `json:"include_global_state,omitempty"` IndexDetails map[string]IndexDetails `json:"index_details,omitempty"` Indices []string `json:"indices,omitempty"` Metadata map[string]json.RawMessage `json:"metadata,omitempty"` Reason *string `json:"reason,omitempty"` Repository *string `json:"repository,omitempty"` Shards *ShardStatistics `json:"shards,omitempty"` Snapshot string `json:"snapshot"` StartTime DateTime `json:"start_time,omitempty"` StartTimeInMillis *int64 `json:"start_time_in_millis,omitempty"` State *string `json:"state,omitempty"` Uuid string `json:"uuid"` Version *string `json:"version,omitempty"` VersionId *int64 `json:"version_id,omitempty"` }
SnapshotInfo type.
func NewSnapshotInfo ¶
func NewSnapshotInfo() *SnapshotInfo
NewSnapshotInfo returns a SnapshotInfo.
type SnapshotLifecycle ¶
type SnapshotLifecycle struct { InProgress *InProgress `json:"in_progress,omitempty"` LastFailure *Invocation `json:"last_failure,omitempty"` LastSuccess *Invocation `json:"last_success,omitempty"` ModifiedDate DateTime `json:"modified_date,omitempty"` ModifiedDateMillis int64 `json:"modified_date_millis"` NextExecution DateTime `json:"next_execution,omitempty"` NextExecutionMillis int64 `json:"next_execution_millis"` Policy SLMPolicy `json:"policy"` Stats Statistics `json:"stats"` Version int64 `json:"version"` }
SnapshotLifecycle type.
func NewSnapshotLifecycle ¶
func NewSnapshotLifecycle() *SnapshotLifecycle
NewSnapshotLifecycle returns a SnapshotLifecycle.
type SnapshotResponseItem ¶
type SnapshotResponseItem struct { Error *ErrorCause `json:"error,omitempty"` Repository string `json:"repository"` Snapshots []SnapshotInfo `json:"snapshots,omitempty"` }
SnapshotResponseItem type.
func NewSnapshotResponseItem ¶
func NewSnapshotResponseItem() *SnapshotResponseItem
NewSnapshotResponseItem returns a SnapshotResponseItem.
type SnapshotRestore ¶
type SnapshotRestore struct { Indices []string `json:"indices"` Shards ShardStatistics `json:"shards"` Snapshot string `json:"snapshot"` }
SnapshotRestore type.
func NewSnapshotRestore ¶
func NewSnapshotRestore() *SnapshotRestore
NewSnapshotRestore returns a SnapshotRestore.
type SnapshotShardFailure ¶
type SnapshotShardFailure struct { Index string `json:"index"` NodeId string `json:"node_id"` Reason string `json:"reason"` ShardId string `json:"shard_id"` Status string `json:"status"` }
SnapshotShardFailure type.
func NewSnapshotShardFailure ¶
func NewSnapshotShardFailure() *SnapshotShardFailure
NewSnapshotShardFailure returns a SnapshotShardFailure.
type SnapshotShardsStats ¶
type SnapshotShardsStats struct { Done int64 `json:"done"` Failed int64 `json:"failed"` Finalizing int64 `json:"finalizing"` Initializing int64 `json:"initializing"` Started int64 `json:"started"` Total int64 `json:"total"` }
SnapshotShardsStats type.
func NewSnapshotShardsStats ¶
func NewSnapshotShardsStats() *SnapshotShardsStats
NewSnapshotShardsStats returns a SnapshotShardsStats.
type SnapshotShardsStatus ¶
type SnapshotShardsStatus struct { Stage shardsstatsstage.ShardsStatsStage `json:"stage"` Stats ShardsStatsSummary `json:"stats"` }
SnapshotShardsStatus type.
func NewSnapshotShardsStatus ¶
func NewSnapshotShardsStatus() *SnapshotShardsStatus
NewSnapshotShardsStatus returns a SnapshotShardsStatus.
type SnapshotStats ¶
type SnapshotStats struct { Incremental FileCountSnapshotStats `json:"incremental"` StartTimeInMillis int64 `json:"start_time_in_millis"` Time Duration `json:"time,omitempty"` TimeInMillis int64 `json:"time_in_millis"` Total FileCountSnapshotStats `json:"total"` }
SnapshotStats type.
func NewSnapshotStats ¶
func NewSnapshotStats() *SnapshotStats
NewSnapshotStats returns a SnapshotStats.
type SnapshotsRecord ¶
type SnapshotsRecord struct { // Duration duration Duration Duration `json:"duration,omitempty"` // EndEpoch end time in seconds since 1970-01-01 00:00:00 EndEpoch StringifiedEpochTimeUnitSeconds `json:"end_epoch,omitempty"` // EndTime end time in HH:MM:SS EndTime *string `json:"end_time,omitempty"` // FailedShards number of failed shards FailedShards *string `json:"failed_shards,omitempty"` // Id unique snapshot Id *string `json:"id,omitempty"` // Indices number of indices Indices *string `json:"indices,omitempty"` // Reason reason for failures Reason *string `json:"reason,omitempty"` // Repository repository name Repository *string `json:"repository,omitempty"` // StartEpoch start time in seconds since 1970-01-01 00:00:00 StartEpoch StringifiedEpochTimeUnitSeconds `json:"start_epoch,omitempty"` // StartTime start time in HH:MM:SS StartTime ScheduleTimeOfDay `json:"start_time,omitempty"` // Status snapshot name Status *string `json:"status,omitempty"` // SuccessfulShards number of successful shards SuccessfulShards *string `json:"successful_shards,omitempty"` // TotalShards number of total shards TotalShards *string `json:"total_shards,omitempty"` }
SnapshotsRecord type.
func NewSnapshotsRecord ¶
func NewSnapshotsRecord() *SnapshotsRecord
NewSnapshotsRecord returns a SnapshotsRecord.
func (*SnapshotsRecord) UnmarshalJSON ¶
func (s *SnapshotsRecord) UnmarshalJSON(data []byte) error
type SnowballAnalyzer ¶
type SnowballAnalyzer struct { Language snowballlanguage.SnowballLanguage `json:"language"` Stopwords []string `json:"stopwords,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
SnowballAnalyzer type.
func NewSnowballAnalyzer ¶
func NewSnowballAnalyzer() *SnowballAnalyzer
NewSnowballAnalyzer returns a SnowballAnalyzer.
type SnowballTokenFilter ¶
type SnowballTokenFilter struct { Language snowballlanguage.SnowballLanguage `json:"language"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
SnowballTokenFilter type.
func NewSnowballTokenFilter ¶
func NewSnowballTokenFilter() *SnowballTokenFilter
NewSnowballTokenFilter returns a SnowballTokenFilter.
type SoftDeletes ¶
type SoftDeletes struct { // Enabled Indicates whether soft deletes are enabled on the index. Enabled *bool `json:"enabled,omitempty"` // RetentionLease The maximum period to retain a shard history retention lease before it is // considered expired. // Shard history retention leases ensure that soft deletes are retained during // merges on the Lucene // index. If a soft delete is merged away before it can be replicated to a // follower the following // process will fail due to incomplete history on the leader. RetentionLease *RetentionLease `json:"retention_lease,omitempty"` }
SoftDeletes type.
type SortCombinations ¶
type SortCombinations interface{}
SortCombinations holds the union for the following types:
string SortOptions
type SortOptions ¶
type SortOptions struct { Doc_ *ScoreSort `json:"_doc,omitempty"` GeoDistance_ *GeoDistanceSort `json:"_geo_distance,omitempty"` Score_ *ScoreSort `json:"_score,omitempty"` Script_ *ScriptSort `json:"_script,omitempty"` SortOptions map[string]FieldSort `json:"-"` }
SortOptions type.
func (SortOptions) MarshalJSON ¶
func (s SortOptions) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type SortProcessor ¶
type SortProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Order *sortorder.SortOrder `json:"order,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
SortProcessor type.
func NewSortProcessor ¶
func NewSortProcessor() *SortProcessor
NewSortProcessor returns a SortProcessor.
type SourceConfig ¶
type SourceConfig interface{}
SourceConfig holds the union for the following types:
bool SourceFilter
type SourceConfigParam ¶
type SourceConfigParam interface{}
SourceConfigParam holds the union for the following types:
bool []string
type SourceField ¶
type SourceField struct { Compress *bool `json:"compress,omitempty"` CompressThreshold *string `json:"compress_threshold,omitempty"` Enabled *bool `json:"enabled,omitempty"` Excludes []string `json:"excludes,omitempty"` Includes []string `json:"includes,omitempty"` Mode *sourcefieldmode.SourceFieldMode `json:"mode,omitempty"` }
SourceField type.
type SourceFilter ¶
type SourceFilter struct { Excludes []string `json:"excludes,omitempty"` Includes []string `json:"includes,omitempty"` }
SourceFilter type.
func NewSourceFilter ¶
func NewSourceFilter() *SourceFilter
NewSourceFilter returns a SourceFilter.
type SpanContainingQuery ¶
type SpanContainingQuery struct { Big *SpanQuery `json:"big,omitempty"` Boost *float32 `json:"boost,omitempty"` Little *SpanQuery `json:"little,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
SpanContainingQuery type.
func NewSpanContainingQuery ¶
func NewSpanContainingQuery() *SpanContainingQuery
NewSpanContainingQuery returns a SpanContainingQuery.
type SpanFieldMaskingQuery ¶
type SpanFieldMaskingQuery struct { Boost *float32 `json:"boost,omitempty"` Field string `json:"field"` Query *SpanQuery `json:"query,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
SpanFieldMaskingQuery type.
func NewSpanFieldMaskingQuery ¶
func NewSpanFieldMaskingQuery() *SpanFieldMaskingQuery
NewSpanFieldMaskingQuery returns a SpanFieldMaskingQuery.
type SpanFirstQuery ¶
type SpanFirstQuery struct { Boost *float32 `json:"boost,omitempty"` End int `json:"end"` Match *SpanQuery `json:"match,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
SpanFirstQuery type.
func NewSpanFirstQuery ¶
func NewSpanFirstQuery() *SpanFirstQuery
NewSpanFirstQuery returns a SpanFirstQuery.
type SpanGapQuery ¶
SpanGapQuery type alias.
type SpanMultiTermQuery ¶
type SpanMultiTermQuery struct { Boost *float32 `json:"boost,omitempty"` // Match Should be a multi term query (one of wildcard, fuzzy, prefix, range or regexp // query) Match *Query `json:"match,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
SpanMultiTermQuery type.
func NewSpanMultiTermQuery ¶
func NewSpanMultiTermQuery() *SpanMultiTermQuery
NewSpanMultiTermQuery returns a SpanMultiTermQuery.
type SpanNearQuery ¶
type SpanNearQuery struct { Boost *float32 `json:"boost,omitempty"` Clauses []SpanQuery `json:"clauses"` InOrder *bool `json:"in_order,omitempty"` QueryName_ *string `json:"_name,omitempty"` Slop *int `json:"slop,omitempty"` }
SpanNearQuery type.
func NewSpanNearQuery ¶
func NewSpanNearQuery() *SpanNearQuery
NewSpanNearQuery returns a SpanNearQuery.
type SpanNotQuery ¶
type SpanNotQuery struct { Boost *float32 `json:"boost,omitempty"` Dist *int `json:"dist,omitempty"` Exclude *SpanQuery `json:"exclude,omitempty"` Include *SpanQuery `json:"include,omitempty"` Post *int `json:"post,omitempty"` Pre *int `json:"pre,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
SpanNotQuery type.
func NewSpanNotQuery ¶
func NewSpanNotQuery() *SpanNotQuery
NewSpanNotQuery returns a SpanNotQuery.
type SpanOrQuery ¶
type SpanOrQuery struct { Boost *float32 `json:"boost,omitempty"` Clauses []SpanQuery `json:"clauses"` QueryName_ *string `json:"_name,omitempty"` }
SpanOrQuery type.
type SpanQuery ¶
type SpanQuery struct { FieldMaskingSpan *SpanFieldMaskingQuery `json:"field_masking_span,omitempty"` SpanContaining *SpanContainingQuery `json:"span_containing,omitempty"` SpanFirst *SpanFirstQuery `json:"span_first,omitempty"` SpanGap map[string]int `json:"span_gap,omitempty"` SpanMulti *SpanMultiTermQuery `json:"span_multi,omitempty"` SpanNear *SpanNearQuery `json:"span_near,omitempty"` SpanNot *SpanNotQuery `json:"span_not,omitempty"` SpanOr *SpanOrQuery `json:"span_or,omitempty"` SpanTerm map[string]SpanTermQuery `json:"span_term,omitempty"` SpanWithin *SpanWithinQuery `json:"span_within,omitempty"` }
SpanQuery type.
type SpanTermQuery ¶
type SpanTermQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` Value string `json:"value"` }
SpanTermQuery type.
func NewSpanTermQuery ¶
func NewSpanTermQuery() *SpanTermQuery
NewSpanTermQuery returns a SpanTermQuery.
type SpanWithinQuery ¶
type SpanWithinQuery struct { Big *SpanQuery `json:"big,omitempty"` Boost *float32 `json:"boost,omitempty"` Little *SpanQuery `json:"little,omitempty"` QueryName_ *string `json:"_name,omitempty"` }
SpanWithinQuery type.
func NewSpanWithinQuery ¶
func NewSpanWithinQuery() *SpanWithinQuery
NewSpanWithinQuery returns a SpanWithinQuery.
type SplitProcessor ¶
type SplitProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` PreserveTrailing *bool `json:"preserve_trailing,omitempty"` Separator string `json:"separator"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
SplitProcessor type.
func NewSplitProcessor ¶
func NewSplitProcessor() *SplitProcessor
NewSplitProcessor returns a SplitProcessor.
type Sql ¶
type Sql struct { Available bool `json:"available"` Enabled bool `json:"enabled"` Features map[string]int `json:"features"` Queries map[string]XpackQuery `json:"queries"` }
Sql type.
type Ssl ¶
type Ssl struct { Http FeatureToggle `json:"http"` Transport FeatureToggle `json:"transport"` }
Ssl type.
type StandardAnalyzer ¶
type StandardAnalyzer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Stopwords []string `json:"stopwords,omitempty"` Type string `json:"type,omitempty"` }
StandardAnalyzer type.
func NewStandardAnalyzer ¶
func NewStandardAnalyzer() *StandardAnalyzer
NewStandardAnalyzer returns a StandardAnalyzer.
type StandardDeviationBounds ¶
type StandardDeviationBounds struct { Lower Float64 `json:"lower,omitempty"` LowerPopulation Float64 `json:"lower_population,omitempty"` LowerSampling Float64 `json:"lower_sampling,omitempty"` Upper Float64 `json:"upper,omitempty"` UpperPopulation Float64 `json:"upper_population,omitempty"` UpperSampling Float64 `json:"upper_sampling,omitempty"` }
StandardDeviationBounds type.
func NewStandardDeviationBounds ¶
func NewStandardDeviationBounds() *StandardDeviationBounds
NewStandardDeviationBounds returns a StandardDeviationBounds.
type StandardDeviationBoundsAsString ¶
type StandardDeviationBoundsAsString struct { Lower string `json:"lower"` LowerPopulation string `json:"lower_population"` LowerSampling string `json:"lower_sampling"` Upper string `json:"upper"` UpperPopulation string `json:"upper_population"` UpperSampling string `json:"upper_sampling"` }
StandardDeviationBoundsAsString type.
func NewStandardDeviationBoundsAsString ¶
func NewStandardDeviationBoundsAsString() *StandardDeviationBoundsAsString
NewStandardDeviationBoundsAsString returns a StandardDeviationBoundsAsString.
type StandardTokenizer ¶
type StandardTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
StandardTokenizer type.
func NewStandardTokenizer ¶
func NewStandardTokenizer() *StandardTokenizer
NewStandardTokenizer returns a StandardTokenizer.
type Statistics ¶
type Statistics struct { Policy *string `json:"policy,omitempty"` RetentionDeletionTime Duration `json:"retention_deletion_time,omitempty"` RetentionDeletionTimeMillis *int64 `json:"retention_deletion_time_millis,omitempty"` RetentionFailed *int64 `json:"retention_failed,omitempty"` RetentionRuns *int64 `json:"retention_runs,omitempty"` RetentionTimedOut *int64 `json:"retention_timed_out,omitempty"` TotalSnapshotDeletionFailures *int64 `json:"total_snapshot_deletion_failures,omitempty"` TotalSnapshotsDeleted *int64 `json:"total_snapshots_deleted,omitempty"` TotalSnapshotsFailed *int64 `json:"total_snapshots_failed,omitempty"` TotalSnapshotsTaken *int64 `json:"total_snapshots_taken,omitempty"` }
Statistics type.
type Stats ¶
type Stats struct { AdaptiveSelection map[string]AdaptiveSelection `json:"adaptive_selection,omitempty"` Attributes map[string]string `json:"attributes,omitempty"` Breakers map[string]Breaker `json:"breakers,omitempty"` Discovery *Discovery `json:"discovery,omitempty"` Fs *FileSystem `json:"fs,omitempty"` Host *string `json:"host,omitempty"` Http *Http `json:"http,omitempty"` IndexingPressure *NodesIndexingPressure `json:"indexing_pressure,omitempty"` Indices *IndicesShardStats `json:"indices,omitempty"` Ingest *NodesIngest `json:"ingest,omitempty"` Ip []string `json:"ip,omitempty"` Jvm *Jvm `json:"jvm,omitempty"` Name *string `json:"name,omitempty"` Os *OperatingSystem `json:"os,omitempty"` Process *Process `json:"process,omitempty"` Roles []noderole.NodeRole `json:"roles,omitempty"` Script *Scripting `json:"script,omitempty"` ScriptCache map[string][]ScriptCache `json:"script_cache,omitempty"` ThreadPool map[string]ThreadCount `json:"thread_pool,omitempty"` Timestamp *int64 `json:"timestamp,omitempty"` Transport *Transport `json:"transport,omitempty"` TransportAddress *string `json:"transport_address,omitempty"` }
Stats type.
type StatsAggregate ¶
type StatsAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` Max Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Min Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` Sum Float64 `json:"sum"` SumAsString *string `json:"sum_as_string,omitempty"` }
StatsAggregate type.
func NewStatsAggregate ¶
func NewStatsAggregate() *StatsAggregate
NewStatsAggregate returns a StatsAggregate.
type StatsAggregation ¶
type StatsAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
StatsAggregation type.
func NewStatsAggregation ¶
func NewStatsAggregation() *StatsAggregation
NewStatsAggregation returns a StatsAggregation.
type StatsBucketAggregate ¶
type StatsBucketAggregate struct { Avg Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` Max Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Min Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` Sum Float64 `json:"sum"` SumAsString *string `json:"sum_as_string,omitempty"` }
StatsBucketAggregate type.
func NewStatsBucketAggregate ¶
func NewStatsBucketAggregate() *StatsBucketAggregate
NewStatsBucketAggregate returns a StatsBucketAggregate.
type StatsBucketAggregation ¶
type StatsBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
StatsBucketAggregation type.
func NewStatsBucketAggregation ¶
func NewStatsBucketAggregation() *StatsBucketAggregation
NewStatsBucketAggregation returns a StatsBucketAggregation.
func (*StatsBucketAggregation) UnmarshalJSON ¶
func (s *StatsBucketAggregation) UnmarshalJSON(data []byte) error
type Status ¶
type Status struct { IncludeGlobalState bool `json:"include_global_state"` Indices map[string]SnapshotIndexStats `json:"indices"` Repository string `json:"repository"` ShardsStats SnapshotShardsStats `json:"shards_stats"` Snapshot string `json:"snapshot"` State string `json:"state"` Stats SnapshotStats `json:"stats"` Uuid string `json:"uuid"` }
Status type.
type StemmerOverrideTokenFilter ¶
type StemmerOverrideTokenFilter struct { Rules []string `json:"rules,omitempty"` RulesPath *string `json:"rules_path,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
StemmerOverrideTokenFilter type.
func NewStemmerOverrideTokenFilter ¶
func NewStemmerOverrideTokenFilter() *StemmerOverrideTokenFilter
NewStemmerOverrideTokenFilter returns a StemmerOverrideTokenFilter.
type StemmerTokenFilter ¶
type StemmerTokenFilter struct { Language string `json:"language"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
StemmerTokenFilter type.
func NewStemmerTokenFilter ¶
func NewStemmerTokenFilter() *StemmerTokenFilter
NewStemmerTokenFilter returns a StemmerTokenFilter.
type StepKey ¶
type StepKey struct { Action string `json:"action"` Name string `json:"name"` Phase string `json:"phase"` }
StepKey type.
type StopAnalyzer ¶
type StopAnalyzer struct { Stopwords []string `json:"stopwords,omitempty"` StopwordsPath *string `json:"stopwords_path,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
StopAnalyzer type.
func NewStopAnalyzer ¶
func NewStopAnalyzer() *StopAnalyzer
NewStopAnalyzer returns a StopAnalyzer.
type StopTokenFilter ¶
type StopTokenFilter struct { IgnoreCase *bool `json:"ignore_case,omitempty"` RemoveTrailing *bool `json:"remove_trailing,omitempty"` Stopwords []string `json:"stopwords,omitempty"` StopwordsPath *string `json:"stopwords_path,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
StopTokenFilter type.
func NewStopTokenFilter ¶
func NewStopTokenFilter() *StopTokenFilter
NewStopTokenFilter returns a StopTokenFilter.
type Storage ¶
type Storage struct { // AllowMmap You can restrict the use of the mmapfs and the related hybridfs store type // via the setting node.store.allow_mmap. // This is a boolean setting indicating whether or not memory-mapping is // allowed. The default is to allow it. This // setting is useful, for example, if you are in an environment where you can // not control the ability to create a lot // of memory maps so you need disable the ability to use memory-mapping. AllowMmap *bool `json:"allow_mmap,omitempty"` Type storagetype.StorageType `json:"type"` }
Storage type.
type StoreStats ¶
type StoreStats struct { Reserved ByteSize `json:"reserved,omitempty"` ReservedInBytes int `json:"reserved_in_bytes"` Size ByteSize `json:"size,omitempty"` SizeInBytes int `json:"size_in_bytes"` TotalDataSetSize ByteSize `json:"total_data_set_size,omitempty"` TotalDataSetSizeInBytes *int `json:"total_data_set_size_in_bytes,omitempty"` }
StoreStats type.
type StoredScript ¶
type StoredScript struct { Lang scriptlanguage.ScriptLanguage `json:"lang"` Options map[string]string `json:"options,omitempty"` Source string `json:"source"` }
StoredScript type.
func NewStoredScript ¶
func NewStoredScript() *StoredScript
NewStoredScript returns a StoredScript.
type StoredScriptId ¶
type StoredScriptId struct { Id string `json:"id"` Params map[string]json.RawMessage `json:"params,omitempty"` }
StoredScriptId type.
func NewStoredScriptId ¶
func NewStoredScriptId() *StoredScriptId
NewStoredScriptId returns a StoredScriptId.
type StringRareTermsAggregate ¶
type StringRareTermsAggregate struct { Buckets BucketsStringRareTermsBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
StringRareTermsAggregate type.
func NewStringRareTermsAggregate ¶
func NewStringRareTermsAggregate() *StringRareTermsAggregate
NewStringRareTermsAggregate returns a StringRareTermsAggregate.
func (*StringRareTermsAggregate) UnmarshalJSON ¶
func (s *StringRareTermsAggregate) UnmarshalJSON(data []byte) error
type StringRareTermsBucket ¶
type StringRareTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key string `json:"key"` }
StringRareTermsBucket type.
func NewStringRareTermsBucket ¶
func NewStringRareTermsBucket() *StringRareTermsBucket
NewStringRareTermsBucket returns a StringRareTermsBucket.
func (StringRareTermsBucket) MarshalJSON ¶
func (s StringRareTermsBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*StringRareTermsBucket) UnmarshalJSON ¶
func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error
type StringStatsAggregate ¶
type StringStatsAggregate struct { AvgLength Float64 `json:"avg_length,omitempty"` AvgLengthAsString *string `json:"avg_length_as_string,omitempty"` Count int64 `json:"count"` Distribution map[string]Float64 `json:"distribution,omitempty"` Entropy Float64 `json:"entropy,omitempty"` MaxLength int `json:"max_length,omitempty"` MaxLengthAsString *string `json:"max_length_as_string,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` MinLength int `json:"min_length,omitempty"` MinLengthAsString *string `json:"min_length_as_string,omitempty"` }
StringStatsAggregate type.
func NewStringStatsAggregate ¶
func NewStringStatsAggregate() *StringStatsAggregate
NewStringStatsAggregate returns a StringStatsAggregate.
type StringStatsAggregation ¶
type StringStatsAggregation struct { Field *string `json:"field,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` ShowDistribution *bool `json:"show_distribution,omitempty"` }
StringStatsAggregation type.
func NewStringStatsAggregation ¶
func NewStringStatsAggregation() *StringStatsAggregation
NewStringStatsAggregation returns a StringStatsAggregation.
type StringTermsAggregate ¶
type StringTermsAggregate struct { Buckets BucketsStringTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
StringTermsAggregate type.
func NewStringTermsAggregate ¶
func NewStringTermsAggregate() *StringTermsAggregate
NewStringTermsAggregate returns a StringTermsAggregate.
func (*StringTermsAggregate) UnmarshalJSON ¶
func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error
type StringTermsBucket ¶
type StringTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` DocCountError *int64 `json:"doc_count_error,omitempty"` Key FieldValue `json:"key"` }
StringTermsBucket type.
func NewStringTermsBucket ¶
func NewStringTermsBucket() *StringTermsBucket
NewStringTermsBucket returns a StringTermsBucket.
func (StringTermsBucket) MarshalJSON ¶
func (s StringTermsBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*StringTermsBucket) UnmarshalJSON ¶
func (s *StringTermsBucket) UnmarshalJSON(data []byte) error
type StringifiedEpochTimeUnitMillis ¶
type StringifiedEpochTimeUnitMillis interface{}
StringifiedEpochTimeUnitMillis holds the union for the following types:
int64 string
type StringifiedEpochTimeUnitSeconds ¶
type StringifiedEpochTimeUnitSeconds interface{}
StringifiedEpochTimeUnitSeconds holds the union for the following types:
int64 string
type StringifiedVersionNumber ¶
type StringifiedVersionNumber interface{}
StringifiedVersionNumber holds the union for the following types:
int64 string
type StupidBackoffSmoothingModel ¶
type StupidBackoffSmoothingModel struct {
Discount Float64 `json:"discount"`
}
StupidBackoffSmoothingModel type.
func NewStupidBackoffSmoothingModel ¶
func NewStupidBackoffSmoothingModel() *StupidBackoffSmoothingModel
NewStupidBackoffSmoothingModel returns a StupidBackoffSmoothingModel.
type Suggest ¶
type Suggest interface{}
Suggest holds the union for the following types:
CompletionSuggest PhraseSuggest TermSuggest
type SuggestContext ¶
type SuggestContext struct { Name string `json:"name"` Path *string `json:"path,omitempty"` Precision string `json:"precision,omitempty"` Type string `json:"type"` }
SuggestContext type.
func NewSuggestContext ¶
func NewSuggestContext() *SuggestContext
NewSuggestContext returns a SuggestContext.
type SuggestFuzziness ¶
type SuggestFuzziness struct { Fuzziness Fuzziness `json:"fuzziness,omitempty"` MinLength *int `json:"min_length,omitempty"` PrefixLength *int `json:"prefix_length,omitempty"` Transpositions *bool `json:"transpositions,omitempty"` UnicodeAware *bool `json:"unicode_aware,omitempty"` }
SuggestFuzziness type.
func NewSuggestFuzziness ¶
func NewSuggestFuzziness() *SuggestFuzziness
NewSuggestFuzziness returns a SuggestFuzziness.
type Suggester ¶
type Suggester struct { Suggesters map[string]FieldSuggester `json:"-"` // Text Global suggest text, to avoid repetition when the same text is used in // several suggesters Text *string `json:"text,omitempty"` }
Suggester type.
func (Suggester) MarshalJSON ¶
MarshalJSON overrides marshalling for types with additional properties
type SumAggregate ¶
type SumAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
SumAggregate type.
func NewSumAggregate ¶
func NewSumAggregate() *SumAggregate
NewSumAggregate returns a SumAggregate.
type SumAggregation ¶
type SumAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
SumAggregation type.
func NewSumAggregation ¶
func NewSumAggregation() *SumAggregation
NewSumAggregation returns a SumAggregation.
type SumBucketAggregation ¶
type SumBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` Format *string `json:"format,omitempty"` GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` }
SumBucketAggregation type.
func NewSumBucketAggregation ¶
func NewSumBucketAggregation() *SumBucketAggregation
NewSumBucketAggregation returns a SumBucketAggregation.
func (*SumBucketAggregation) UnmarshalJSON ¶
func (s *SumBucketAggregation) UnmarshalJSON(data []byte) error
type Summary ¶
type Summary struct {
Config map[policytype.PolicyType]EnrichPolicy `json:"config"`
}
Summary type.
type SyncContainer ¶
type SyncContainer struct { // Time Specifies that the transform uses a time field to synchronize the source and // destination indices. Time *TimeSync `json:"time,omitempty"` }
SyncContainer type.
func NewSyncContainer ¶
func NewSyncContainer() *SyncContainer
NewSyncContainer returns a SyncContainer.
type SynonymGraphTokenFilter ¶
type SynonymGraphTokenFilter struct { Expand *bool `json:"expand,omitempty"` Format *synonymformat.SynonymFormat `json:"format,omitempty"` Lenient *bool `json:"lenient,omitempty"` Synonyms []string `json:"synonyms,omitempty"` SynonymsPath *string `json:"synonyms_path,omitempty"` Tokenizer *string `json:"tokenizer,omitempty"` Type string `json:"type,omitempty"` Updateable *bool `json:"updateable,omitempty"` Version *string `json:"version,omitempty"` }
SynonymGraphTokenFilter type.
func NewSynonymGraphTokenFilter ¶
func NewSynonymGraphTokenFilter() *SynonymGraphTokenFilter
NewSynonymGraphTokenFilter returns a SynonymGraphTokenFilter.
type SynonymTokenFilter ¶
type SynonymTokenFilter struct { Expand *bool `json:"expand,omitempty"` Format *synonymformat.SynonymFormat `json:"format,omitempty"` Lenient *bool `json:"lenient,omitempty"` Synonyms []string `json:"synonyms,omitempty"` SynonymsPath *string `json:"synonyms_path,omitempty"` Tokenizer *string `json:"tokenizer,omitempty"` Type string `json:"type,omitempty"` Updateable *bool `json:"updateable,omitempty"` Version *string `json:"version,omitempty"` }
SynonymTokenFilter type.
func NewSynonymTokenFilter ¶
func NewSynonymTokenFilter() *SynonymTokenFilter
NewSynonymTokenFilter returns a SynonymTokenFilter.
type TDigest ¶
type TDigest struct {
Compression *int `json:"compression,omitempty"`
}
TDigest type.
type TDigestPercentileRanksAggregate ¶
type TDigestPercentileRanksAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Values Percentiles `json:"values"` }
TDigestPercentileRanksAggregate type.
func NewTDigestPercentileRanksAggregate ¶
func NewTDigestPercentileRanksAggregate() *TDigestPercentileRanksAggregate
NewTDigestPercentileRanksAggregate returns a TDigestPercentileRanksAggregate.
type TDigestPercentilesAggregate ¶
type TDigestPercentilesAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Values Percentiles `json:"values"` }
TDigestPercentilesAggregate type.
func NewTDigestPercentilesAggregate ¶
func NewTDigestPercentilesAggregate() *TDigestPercentilesAggregate
NewTDigestPercentilesAggregate returns a TDigestPercentilesAggregate.
type TTestAggregate ¶
type TTestAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
TTestAggregate type.
func NewTTestAggregate ¶
func NewTTestAggregate() *TTestAggregate
NewTTestAggregate returns a TTestAggregate.
type TTestAggregation ¶
type TTestAggregation struct { A *TestPopulation `json:"a,omitempty"` B *TestPopulation `json:"b,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Type *ttesttype.TTestType `json:"type,omitempty"` }
TTestAggregation type.
func NewTTestAggregation ¶
func NewTTestAggregation() *TTestAggregation
NewTTestAggregation returns a TTestAggregation.
type TargetMeanEncodingPreprocessor ¶
type TargetMeanEncodingPreprocessor struct { DefaultValue Float64 `json:"default_value"` FeatureName string `json:"feature_name"` Field string `json:"field"` TargetMap map[string]Float64 `json:"target_map"` }
TargetMeanEncodingPreprocessor type.
func NewTargetMeanEncodingPreprocessor ¶
func NewTargetMeanEncodingPreprocessor() *TargetMeanEncodingPreprocessor
NewTargetMeanEncodingPreprocessor returns a TargetMeanEncodingPreprocessor.
type TaskFailure ¶
type TaskFailure struct { NodeId string `json:"node_id"` Reason ErrorCause `json:"reason"` Status string `json:"status"` TaskId int64 `json:"task_id"` }
TaskFailure type.
type TaskInfo ¶
type TaskInfo struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` Cancelled *bool `json:"cancelled,omitempty"` Description *string `json:"description,omitempty"` Headers map[string]string `json:"headers"` Id int64 `json:"id"` Node string `json:"node"` ParentTaskId TaskId `json:"parent_task_id,omitempty"` RunningTime Duration `json:"running_time,omitempty"` RunningTimeInNanos int64 `json:"running_time_in_nanos"` StartTimeInMillis int64 `json:"start_time_in_millis"` Status *TaskStatus `json:"status,omitempty"` Type string `json:"type"` }
TaskInfo type.
type TaskInfos ¶
type TaskInfos interface{}
TaskInfos holds the union for the following types:
[]TaskInfo map[string]ParentTaskInfo
type TaskStatus ¶
type TaskStatus struct { Batches int64 `json:"batches"` Canceled *string `json:"canceled,omitempty"` Created int64 `json:"created"` Deleted int64 `json:"deleted"` Failures []string `json:"failures,omitempty"` Noops int64 `json:"noops"` RequestsPerSecond float32 `json:"requests_per_second"` Retries Retries `json:"retries"` Throttled Duration `json:"throttled,omitempty"` ThrottledMillis int64 `json:"throttled_millis"` ThrottledUntil Duration `json:"throttled_until,omitempty"` ThrottledUntilMillis int64 `json:"throttled_until_millis"` TimedOut *bool `json:"timed_out,omitempty"` Took *int64 `json:"took,omitempty"` Total int64 `json:"total"` Updated int64 `json:"updated"` VersionConflicts int64 `json:"version_conflicts"` }
TaskStatus type.
type TasksRecord ¶
type TasksRecord struct { // Action task action Action *string `json:"action,omitempty"` // Description task action Description *string `json:"description,omitempty"` // Id id of the task with the node Id *string `json:"id,omitempty"` // Ip ip address Ip *string `json:"ip,omitempty"` // Node node name Node *string `json:"node,omitempty"` // NodeId unique node id NodeId *string `json:"node_id,omitempty"` // ParentTaskId parent task id ParentTaskId *string `json:"parent_task_id,omitempty"` // Port bound transport port Port *string `json:"port,omitempty"` // RunningTime running time RunningTime *string `json:"running_time,omitempty"` // RunningTimeNs running time ns RunningTimeNs *string `json:"running_time_ns,omitempty"` // StartTime start time in ms StartTime *string `json:"start_time,omitempty"` // TaskId unique task id TaskId *string `json:"task_id,omitempty"` // Timestamp start time in HH:MM:SS Timestamp *string `json:"timestamp,omitempty"` // Type task type Type *string `json:"type,omitempty"` // Version es version Version *string `json:"version,omitempty"` // XOpaqueId X-Opaque-ID header XOpaqueId *string `json:"x_opaque_id,omitempty"` }
TasksRecord type.
type Template ¶
type Template struct { Aliases map[string]Alias `json:"aliases"` Mappings TypeMapping `json:"mappings"` Settings IndexSettings `json:"settings"` }
Template type.
type TemplateMapping ¶
type TemplateMapping struct { Aliases map[string]Alias `json:"aliases"` IndexPatterns []string `json:"index_patterns"` Mappings TypeMapping `json:"mappings"` Order int `json:"order"` Settings map[string]json.RawMessage `json:"settings"` Version *int64 `json:"version,omitempty"` }
TemplateMapping type.
func NewTemplateMapping ¶
func NewTemplateMapping() *TemplateMapping
NewTemplateMapping returns a TemplateMapping.
type TemplatesRecord ¶
type TemplatesRecord struct { // ComposedOf component templates comprising index template ComposedOf *string `json:"composed_of,omitempty"` // IndexPatterns template index patterns IndexPatterns *string `json:"index_patterns,omitempty"` // Name template name Name *string `json:"name,omitempty"` // Order template application order/priority number Order *string `json:"order,omitempty"` // Version version Version string `json:"version,omitempty"` }
TemplatesRecord type.
func NewTemplatesRecord ¶
func NewTemplatesRecord() *TemplatesRecord
NewTemplatesRecord returns a TemplatesRecord.
type Term ¶
type Term struct { DocFreq *int `json:"doc_freq,omitempty"` Score *Float64 `json:"score,omitempty"` TermFreq int `json:"term_freq"` Tokens []TermVectorsToken `json:"tokens,omitempty"` Ttf *int `json:"ttf,omitempty"` }
Term type.
type TermQuery ¶
type TermQuery struct { Boost *float32 `json:"boost,omitempty"` CaseInsensitive *bool `json:"case_insensitive,omitempty"` QueryName_ *string `json:"_name,omitempty"` Value FieldValue `json:"value"` }
TermQuery type.
type TermSuggest ¶
type TermSuggest struct { Length int `json:"length"` Offset int `json:"offset"` Options []TermSuggestOption `json:"options"` Text string `json:"text"` }
TermSuggest type.
type TermSuggestOption ¶
type TermSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Freq int64 `json:"freq"` Highlighted *string `json:"highlighted,omitempty"` Score Float64 `json:"score"` Text string `json:"text"` }
TermSuggestOption type.
func NewTermSuggestOption ¶
func NewTermSuggestOption() *TermSuggestOption
NewTermSuggestOption returns a TermSuggestOption.
type TermSuggester ¶
type TermSuggester struct { Analyzer *string `json:"analyzer,omitempty"` Field string `json:"field"` LowercaseTerms *bool `json:"lowercase_terms,omitempty"` MaxEdits *int `json:"max_edits,omitempty"` MaxInspections *int `json:"max_inspections,omitempty"` MaxTermFreq *float32 `json:"max_term_freq,omitempty"` MinDocFreq *float32 `json:"min_doc_freq,omitempty"` MinWordLength *int `json:"min_word_length,omitempty"` PrefixLength *int `json:"prefix_length,omitempty"` ShardSize *int `json:"shard_size,omitempty"` Size *int `json:"size,omitempty"` Sort *suggestsort.SuggestSort `json:"sort,omitempty"` StringDistance *stringdistance.StringDistance `json:"string_distance,omitempty"` SuggestMode *suggestmode.SuggestMode `json:"suggest_mode,omitempty"` Text *string `json:"text,omitempty"` }
TermSuggester type.
func NewTermSuggester ¶
func NewTermSuggester() *TermSuggester
NewTermSuggester returns a TermSuggester.
type TermVector ¶
type TermVector struct { FieldStatistics FieldStatistics `json:"field_statistics"` Terms map[string]Term `json:"terms"` }
TermVector type.
type TermVectorsFilter ¶
type TermVectorsFilter struct { MaxDocFreq *int `json:"max_doc_freq,omitempty"` MaxNumTerms *int `json:"max_num_terms,omitempty"` MaxTermFreq *int `json:"max_term_freq,omitempty"` MaxWordLength *int `json:"max_word_length,omitempty"` MinDocFreq *int `json:"min_doc_freq,omitempty"` MinTermFreq *int `json:"min_term_freq,omitempty"` MinWordLength *int `json:"min_word_length,omitempty"` }
TermVectorsFilter type.
func NewTermVectorsFilter ¶
func NewTermVectorsFilter() *TermVectorsFilter
NewTermVectorsFilter returns a TermVectorsFilter.
type TermVectorsResult ¶
type TermVectorsResult struct { Error *ErrorCause `json:"error,omitempty"` Found *bool `json:"found,omitempty"` Id_ string `json:"_id"` Index_ string `json:"_index"` TermVectors map[string]TermVector `json:"term_vectors,omitempty"` Took *int64 `json:"took,omitempty"` Version_ *int64 `json:"_version,omitempty"` }
TermVectorsResult type.
func NewTermVectorsResult ¶
func NewTermVectorsResult() *TermVectorsResult
NewTermVectorsResult returns a TermVectorsResult.
type TermVectorsToken ¶
type TermVectorsToken struct { EndOffset *int `json:"end_offset,omitempty"` Payload *string `json:"payload,omitempty"` Position int `json:"position"` StartOffset *int `json:"start_offset,omitempty"` }
TermVectorsToken type.
func NewTermVectorsToken ¶
func NewTermVectorsToken() *TermVectorsToken
NewTermVectorsToken returns a TermVectorsToken.
type TermsAggregateBaseDoubleTermsBucket ¶
type TermsAggregateBaseDoubleTermsBucket struct { Buckets BucketsDoubleTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
TermsAggregateBaseDoubleTermsBucket type.
func NewTermsAggregateBaseDoubleTermsBucket ¶
func NewTermsAggregateBaseDoubleTermsBucket() *TermsAggregateBaseDoubleTermsBucket
NewTermsAggregateBaseDoubleTermsBucket returns a TermsAggregateBaseDoubleTermsBucket.
func (*TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON ¶
func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error
type TermsAggregateBaseLongTermsBucket ¶
type TermsAggregateBaseLongTermsBucket struct { Buckets BucketsLongTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
TermsAggregateBaseLongTermsBucket type.
func NewTermsAggregateBaseLongTermsBucket ¶
func NewTermsAggregateBaseLongTermsBucket() *TermsAggregateBaseLongTermsBucket
NewTermsAggregateBaseLongTermsBucket returns a TermsAggregateBaseLongTermsBucket.
func (*TermsAggregateBaseLongTermsBucket) UnmarshalJSON ¶
func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error
type TermsAggregateBaseMultiTermsBucket ¶
type TermsAggregateBaseMultiTermsBucket struct { Buckets BucketsMultiTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
TermsAggregateBaseMultiTermsBucket type.
func NewTermsAggregateBaseMultiTermsBucket ¶
func NewTermsAggregateBaseMultiTermsBucket() *TermsAggregateBaseMultiTermsBucket
NewTermsAggregateBaseMultiTermsBucket returns a TermsAggregateBaseMultiTermsBucket.
func (*TermsAggregateBaseMultiTermsBucket) UnmarshalJSON ¶
func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error
type TermsAggregateBaseStringTermsBucket ¶
type TermsAggregateBaseStringTermsBucket struct { Buckets BucketsStringTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
TermsAggregateBaseStringTermsBucket type.
func NewTermsAggregateBaseStringTermsBucket ¶
func NewTermsAggregateBaseStringTermsBucket() *TermsAggregateBaseStringTermsBucket
NewTermsAggregateBaseStringTermsBucket returns a TermsAggregateBaseStringTermsBucket.
func (*TermsAggregateBaseStringTermsBucket) UnmarshalJSON ¶
func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error
type TermsAggregateBaseVoid ¶
type TermsAggregateBaseVoid struct { Buckets BucketsVoid `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
TermsAggregateBaseVoid type.
func NewTermsAggregateBaseVoid ¶
func NewTermsAggregateBaseVoid() *TermsAggregateBaseVoid
NewTermsAggregateBaseVoid returns a TermsAggregateBaseVoid.
func (*TermsAggregateBaseVoid) UnmarshalJSON ¶
func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error
type TermsAggregation ¶
type TermsAggregation struct { CollectMode *termsaggregationcollectmode.TermsAggregationCollectMode `json:"collect_mode,omitempty"` Exclude []string `json:"exclude,omitempty"` ExecutionHint *termsaggregationexecutionhint.TermsAggregationExecutionHint `json:"execution_hint,omitempty"` Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Include TermsInclude `json:"include,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` MinDocCount *int `json:"min_doc_count,omitempty"` Missing Missing `json:"missing,omitempty"` MissingBucket *bool `json:"missing_bucket,omitempty"` MissingOrder *missingorder.MissingOrder `json:"missing_order,omitempty"` Name *string `json:"name,omitempty"` Order AggregateOrder `json:"order,omitempty"` Script Script `json:"script,omitempty"` ShardSize *int `json:"shard_size,omitempty"` ShowTermDocCountError *bool `json:"show_term_doc_count_error,omitempty"` Size *int `json:"size,omitempty"` ValueType *string `json:"value_type,omitempty"` }
TermsAggregation type.
func NewTermsAggregation ¶
func NewTermsAggregation() *TermsAggregation
NewTermsAggregation returns a TermsAggregation.
func (*TermsAggregation) UnmarshalJSON ¶
func (s *TermsAggregation) UnmarshalJSON(data []byte) error
type TermsGrouping ¶
type TermsGrouping struct {
Fields []string `json:"fields"`
}
TermsGrouping type.
func NewTermsGrouping ¶
func NewTermsGrouping() *TermsGrouping
NewTermsGrouping returns a TermsGrouping.
type TermsInclude ¶
type TermsInclude interface{}
TermsInclude holds the union for the following types:
string []string TermsPartition
type TermsLookup ¶
type TermsLookup struct { Id string `json:"id"` Index string `json:"index"` Path string `json:"path"` Routing *string `json:"routing,omitempty"` }
TermsLookup type.
type TermsPartition ¶
type TermsPartition struct { NumPartitions int64 `json:"num_partitions"` Partition int64 `json:"partition"` }
TermsPartition type.
func NewTermsPartition ¶
func NewTermsPartition() *TermsPartition
NewTermsPartition returns a TermsPartition.
type TermsQuery ¶
type TermsQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` TermsQuery map[string]TermsQueryField `json:"-"` }
TermsQuery type.
func (TermsQuery) MarshalJSON ¶
func (s TermsQuery) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
type TermsQueryField ¶
type TermsQueryField interface{}
TermsQueryField holds the union for the following types:
[]FieldValue TermsLookup
type TermsSetQuery ¶
type TermsSetQuery struct { Boost *float32 `json:"boost,omitempty"` MinimumShouldMatchField *string `json:"minimum_should_match_field,omitempty"` MinimumShouldMatchScript Script `json:"minimum_should_match_script,omitempty"` QueryName_ *string `json:"_name,omitempty"` Terms []string `json:"terms"` }
TermsSetQuery type.
func NewTermsSetQuery ¶
func NewTermsSetQuery() *TermsSetQuery
NewTermsSetQuery returns a TermsSetQuery.
type TestPopulation ¶
type TestPopulation struct { Field string `json:"field"` Filter *Query `json:"filter,omitempty"` Script Script `json:"script,omitempty"` }
TestPopulation type.
func NewTestPopulation ¶
func NewTestPopulation() *TestPopulation
NewTestPopulation returns a TestPopulation.
type TextClassificationInferenceOptions ¶
type TextClassificationInferenceOptions struct { // ClassificationLabels Classification labels to apply other than the stored labels. Must have the // same dimensions as the default configured labels ClassificationLabels []string `json:"classification_labels,omitempty"` // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` }
TextClassificationInferenceOptions type.
func NewTextClassificationInferenceOptions ¶
func NewTextClassificationInferenceOptions() *TextClassificationInferenceOptions
NewTextClassificationInferenceOptions returns a TextClassificationInferenceOptions.
type TextClassificationInferenceUpdateOptions ¶
type TextClassificationInferenceUpdateOptions struct { // ClassificationLabels Classification labels to apply other than the stored labels. Must have the // same dimensions as the default configured labels ClassificationLabels []string `json:"classification_labels,omitempty"` // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options to update when inferring Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` }
TextClassificationInferenceUpdateOptions type.
func NewTextClassificationInferenceUpdateOptions ¶
func NewTextClassificationInferenceUpdateOptions() *TextClassificationInferenceUpdateOptions
NewTextClassificationInferenceUpdateOptions returns a TextClassificationInferenceUpdateOptions.
type TextEmbedding ¶
TextEmbedding type.
func NewTextEmbedding ¶
func NewTextEmbedding() *TextEmbedding
NewTextEmbedding returns a TextEmbedding.
type TextEmbeddingInferenceOptions ¶
type TextEmbeddingInferenceOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` }
TextEmbeddingInferenceOptions type.
func NewTextEmbeddingInferenceOptions ¶
func NewTextEmbeddingInferenceOptions() *TextEmbeddingInferenceOptions
NewTextEmbeddingInferenceOptions returns a TextEmbeddingInferenceOptions.
type TextEmbeddingInferenceUpdateOptions ¶
type TextEmbeddingInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` }
TextEmbeddingInferenceUpdateOptions type.
func NewTextEmbeddingInferenceUpdateOptions ¶
func NewTextEmbeddingInferenceUpdateOptions() *TextEmbeddingInferenceUpdateOptions
NewTextEmbeddingInferenceUpdateOptions returns a TextEmbeddingInferenceUpdateOptions.
type TextIndexPrefixes ¶
TextIndexPrefixes type.
func NewTextIndexPrefixes ¶
func NewTextIndexPrefixes() *TextIndexPrefixes
NewTextIndexPrefixes returns a TextIndexPrefixes.
type TextProperty ¶
type TextProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` EagerGlobalOrdinals *bool `json:"eager_global_ordinals,omitempty"` Fielddata *bool `json:"fielddata,omitempty"` FielddataFrequencyFilter *FielddataFrequencyFilter `json:"fielddata_frequency_filter,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` IndexOptions *indexoptions.IndexOptions `json:"index_options,omitempty"` IndexPhrases *bool `json:"index_phrases,omitempty"` IndexPrefixes *TextIndexPrefixes `json:"index_prefixes,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Norms *bool `json:"norms,omitempty"` PositionIncrementGap *int `json:"position_increment_gap,omitempty"` Properties map[string]Property `json:"properties,omitempty"` SearchAnalyzer *string `json:"search_analyzer,omitempty"` SearchQuoteAnalyzer *string `json:"search_quote_analyzer,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` TermVector *termvectoroption.TermVectorOption `json:"term_vector,omitempty"` Type string `json:"type,omitempty"` }
TextProperty type.
func NewTextProperty ¶
func NewTextProperty() *TextProperty
NewTextProperty returns a TextProperty.
func (*TextProperty) UnmarshalJSON ¶
func (s *TextProperty) UnmarshalJSON(data []byte) error
type ThreadCount ¶
type ThreadCount struct { Active *int64 `json:"active,omitempty"` Completed *int64 `json:"completed,omitempty"` Largest *int64 `json:"largest,omitempty"` Queue *int64 `json:"queue,omitempty"` Rejected *int64 `json:"rejected,omitempty"` Threads *int64 `json:"threads,omitempty"` }
ThreadCount type.
type ThreadPoolRecord ¶
type ThreadPoolRecord struct { // Active number of active threads Active *string `json:"active,omitempty"` // Completed number of completed tasks Completed *string `json:"completed,omitempty"` // Core core number of threads in a scaling thread pool Core string `json:"core,omitempty"` // EphemeralNodeId ephemeral node id EphemeralNodeId *string `json:"ephemeral_node_id,omitempty"` // Host host name Host *string `json:"host,omitempty"` // Ip ip address Ip *string `json:"ip,omitempty"` // KeepAlive thread keep alive time KeepAlive string `json:"keep_alive,omitempty"` // Largest highest number of seen active threads Largest *string `json:"largest,omitempty"` // Max maximum number of threads in a scaling thread pool Max string `json:"max,omitempty"` // Name thread pool name Name *string `json:"name,omitempty"` // NodeId persistent node id NodeId *string `json:"node_id,omitempty"` // NodeName node name NodeName *string `json:"node_name,omitempty"` // Pid process id Pid *string `json:"pid,omitempty"` // PoolSize number of threads PoolSize *string `json:"pool_size,omitempty"` // Port bound transport port Port *string `json:"port,omitempty"` // Queue number of tasks currently in queue Queue *string `json:"queue,omitempty"` // QueueSize maximum number of tasks permitted in queue QueueSize *string `json:"queue_size,omitempty"` // Rejected number of rejected tasks Rejected *string `json:"rejected,omitempty"` // Size number of threads in a fixed thread pool Size string `json:"size,omitempty"` // Type thread pool type Type *string `json:"type,omitempty"` }
ThreadPoolRecord type.
func NewThreadPoolRecord ¶
func NewThreadPoolRecord() *ThreadPoolRecord
NewThreadPoolRecord returns a ThreadPoolRecord.
type ThrottleState ¶
ThrottleState type.
func NewThrottleState ¶
func NewThrottleState() *ThrottleState
NewThrottleState returns a ThrottleState.
type TimeOfMonth ¶
TimeOfMonth type.
type TimeOfWeek ¶
TimeOfWeek type.
type TimeOfYear ¶
type TimeOfYear struct { At []string `json:"at"` Int []month.Month `json:"int"` On []int `json:"on"` }
TimeOfYear type.
type TimeSync ¶
type TimeSync struct { // Delay The time delay between the current time and the latest input data time. Delay Duration `json:"delay,omitempty"` // Field The date field that is used to identify new documents in the source. In // general, it’s a good idea to use a field // that contains the ingest timestamp. If you use a different field, you might // need to set the delay such that it // accounts for data transmission delays. Field string `json:"field"` }
TimeSync type.
type TimingStats ¶
type TimingStats struct { // ElapsedTime Runtime of the analysis in milliseconds. ElapsedTime int64 `json:"elapsed_time"` // IterationTime Runtime of the latest iteration of the analysis in milliseconds. IterationTime *int64 `json:"iteration_time,omitempty"` }
TimingStats type.
type TokenCountProperty ¶
type TokenCountProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` EnablePositionIncrements *bool `json:"enable_position_increments,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *Float64 `json:"null_value,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
TokenCountProperty type.
func NewTokenCountProperty ¶
func NewTokenCountProperty() *TokenCountProperty
NewTokenCountProperty returns a TokenCountProperty.
func (*TokenCountProperty) UnmarshalJSON ¶
func (s *TokenCountProperty) UnmarshalJSON(data []byte) error
type TokenDetail ¶
type TokenDetail struct { Name string `json:"name"` Tokens []ExplainAnalyzeToken `json:"tokens"` }
TokenDetail type.
type TokenFilter ¶
type TokenFilter interface{}
TokenFilter holds the union for the following types:
string TokenFilterDefinition
type TokenFilterDefinition ¶
type TokenFilterDefinition interface{}
TokenFilterDefinition holds the union for the following types:
AsciiFoldingTokenFilter CommonGramsTokenFilter ConditionTokenFilter DelimitedPayloadTokenFilter EdgeNGramTokenFilter ElisionTokenFilter FingerprintTokenFilter HunspellTokenFilter HyphenationDecompounderTokenFilter KeepTypesTokenFilter KeepWordsTokenFilter KeywordMarkerTokenFilter KStemTokenFilter LengthTokenFilter LimitTokenCountTokenFilter LowercaseTokenFilter MultiplexerTokenFilter NGramTokenFilter NoriPartOfSpeechTokenFilter PatternCaptureTokenFilter PatternReplaceTokenFilter PorterStemTokenFilter PredicateTokenFilter RemoveDuplicatesTokenFilter ReverseTokenFilter ShingleTokenFilter SnowballTokenFilter StemmerOverrideTokenFilter StemmerTokenFilter StopTokenFilter SynonymGraphTokenFilter SynonymTokenFilter TrimTokenFilter TruncateTokenFilter UniqueTokenFilter UppercaseTokenFilter WordDelimiterGraphTokenFilter WordDelimiterTokenFilter KuromojiStemmerTokenFilter KuromojiReadingFormTokenFilter KuromojiPartOfSpeechTokenFilter IcuTokenizer IcuCollationTokenFilter IcuFoldingTokenFilter IcuNormalizationTokenFilter IcuTransformTokenFilter PhoneticTokenFilter DictionaryDecompounderTokenFilter
type TokenizationConfigContainer ¶
type TokenizationConfigContainer struct { // Bert Indicates BERT tokenization and its options Bert *NlpBertTokenizationConfig `json:"bert,omitempty"` // Mpnet Indicates MPNET tokenization and its options Mpnet *NlpBertTokenizationConfig `json:"mpnet,omitempty"` // Roberta Indicates RoBERTa tokenization and its options Roberta *NlpRobertaTokenizationConfig `json:"roberta,omitempty"` }
TokenizationConfigContainer type.
func NewTokenizationConfigContainer ¶
func NewTokenizationConfigContainer() *TokenizationConfigContainer
NewTokenizationConfigContainer returns a TokenizationConfigContainer.
type Tokenizer ¶
type Tokenizer interface{}
Tokenizer holds the union for the following types:
string TokenizerDefinition
type TokenizerDefinition ¶
type TokenizerDefinition interface{}
TokenizerDefinition holds the union for the following types:
CharGroupTokenizer EdgeNGramTokenizer KeywordTokenizer LetterTokenizer LowercaseTokenizer NGramTokenizer NoriTokenizer PathHierarchyTokenizer StandardTokenizer UaxEmailUrlTokenizer WhitespaceTokenizer KuromojiTokenizer PatternTokenizer IcuTokenizer
type TopClassEntry ¶
type TopClassEntry struct { ClassName string `json:"class_name"` ClassProbability Float64 `json:"class_probability"` ClassScore Float64 `json:"class_score"` }
TopClassEntry type.
func NewTopClassEntry ¶
func NewTopClassEntry() *TopClassEntry
NewTopClassEntry returns a TopClassEntry.
type TopHitsAggregate ¶
type TopHitsAggregate struct { Hits HitsMetadata `json:"hits"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
TopHitsAggregate type.
func NewTopHitsAggregate ¶
func NewTopHitsAggregate() *TopHitsAggregate
NewTopHitsAggregate returns a TopHitsAggregate.
type TopHitsAggregation ¶
type TopHitsAggregation struct { DocvalueFields []string `json:"docvalue_fields,omitempty"` Explain *bool `json:"explain,omitempty"` Field *string `json:"field,omitempty"` From *int `json:"from,omitempty"` Highlight *Highlight `json:"highlight,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` ScriptFields map[string]ScriptField `json:"script_fields,omitempty"` SeqNoPrimaryTerm *bool `json:"seq_no_primary_term,omitempty"` Size *int `json:"size,omitempty"` Sort []SortCombinations `json:"sort,omitempty"` Source_ SourceConfig `json:"_source,omitempty"` StoredFields []string `json:"stored_fields,omitempty"` TrackScores *bool `json:"track_scores,omitempty"` Version *bool `json:"version,omitempty"` }
TopHitsAggregation type.
func NewTopHitsAggregation ¶
func NewTopHitsAggregation() *TopHitsAggregation
NewTopHitsAggregation returns a TopHitsAggregation.
type TopLeftBottomRightGeoBounds ¶
type TopLeftBottomRightGeoBounds struct { BottomRight GeoLocation `json:"bottom_right"` TopLeft GeoLocation `json:"top_left"` }
TopLeftBottomRightGeoBounds type.
func NewTopLeftBottomRightGeoBounds ¶
func NewTopLeftBottomRightGeoBounds() *TopLeftBottomRightGeoBounds
NewTopLeftBottomRightGeoBounds returns a TopLeftBottomRightGeoBounds.
type TopMetrics ¶
type TopMetrics struct { Metrics map[string]FieldValue `json:"metrics"` Sort []FieldValue `json:"sort"` }
TopMetrics type.
type TopMetricsAggregate ¶
type TopMetricsAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` Top []TopMetrics `json:"top"` }
TopMetricsAggregate type.
func NewTopMetricsAggregate ¶
func NewTopMetricsAggregate() *TopMetricsAggregate
NewTopMetricsAggregate returns a TopMetricsAggregate.
type TopMetricsAggregation ¶
type TopMetricsAggregation struct { Field *string `json:"field,omitempty"` Metrics []TopMetricsValue `json:"metrics,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` Size *int `json:"size,omitempty"` Sort []SortCombinations `json:"sort,omitempty"` }
TopMetricsAggregation type.
func NewTopMetricsAggregation ¶
func NewTopMetricsAggregation() *TopMetricsAggregation
NewTopMetricsAggregation returns a TopMetricsAggregation.
type TopMetricsValue ¶
type TopMetricsValue struct {
Field string `json:"field"`
}
TopMetricsValue type.
func NewTopMetricsValue ¶
func NewTopMetricsValue() *TopMetricsValue
NewTopMetricsValue returns a TopMetricsValue.
type TopRightBottomLeftGeoBounds ¶
type TopRightBottomLeftGeoBounds struct { BottomLeft GeoLocation `json:"bottom_left"` TopRight GeoLocation `json:"top_right"` }
TopRightBottomLeftGeoBounds type.
func NewTopRightBottomLeftGeoBounds ¶
func NewTopRightBottomLeftGeoBounds() *TopRightBottomLeftGeoBounds
NewTopRightBottomLeftGeoBounds returns a TopRightBottomLeftGeoBounds.
type TotalFeatureImportance ¶
type TotalFeatureImportance struct { // Classes If the trained model is a classification model, feature importance statistics // are gathered per target class value. Classes []TotalFeatureImportanceClass `json:"classes"` // FeatureName The feature for which this importance was calculated. FeatureName string `json:"feature_name"` // Importance A collection of feature importance statistics related to the training data // set for this particular feature. Importance []TotalFeatureImportanceStatistics `json:"importance"` }
TotalFeatureImportance type.
func NewTotalFeatureImportance ¶
func NewTotalFeatureImportance() *TotalFeatureImportance
NewTotalFeatureImportance returns a TotalFeatureImportance.
type TotalFeatureImportanceClass ¶
type TotalFeatureImportanceClass struct { // ClassName The target class value. Could be a string, boolean, or number. ClassName string `json:"class_name"` // Importance A collection of feature importance statistics related to the training data // set for this particular feature. Importance []TotalFeatureImportanceStatistics `json:"importance"` }
TotalFeatureImportanceClass type.
func NewTotalFeatureImportanceClass ¶
func NewTotalFeatureImportanceClass() *TotalFeatureImportanceClass
NewTotalFeatureImportanceClass returns a TotalFeatureImportanceClass.
type TotalFeatureImportanceStatistics ¶
type TotalFeatureImportanceStatistics struct { // Max The maximum importance value across all the training data for this feature. Max int `json:"max"` // MeanMagnitude The average magnitude of this feature across all the training data. This // value is the average of the absolute values of the importance for this // feature. MeanMagnitude Float64 `json:"mean_magnitude"` // Min The minimum importance value across all the training data for this feature. Min int `json:"min"` }
TotalFeatureImportanceStatistics type.
func NewTotalFeatureImportanceStatistics ¶
func NewTotalFeatureImportanceStatistics() *TotalFeatureImportanceStatistics
NewTotalFeatureImportanceStatistics returns a TotalFeatureImportanceStatistics.
type TotalHits ¶
type TotalHits struct { Relation totalhitsrelation.TotalHitsRelation `json:"relation"` Value int64 `json:"value"` }
TotalHits type.
func (*TotalHits) UnmarshalJSON ¶
UnmarshalJSON implements Unmarshaler interface, it handles the shortcut for total hits.
type TotalUserProfiles ¶
TotalUserProfiles type.
func NewTotalUserProfiles ¶
func NewTotalUserProfiles() *TotalUserProfiles
NewTotalUserProfiles returns a TotalUserProfiles.
type TrackHits ¶
type TrackHits interface{}
TrackHits holds the union for the following types:
bool int
type TrainedModel ¶
type TrainedModel struct { // Ensemble The definition for an ensemble model Ensemble *Ensemble `json:"ensemble,omitempty"` // Tree The definition for a binary decision tree. Tree *TrainedModelTree `json:"tree,omitempty"` // TreeNode The definition of a node in a tree. // There are two major types of nodes: leaf nodes and not-leaf nodes. // - Leaf nodes only need node_index and leaf_value defined. // - All other nodes need split_feature, left_child, right_child, threshold, // decision_type, and default_left defined. TreeNode *TrainedModelTreeNode `json:"tree_node,omitempty"` }
TrainedModel type.
func NewTrainedModel ¶
func NewTrainedModel() *TrainedModel
NewTrainedModel returns a TrainedModel.
type TrainedModelAssignment ¶
type TrainedModelAssignment struct { // AssignmentState The overall assignment state. AssignmentState deploymentassignmentstate.DeploymentAssignmentState `json:"assignment_state"` MaxAssignedAllocations *int `json:"max_assigned_allocations,omitempty"` // RoutingTable The allocation state for each node. RoutingTable map[string]TrainedModelAssignmentRoutingTable `json:"routing_table"` // StartTime The timestamp when the deployment started. StartTime DateTime `json:"start_time"` TaskParameters TrainedModelAssignmentTaskParameters `json:"task_parameters"` }
TrainedModelAssignment type.
func NewTrainedModelAssignment ¶
func NewTrainedModelAssignment() *TrainedModelAssignment
NewTrainedModelAssignment returns a TrainedModelAssignment.
type TrainedModelAssignmentRoutingTable ¶
type TrainedModelAssignmentRoutingTable struct { // CurrentAllocations Current number of allocations. CurrentAllocations int `json:"current_allocations"` // Reason The reason for the current state. It is usually populated only when the // `routing_state` is `failed`. Reason string `json:"reason"` // RoutingState The current routing state. RoutingState routingstate.RoutingState `json:"routing_state"` // TargetAllocations Target number of allocations. TargetAllocations int `json:"target_allocations"` }
TrainedModelAssignmentRoutingTable type.
func NewTrainedModelAssignmentRoutingTable ¶
func NewTrainedModelAssignmentRoutingTable() *TrainedModelAssignmentRoutingTable
NewTrainedModelAssignmentRoutingTable returns a TrainedModelAssignmentRoutingTable.
type TrainedModelAssignmentTaskParameters ¶
type TrainedModelAssignmentTaskParameters struct { // CacheSize The size of the trained model cache. CacheSize ByteSize `json:"cache_size"` // ModelBytes The size of the trained model in bytes. ModelBytes int `json:"model_bytes"` // ModelId The unique identifier for the trained model. ModelId string `json:"model_id"` // NumberOfAllocations The total number of allocations this model is assigned across ML nodes. NumberOfAllocations int `json:"number_of_allocations"` Priority trainingpriority.TrainingPriority `json:"priority"` // QueueCapacity Number of inference requests allowed in the queue at a time. QueueCapacity int `json:"queue_capacity"` // ThreadsPerAllocation Number of threads per allocation. ThreadsPerAllocation int `json:"threads_per_allocation"` }
TrainedModelAssignmentTaskParameters type.
func NewTrainedModelAssignmentTaskParameters ¶
func NewTrainedModelAssignmentTaskParameters() *TrainedModelAssignmentTaskParameters
NewTrainedModelAssignmentTaskParameters returns a TrainedModelAssignmentTaskParameters.
type TrainedModelConfig ¶
type TrainedModelConfig struct { CompressedDefinition *string `json:"compressed_definition,omitempty"` // CreateTime The time when the trained model was created. CreateTime DateTime `json:"create_time,omitempty"` // CreatedBy Information on the creator of the trained model. CreatedBy *string `json:"created_by,omitempty"` // DefaultFieldMap Any field map described in the inference configuration takes precedence. DefaultFieldMap map[string]string `json:"default_field_map,omitempty"` // Description The free-text description of the trained model. Description *string `json:"description,omitempty"` // EstimatedHeapMemoryUsageBytes The estimated heap usage in bytes to keep the trained model in memory. EstimatedHeapMemoryUsageBytes *int `json:"estimated_heap_memory_usage_bytes,omitempty"` // EstimatedOperations The estimated number of operations to use the trained model. EstimatedOperations *int `json:"estimated_operations,omitempty"` // InferenceConfig The default configuration for inference. This can be either a regression, // classification, or one of the many NLP focused configurations. It must match // the underlying definition.trained_model's target_type. InferenceConfig InferenceConfigCreateContainer `json:"inference_config"` // Input The input field names for the model definition. Input TrainedModelConfigInput `json:"input"` // LicenseLevel The license level of the trained model. LicenseLevel *string `json:"license_level,omitempty"` Location *TrainedModelLocation `json:"location,omitempty"` // Metadata An object containing metadata about the trained model. For example, models // created by data frame analytics contain analysis_config and input objects. Metadata *TrainedModelConfigMetadata `json:"metadata,omitempty"` // ModelId Identifier for the trained model. 
ModelId string `json:"model_id"` ModelSizeBytes ByteSize `json:"model_size_bytes,omitempty"` // ModelType The model type ModelType *trainedmodeltype.TrainedModelType `json:"model_type,omitempty"` // Tags A comma delimited string of tags. A trained model can have many tags, or // none. Tags []string `json:"tags"` // Version The Elasticsearch version number in which the trained model was created. Version *string `json:"version,omitempty"` }
TrainedModelConfig type.
func NewTrainedModelConfig ¶
func NewTrainedModelConfig() *TrainedModelConfig
NewTrainedModelConfig returns a TrainedModelConfig.
type TrainedModelConfigInput ¶
type TrainedModelConfigInput struct { // FieldNames An array of input field names for the model. FieldNames []string `json:"field_names"` }
TrainedModelConfigInput type.
func NewTrainedModelConfigInput ¶
func NewTrainedModelConfigInput() *TrainedModelConfigInput
NewTrainedModelConfigInput returns a TrainedModelConfigInput.
type TrainedModelConfigMetadata ¶
type TrainedModelConfigMetadata struct { // FeatureImportanceBaseline An object that contains the baseline for feature importance values. For // regression analysis, it is a single value. For classification analysis, there // is a value for each class. FeatureImportanceBaseline map[string]string `json:"feature_importance_baseline,omitempty"` // Hyperparameters List of the available hyperparameters optimized during the // fine_parameter_tuning phase as well as specified by the user. Hyperparameters []Hyperparameter `json:"hyperparameters,omitempty"` ModelAliases []string `json:"model_aliases,omitempty"` // TotalFeatureImportance An array of the total feature importance for each feature used from the // training data set. This array of objects is returned if data frame analytics // trained the model and the request includes total_feature_importance in the // include request parameter. TotalFeatureImportance []TotalFeatureImportance `json:"total_feature_importance,omitempty"` }
TrainedModelConfigMetadata type.
func NewTrainedModelConfigMetadata ¶
func NewTrainedModelConfigMetadata() *TrainedModelConfigMetadata
NewTrainedModelConfigMetadata returns a TrainedModelConfigMetadata.
type TrainedModelDeploymentAllocationStatus ¶
type TrainedModelDeploymentAllocationStatus struct { // AllocationCount The current number of nodes where the model is allocated. AllocationCount int `json:"allocation_count"` // State The detailed allocation state related to the nodes. State deploymentallocationstate.DeploymentAllocationState `json:"state"` // TargetAllocationCount The desired number of nodes for model allocation. TargetAllocationCount int `json:"target_allocation_count"` }
TrainedModelDeploymentAllocationStatus type.
func NewTrainedModelDeploymentAllocationStatus ¶
func NewTrainedModelDeploymentAllocationStatus() *TrainedModelDeploymentAllocationStatus
NewTrainedModelDeploymentAllocationStatus returns a TrainedModelDeploymentAllocationStatus.
type TrainedModelDeploymentNodesStats ¶
type TrainedModelDeploymentNodesStats struct { // AverageInferenceTimeMs The average time for each inference call to complete on this node. AverageInferenceTimeMs Float64 `json:"average_inference_time_ms"` // ErrorCount The number of errors when evaluating the trained model. ErrorCount int `json:"error_count"` // InferenceCount The total number of inference calls made against this node for this model. InferenceCount int `json:"inference_count"` // LastAccess The epoch time stamp of the last inference call for the model on this node. LastAccess int64 `json:"last_access"` // Node Information pertaining to the node. Node DiscoveryNode `json:"node"` // NumberOfAllocations The number of allocations assigned to this node. NumberOfAllocations int `json:"number_of_allocations"` // NumberOfPendingRequests The number of inference requests queued to be processed. NumberOfPendingRequests int `json:"number_of_pending_requests"` // RejectionExecutionCount The number of inference requests that were not processed because the queue // was full. RejectionExecutionCount int `json:"rejection_execution_count"` // RoutingState The current routing state and reason for the current routing state for this // allocation. RoutingState TrainedModelAssignmentRoutingTable `json:"routing_state"` // StartTime The epoch timestamp when the allocation started. StartTime int64 `json:"start_time"` // ThreadsPerAllocation The number of threads used by each allocation during inference. ThreadsPerAllocation int `json:"threads_per_allocation"` // TimeoutCount The number of inference requests that timed out before being processed. TimeoutCount int `json:"timeout_count"` }
TrainedModelDeploymentNodesStats type.
func NewTrainedModelDeploymentNodesStats ¶
func NewTrainedModelDeploymentNodesStats() *TrainedModelDeploymentNodesStats
NewTrainedModelDeploymentNodesStats returns a TrainedModelDeploymentNodesStats.
type TrainedModelDeploymentStats ¶
type TrainedModelDeploymentStats struct { // AllocationStatus The detailed allocation status for the deployment. AllocationStatus TrainedModelDeploymentAllocationStatus `json:"allocation_status"` CacheSize ByteSize `json:"cache_size,omitempty"` // ErrorCount The sum of `error_count` for all nodes in the deployment. ErrorCount int `json:"error_count"` // InferenceCount The sum of `inference_count` for all nodes in the deployment. InferenceCount int `json:"inference_count"` // ModelId The unique identifier for the trained model. ModelId string `json:"model_id"` // Nodes The deployment stats for each node that currently has the model allocated. Nodes TrainedModelDeploymentNodesStats `json:"nodes"` // NumberOfAllocations The number of allocations requested. NumberOfAllocations int `json:"number_of_allocations"` // QueueCapacity The number of inference requests that can be queued before new requests are // rejected. QueueCapacity int `json:"queue_capacity"` // Reason The reason for the current deployment state. Usually only populated when // the model is not deployed to a node. Reason string `json:"reason"` // RejectedExecutionCount The sum of `rejected_execution_count` for all nodes in the deployment. // Individual nodes reject an inference request if the inference queue is full. // The queue size is controlled by the `queue_capacity` setting in the start // trained model deployment API. RejectedExecutionCount int `json:"rejected_execution_count"` // StartTime The epoch timestamp when the deployment started. StartTime int64 `json:"start_time"` // State The overall state of the deployment. State deploymentstate.DeploymentState `json:"state"` // ThreadsPerAllocation The number of threads used by each allocation during inference. ThreadsPerAllocation int `json:"threads_per_allocation"` // TimeoutCount The sum of `timeout_count` for all nodes in the deployment. TimeoutCount int `json:"timeout_count"` }
TrainedModelDeploymentStats type.
func NewTrainedModelDeploymentStats ¶
func NewTrainedModelDeploymentStats() *TrainedModelDeploymentStats
NewTrainedModelDeploymentStats returns a TrainedModelDeploymentStats.
type TrainedModelEntities ¶
type TrainedModelEntities struct { ClassName string `json:"class_name"` ClassProbability Float64 `json:"class_probability"` EndPos int `json:"end_pos"` Entity string `json:"entity"` StartPos int `json:"start_pos"` }
TrainedModelEntities type.
func NewTrainedModelEntities ¶
func NewTrainedModelEntities() *TrainedModelEntities
NewTrainedModelEntities returns a TrainedModelEntities.
type TrainedModelInferenceClassImportance ¶
type TrainedModelInferenceClassImportance struct { ClassName string `json:"class_name"` Importance Float64 `json:"importance"` }
TrainedModelInferenceClassImportance type.
func NewTrainedModelInferenceClassImportance ¶
func NewTrainedModelInferenceClassImportance() *TrainedModelInferenceClassImportance
NewTrainedModelInferenceClassImportance returns a TrainedModelInferenceClassImportance.
type TrainedModelInferenceFeatureImportance ¶
type TrainedModelInferenceFeatureImportance struct { Classes []TrainedModelInferenceClassImportance `json:"classes,omitempty"` FeatureName string `json:"feature_name"` Importance *Float64 `json:"importance,omitempty"` }
TrainedModelInferenceFeatureImportance type.
func NewTrainedModelInferenceFeatureImportance ¶
func NewTrainedModelInferenceFeatureImportance() *TrainedModelInferenceFeatureImportance
NewTrainedModelInferenceFeatureImportance returns a TrainedModelInferenceFeatureImportance.
type TrainedModelInferenceStats ¶
type TrainedModelInferenceStats struct { // CacheMissCount The number of times the model was loaded for inference and was not retrieved // from the cache. // If this number is close to the `inference_count`, the cache is not being // appropriately used. // This can be solved by increasing the cache size or its time-to-live (TTL). // Refer to general machine learning settings for the appropriate settings. CacheMissCount int `json:"cache_miss_count"` // FailureCount The number of failures when using the model for inference. FailureCount int `json:"failure_count"` // InferenceCount The total number of times the model has been called for inference. // This is across all inference contexts, including all pipelines. InferenceCount int `json:"inference_count"` // MissingAllFieldsCount The number of inference calls where all the training features for the model // were missing. MissingAllFieldsCount int `json:"missing_all_fields_count"` // Timestamp The time when the statistics were last updated. Timestamp DateTime `json:"timestamp"` }
TrainedModelInferenceStats type.
func NewTrainedModelInferenceStats ¶
func NewTrainedModelInferenceStats() *TrainedModelInferenceStats
NewTrainedModelInferenceStats returns a TrainedModelInferenceStats.
type TrainedModelLocation ¶
type TrainedModelLocation struct {
Index TrainedModelLocationIndex `json:"index"`
}
TrainedModelLocation type.
func NewTrainedModelLocation ¶
func NewTrainedModelLocation() *TrainedModelLocation
NewTrainedModelLocation returns a TrainedModelLocation.
type TrainedModelLocationIndex ¶
type TrainedModelLocationIndex struct {
Name string `json:"name"`
}
TrainedModelLocationIndex type.
func NewTrainedModelLocationIndex ¶
func NewTrainedModelLocationIndex() *TrainedModelLocationIndex
NewTrainedModelLocationIndex returns a TrainedModelLocationIndex.
type TrainedModelSizeStats ¶
type TrainedModelSizeStats struct { // ModelSizeBytes The size of the model in bytes. ModelSizeBytes ByteSize `json:"model_size_bytes"` // RequiredNativeMemoryBytes The amount of memory required to load the model in bytes. RequiredNativeMemoryBytes int `json:"required_native_memory_bytes"` }
TrainedModelSizeStats type.
func NewTrainedModelSizeStats ¶
func NewTrainedModelSizeStats() *TrainedModelSizeStats
NewTrainedModelSizeStats returns a TrainedModelSizeStats.
type TrainedModelStats ¶
type TrainedModelStats struct { // DeploymentStats A collection of deployment stats, which is present when the models are // deployed. DeploymentStats *TrainedModelDeploymentStats `json:"deployment_stats,omitempty"` // InferenceStats A collection of inference stats fields. InferenceStats *TrainedModelInferenceStats `json:"inference_stats,omitempty"` // Ingest A collection of ingest stats for the model across all nodes. // The values are summations of the individual node statistics. // The format matches the ingest section in the nodes stats API. Ingest map[string]json.RawMessage `json:"ingest,omitempty"` // ModelId The unique identifier of the trained model. ModelId string `json:"model_id"` // ModelSizeStats A collection of model size stats. ModelSizeStats TrainedModelSizeStats `json:"model_size_stats"` // PipelineCount The number of ingest pipelines that currently refer to the model. PipelineCount int `json:"pipeline_count"` }
TrainedModelStats type.
func NewTrainedModelStats ¶
func NewTrainedModelStats() *TrainedModelStats
NewTrainedModelStats returns a TrainedModelStats.
type TrainedModelTree ¶
type TrainedModelTree struct { ClassificationLabels []string `json:"classification_labels,omitempty"` FeatureNames []string `json:"feature_names"` TargetType *string `json:"target_type,omitempty"` TreeStructure []TrainedModelTreeNode `json:"tree_structure"` }
TrainedModelTree type.
func NewTrainedModelTree ¶
func NewTrainedModelTree() *TrainedModelTree
NewTrainedModelTree returns a TrainedModelTree.
type TrainedModelTreeNode ¶
type TrainedModelTreeNode struct { DecisionType *string `json:"decision_type,omitempty"` DefaultLeft *bool `json:"default_left,omitempty"` LeafValue *Float64 `json:"leaf_value,omitempty"` LeftChild *int `json:"left_child,omitempty"` NodeIndex int `json:"node_index"` RightChild *int `json:"right_child,omitempty"` SplitFeature *int `json:"split_feature,omitempty"` SplitGain *int `json:"split_gain,omitempty"` Threshold *Float64 `json:"threshold,omitempty"` }
TrainedModelTreeNode type.
func NewTrainedModelTreeNode ¶
func NewTrainedModelTreeNode() *TrainedModelTreeNode
NewTrainedModelTreeNode returns a TrainedModelTreeNode.
type TrainedModelsRecord ¶
type TrainedModelsRecord struct { // CreateTime The time the model was created CreateTime DateTime `json:"create_time,omitempty"` // CreatedBy who created the model CreatedBy *string `json:"created_by,omitempty"` // DataFrameAnalysis The analysis used by the data frame to build the model DataFrameAnalysis *string `json:"data_frame.analysis,omitempty"` // DataFrameCreateTime The time the data frame analytics config was created DataFrameCreateTime *string `json:"data_frame.create_time,omitempty"` // DataFrameId The data frame analytics config id that created the model (if still // available) DataFrameId *string `json:"data_frame.id,omitempty"` // DataFrameSourceIndex The source index used to train in the data frame analysis DataFrameSourceIndex *string `json:"data_frame.source_index,omitempty"` // Description The model description Description *string `json:"description,omitempty"` // HeapSize the estimated heap size to keep the model in memory HeapSize ByteSize `json:"heap_size,omitempty"` // Id the trained model id Id *string `json:"id,omitempty"` // IngestCount The total number of docs processed by the model IngestCount *string `json:"ingest.count,omitempty"` // IngestCurrent The total documents currently being handled by the model IngestCurrent *string `json:"ingest.current,omitempty"` // IngestFailed The total count of failed ingest attempts with this model IngestFailed *string `json:"ingest.failed,omitempty"` // IngestPipelines The number of pipelines referencing the model IngestPipelines *string `json:"ingest.pipelines,omitempty"` // IngestTime The total time spent processing docs with this model IngestTime *string `json:"ingest.time,omitempty"` // License The license level of the model License *string `json:"license,omitempty"` // Operations the estimated number of operations to use the model Operations *string `json:"operations,omitempty"` Type *string `json:"type,omitempty"` // Version The version of Elasticsearch when the model was created Version *string 
`json:"version,omitempty"` }
TrainedModelsRecord type.
func NewTrainedModelsRecord ¶
func NewTrainedModelsRecord() *TrainedModelsRecord
NewTrainedModelsRecord returns a TrainedModelsRecord.
type TransformAuthorization ¶
type TransformAuthorization struct { // ApiKey If an API key was used for the most recent update to the transform, its name // and identifier are listed in the response. ApiKey *ApiKeyAuthorization `json:"api_key,omitempty"` // Roles If a user ID was used for the most recent update to the transform, its roles // at the time of the update are listed in the response. Roles []string `json:"roles,omitempty"` // ServiceAccount If a service account was used for the most recent update to the transform, // the account name is listed in the response. ServiceAccount *string `json:"service_account,omitempty"` }
TransformAuthorization type.
func NewTransformAuthorization ¶
func NewTransformAuthorization() *TransformAuthorization
NewTransformAuthorization returns a TransformAuthorization.
type TransformContainer ¶
type TransformContainer struct { Chain []TransformContainer `json:"chain,omitempty"` Script *ScriptTransform `json:"script,omitempty"` Search *SearchTransform `json:"search,omitempty"` }
TransformContainer type.
func NewTransformContainer ¶
func NewTransformContainer() *TransformContainer
NewTransformContainer returns a TransformContainer.
type TransformDestination ¶
type TransformDestination struct { // Index The destination index for the transform. The mappings of the destination // index are deduced based on the source // fields when possible. If alternate mappings are required, use the create // index API prior to starting the // transform. Index *string `json:"index,omitempty"` // Pipeline The unique identifier for an ingest pipeline. Pipeline *string `json:"pipeline,omitempty"` }
TransformDestination type.
func NewTransformDestination ¶
func NewTransformDestination() *TransformDestination
NewTransformDestination returns a TransformDestination.
type TransformIndexerStats ¶
type TransformIndexerStats struct { DeleteTimeInMs *int64 `json:"delete_time_in_ms,omitempty"` DocumentsDeleted *int64 `json:"documents_deleted,omitempty"` DocumentsIndexed int64 `json:"documents_indexed"` DocumentsProcessed int64 `json:"documents_processed"` ExponentialAvgCheckpointDurationMs Float64 `json:"exponential_avg_checkpoint_duration_ms"` ExponentialAvgDocumentsIndexed Float64 `json:"exponential_avg_documents_indexed"` ExponentialAvgDocumentsProcessed Float64 `json:"exponential_avg_documents_processed"` IndexFailures int64 `json:"index_failures"` IndexTimeInMs int64 `json:"index_time_in_ms"` IndexTotal int64 `json:"index_total"` PagesProcessed int64 `json:"pages_processed"` ProcessingTimeInMs int64 `json:"processing_time_in_ms"` ProcessingTotal int64 `json:"processing_total"` SearchFailures int64 `json:"search_failures"` SearchTimeInMs int64 `json:"search_time_in_ms"` SearchTotal int64 `json:"search_total"` TriggerCount int64 `json:"trigger_count"` }
TransformIndexerStats type.
func NewTransformIndexerStats ¶
func NewTransformIndexerStats() *TransformIndexerStats
NewTransformIndexerStats returns a TransformIndexerStats.
type TransformProgress ¶
type TransformProgress struct { DocsIndexed int64 `json:"docs_indexed"` DocsProcessed int64 `json:"docs_processed"` DocsRemaining int64 `json:"docs_remaining"` PercentComplete Float64 `json:"percent_complete"` TotalDocs int64 `json:"total_docs"` }
TransformProgress type.
func NewTransformProgress ¶
func NewTransformProgress() *TransformProgress
NewTransformProgress returns a TransformProgress.
type TransformSource ¶
type TransformSource struct { // Index The source indices for the transform. It can be a single index, an index // pattern (for example, `"my-index-*"`), an // array of indices (for example, `["my-index-000001", "my-index-000002"]`), or // an array of index patterns (for // example, `["my-index-*", "my-other-index-*"]`). For remote indices use the // syntax `"remote_name:index_name"`. If // any indices are in remote clusters then the master node and at least one // transform node must have the `remote_cluster_client` node role. Index []string `json:"index"` // Query A query clause that retrieves a subset of data from the source index. Query *Query `json:"query,omitempty"` // RuntimeMappings Definitions of search-time runtime fields that can be used by the transform. // For search runtime fields all data // nodes, including remote nodes, must be 7.12 or later. RuntimeMappings map[string]RuntimeField `json:"runtime_mappings,omitempty"` }
TransformSource type.
func NewTransformSource ¶
func NewTransformSource() *TransformSource
NewTransformSource returns a TransformSource.
type TransformStats ¶
type TransformStats struct { Checkpointing Checkpointing `json:"checkpointing"` Health *TransformStatsHealth `json:"health,omitempty"` Id string `json:"id"` Node *NodeAttributes `json:"node,omitempty"` Reason *string `json:"reason,omitempty"` State string `json:"state"` Stats TransformIndexerStats `json:"stats"` }
TransformStats type.
func NewTransformStats ¶
func NewTransformStats() *TransformStats
NewTransformStats returns a TransformStats.
type TransformStatsHealth ¶
type TransformStatsHealth struct {
Status healthstatus.HealthStatus `json:"status"`
}
TransformStatsHealth type.
func NewTransformStatsHealth ¶
func NewTransformStatsHealth() *TransformStatsHealth
NewTransformStatsHealth returns a TransformStatsHealth.
type TransformSummary ¶
type TransformSummary struct { // Authorization The security privileges that the transform uses to run its queries. If // Elastic Stack security features were disabled at the time of the most recent // update to the transform, this property is omitted. Authorization *TransformAuthorization `json:"authorization,omitempty"` // CreateTime The time the transform was created. CreateTime *int64 `json:"create_time,omitempty"` // Description Free text description of the transform. Description *string `json:"description,omitempty"` // Dest The destination for the transform. Dest ReindexDestination `json:"dest"` Frequency Duration `json:"frequency,omitempty"` Id string `json:"id"` Latest *Latest `json:"latest,omitempty"` Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` // Pivot The pivot method transforms the data by aggregating and grouping it. Pivot *Pivot `json:"pivot,omitempty"` RetentionPolicy *RetentionPolicyContainer `json:"retention_policy,omitempty"` // Settings Defines optional transform settings. Settings *Settings `json:"settings,omitempty"` // Source The source of the data for the transform. Source TransformSource `json:"source"` // Sync Defines the properties transforms require to run continuously. Sync *SyncContainer `json:"sync,omitempty"` // Version The version of Elasticsearch that existed on the node when the transform was // created. Version *string `json:"version,omitempty"` }
TransformSummary type.
func NewTransformSummary ¶
func NewTransformSummary() *TransformSummary
NewTransformSummary returns a TransformSummary.
type TransformsRecord ¶
type TransformsRecord struct { // ChangesLastDetectionTime changes last detected time ChangesLastDetectionTime string `json:"changes_last_detection_time,omitempty"` // Checkpoint checkpoint Checkpoint *string `json:"checkpoint,omitempty"` // CheckpointDurationTimeExpAvg exponential average checkpoint processing time (milliseconds) CheckpointDurationTimeExpAvg *string `json:"checkpoint_duration_time_exp_avg,omitempty"` // CheckpointProgress progress of the checkpoint CheckpointProgress string `json:"checkpoint_progress,omitempty"` // CreateTime transform creation time CreateTime *string `json:"create_time,omitempty"` // DeleteTime total time spent deleting documents DeleteTime *string `json:"delete_time,omitempty"` // Description description Description *string `json:"description,omitempty"` // DestIndex destination index DestIndex *string `json:"dest_index,omitempty"` // DocsPerSecond docs per second DocsPerSecond *string `json:"docs_per_second,omitempty"` // DocumentsDeleted the number of documents deleted from the destination index DocumentsDeleted *string `json:"documents_deleted,omitempty"` // DocumentsIndexed the number of documents written to the destination index DocumentsIndexed *string `json:"documents_indexed,omitempty"` // DocumentsProcessed the number of documents read from source indices and processed DocumentsProcessed *string `json:"documents_processed,omitempty"` // Frequency frequency of transform Frequency *string `json:"frequency,omitempty"` // Id the id Id *string `json:"id,omitempty"` // IndexFailure total number of index failures IndexFailure *string `json:"index_failure,omitempty"` // IndexTime total time spent indexing documents IndexTime *string `json:"index_time,omitempty"` // IndexTotal total number of index phases done by the transform IndexTotal *string `json:"index_total,omitempty"` // IndexedDocumentsExpAvg exponential average number of documents indexed IndexedDocumentsExpAvg *string `json:"indexed_documents_exp_avg,omitempty"` // 
LastSearchTime last time transform searched for updates LastSearchTime string `json:"last_search_time,omitempty"` // MaxPageSearchSize max page search size MaxPageSearchSize *string `json:"max_page_search_size,omitempty"` // PagesProcessed the number of pages processed PagesProcessed *string `json:"pages_processed,omitempty"` // Pipeline transform pipeline Pipeline *string `json:"pipeline,omitempty"` // ProcessedDocumentsExpAvg exponential average number of documents processed ProcessedDocumentsExpAvg *string `json:"processed_documents_exp_avg,omitempty"` // ProcessingTime the total time spent processing documents ProcessingTime *string `json:"processing_time,omitempty"` // Reason reason for the current state Reason *string `json:"reason,omitempty"` // SearchFailure total number of search failures SearchFailure *string `json:"search_failure,omitempty"` // SearchTime total search time SearchTime *string `json:"search_time,omitempty"` // SearchTotal total number of search phases SearchTotal *string `json:"search_total,omitempty"` // SourceIndex source index SourceIndex *string `json:"source_index,omitempty"` // State transform state State *string `json:"state,omitempty"` // TransformType batch or continuous transform TransformType *string `json:"transform_type,omitempty"` // TriggerCount the number of times the transform has been triggered TriggerCount *string `json:"trigger_count,omitempty"` // Version the version of Elasticsearch when the transform was created Version *string `json:"version,omitempty"` }
TransformsRecord type.
func NewTransformsRecord ¶
func NewTransformsRecord() *TransformsRecord
NewTransformsRecord returns a TransformsRecord.
type TransientMetadataConfig ¶
type TransientMetadataConfig struct {
Enabled bool `json:"enabled"`
}
TransientMetadataConfig type.
func NewTransientMetadataConfig ¶
func NewTransientMetadataConfig() *TransientMetadataConfig
NewTransientMetadataConfig returns a TransientMetadataConfig.
type Translog ¶
type Translog struct { // Durability Whether or not to `fsync` and commit the translog after every index, delete, // update, or bulk request. Durability *translogdurability.TranslogDurability `json:"durability,omitempty"` // FlushThresholdSize The translog stores all operations that are not yet safely persisted in // Lucene (i.e., are not // part of a Lucene commit point). Although these operations are available for // reads, they will need // to be replayed if the shard was stopped and had to be recovered. This setting // controls the // maximum total size of these operations, to prevent recoveries from taking too // long. Once the // maximum size has been reached a flush will happen, generating a new Lucene // commit point. FlushThresholdSize ByteSize `json:"flush_threshold_size,omitempty"` Retention *TranslogRetention `json:"retention,omitempty"` // SyncInterval How often the translog is fsynced to disk and committed, regardless of write // operations. // Values less than 100ms are not allowed. SyncInterval Duration `json:"sync_interval,omitempty"` }
Translog type.
type TranslogRetention ¶
type TranslogRetention struct { // Age This controls the maximum duration for which translog files are kept by each // shard. Keeping more // translog files increases the chance of performing an operation based sync // when recovering replicas. If // the translog files are not sufficient, replica recovery will fall back to a // file based sync. This setting // is ignored, and should not be set, if soft deletes are enabled. Soft deletes // are enabled by default in // indices created in Elasticsearch versions 7.0.0 and later. Age Duration `json:"age,omitempty"` // Size This controls the total size of translog files to keep for each shard. // Keeping more translog files increases // the chance of performing an operation based sync when recovering a replica. // If the translog files are not // sufficient, replica recovery will fall back to a file based sync. This // setting is ignored, and should not be // set, if soft deletes are enabled. Soft deletes are enabled by default in // indices created in Elasticsearch // versions 7.0.0 and later. Size ByteSize `json:"size,omitempty"` }
TranslogRetention type.
func NewTranslogRetention ¶
func NewTranslogRetention() *TranslogRetention
NewTranslogRetention returns a TranslogRetention.
type TranslogStats ¶
type TranslogStats struct { EarliestLastModifiedAge int64 `json:"earliest_last_modified_age"` Operations int64 `json:"operations"` Size *string `json:"size,omitempty"` SizeInBytes int64 `json:"size_in_bytes"` UncommittedOperations int `json:"uncommitted_operations"` UncommittedSize *string `json:"uncommitted_size,omitempty"` UncommittedSizeInBytes int64 `json:"uncommitted_size_in_bytes"` }
TranslogStats type.
func NewTranslogStats ¶
func NewTranslogStats() *TranslogStats
NewTranslogStats returns a TranslogStats.
type TranslogStatus ¶
type TranslogStatus struct { Percent Percentage `json:"percent"` Recovered int64 `json:"recovered"` Total int64 `json:"total"` TotalOnStart int64 `json:"total_on_start"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` }
TranslogStatus type.
func NewTranslogStatus ¶
func NewTranslogStatus() *TranslogStatus
NewTranslogStatus returns a TranslogStatus.
type Transport ¶
type Transport struct { InboundHandlingTimeHistogram []TransportHistogram `json:"inbound_handling_time_histogram,omitempty"` OutboundHandlingTimeHistogram []TransportHistogram `json:"outbound_handling_time_histogram,omitempty"` RxCount *int64 `json:"rx_count,omitempty"` RxSize *string `json:"rx_size,omitempty"` RxSizeInBytes *int64 `json:"rx_size_in_bytes,omitempty"` ServerOpen *int `json:"server_open,omitempty"` TotalOutboundConnections *int64 `json:"total_outbound_connections,omitempty"` TxCount *int64 `json:"tx_count,omitempty"` TxSize *string `json:"tx_size,omitempty"` TxSizeInBytes *int64 `json:"tx_size_in_bytes,omitempty"` }
Transport type.
type TransportHistogram ¶
type TransportHistogram struct { Count *int64 `json:"count,omitempty"` GeMillis *int64 `json:"ge_millis,omitempty"` LtMillis *int64 `json:"lt_millis,omitempty"` }
TransportHistogram type.
func NewTransportHistogram ¶
func NewTransportHistogram() *TransportHistogram
NewTransportHistogram returns a TransportHistogram.
type TriggerContainer ¶
type TriggerContainer struct {
Schedule *ScheduleContainer `json:"schedule,omitempty"`
}
TriggerContainer type.
func NewTriggerContainer ¶
func NewTriggerContainer() *TriggerContainer
NewTriggerContainer returns a TriggerContainer.
type TriggerEventContainer ¶
type TriggerEventContainer struct {
Schedule *ScheduleTriggerEvent `json:"schedule,omitempty"`
}
TriggerEventContainer type.
func NewTriggerEventContainer ¶
func NewTriggerEventContainer() *TriggerEventContainer
NewTriggerEventContainer returns a TriggerEventContainer.
type TriggerEventResult ¶
type TriggerEventResult struct { Manual TriggerEventContainer `json:"manual"` TriggeredTime DateTime `json:"triggered_time"` Type string `json:"type"` }
TriggerEventResult type.
func NewTriggerEventResult ¶
func NewTriggerEventResult() *TriggerEventResult
NewTriggerEventResult returns a TriggerEventResult.
type TrimProcessor ¶
type TrimProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
TrimProcessor type.
func NewTrimProcessor ¶
func NewTrimProcessor() *TrimProcessor
NewTrimProcessor returns a TrimProcessor.
type TrimTokenFilter ¶
type TrimTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
TrimTokenFilter type.
func NewTrimTokenFilter ¶
func NewTrimTokenFilter() *TrimTokenFilter
NewTrimTokenFilter returns a TrimTokenFilter.
type TruncateTokenFilter ¶
type TruncateTokenFilter struct { Length *int `json:"length,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
TruncateTokenFilter type.
func NewTruncateTokenFilter ¶
func NewTruncateTokenFilter() *TruncateTokenFilter
NewTruncateTokenFilter returns a TruncateTokenFilter.
type TypeFieldMappings ¶
type TypeFieldMappings struct {
Mappings map[string]FieldMapping `json:"mappings"`
}
TypeFieldMappings type.
func NewTypeFieldMappings ¶
func NewTypeFieldMappings() *TypeFieldMappings
NewTypeFieldMappings returns a TypeFieldMappings.
type TypeMapping ¶
type TypeMapping struct { AllField *AllField `json:"all_field,omitempty"` DataStreamTimestamp_ *DataStreamTimestamp `json:"_data_stream_timestamp,omitempty"` DateDetection *bool `json:"date_detection,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` DynamicDateFormats []string `json:"dynamic_date_formats,omitempty"` DynamicTemplates []map[string]DynamicTemplate `json:"dynamic_templates,omitempty"` Enabled *bool `json:"enabled,omitempty"` FieldNames_ *FieldNamesField `json:"_field_names,omitempty"` IndexField *IndexField `json:"index_field,omitempty"` Meta_ map[string]json.RawMessage `json:"_meta,omitempty"` NumericDetection *bool `json:"numeric_detection,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Routing_ *RoutingField `json:"_routing,omitempty"` Runtime map[string]RuntimeField `json:"runtime,omitempty"` Size_ *SizeField `json:"_size,omitempty"` Source_ *SourceField `json:"_source,omitempty"` }
TypeMapping type.
func (*TypeMapping) UnmarshalJSON ¶
func (s *TypeMapping) UnmarshalJSON(data []byte) error
type TypeQuery ¶
type TypeQuery struct { Boost *float32 `json:"boost,omitempty"` QueryName_ *string `json:"_name,omitempty"` Value string `json:"value"` }
TypeQuery type.
type UaxEmailUrlTokenizer ¶
type UaxEmailUrlTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
UaxEmailUrlTokenizer type.
func NewUaxEmailUrlTokenizer ¶
func NewUaxEmailUrlTokenizer() *UaxEmailUrlTokenizer
NewUaxEmailUrlTokenizer returns a UaxEmailUrlTokenizer.
type UnassignedInformation ¶
type UnassignedInformation struct { AllocationStatus *string `json:"allocation_status,omitempty"` At DateTime `json:"at"` Delayed *bool `json:"delayed,omitempty"` Details *string `json:"details,omitempty"` FailedAllocationAttempts *int `json:"failed_allocation_attempts,omitempty"` LastAllocationStatus *string `json:"last_allocation_status,omitempty"` Reason unassignedinformationreason.UnassignedInformationReason `json:"reason"` }
UnassignedInformation type.
func NewUnassignedInformation ¶
func NewUnassignedInformation() *UnassignedInformation
NewUnassignedInformation returns a UnassignedInformation.
type UniqueTokenFilter ¶
type UniqueTokenFilter struct { OnlyOnSamePosition *bool `json:"only_on_same_position,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
UniqueTokenFilter type.
func NewUniqueTokenFilter ¶
func NewUniqueTokenFilter() *UniqueTokenFilter
NewUniqueTokenFilter returns a UniqueTokenFilter.
type UnmappedRareTermsAggregate ¶
type UnmappedRareTermsAggregate struct { Buckets BucketsVoid `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
UnmappedRareTermsAggregate type.
func NewUnmappedRareTermsAggregate ¶
func NewUnmappedRareTermsAggregate() *UnmappedRareTermsAggregate
NewUnmappedRareTermsAggregate returns an UnmappedRareTermsAggregate.
func (*UnmappedRareTermsAggregate) UnmarshalJSON ¶
func (s *UnmappedRareTermsAggregate) UnmarshalJSON(data []byte) error
type UnmappedSamplerAggregate ¶
type UnmappedSamplerAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
UnmappedSamplerAggregate type.
func NewUnmappedSamplerAggregate ¶
func NewUnmappedSamplerAggregate() *UnmappedSamplerAggregate
NewUnmappedSamplerAggregate returns an UnmappedSamplerAggregate.
func (UnmappedSamplerAggregate) MarshalJSON ¶
func (s UnmappedSamplerAggregate) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*UnmappedSamplerAggregate) UnmarshalJSON ¶
func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error
type UnmappedSignificantTermsAggregate ¶
type UnmappedSignificantTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsVoid `json:"buckets"` DocCount *int64 `json:"doc_count,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
UnmappedSignificantTermsAggregate type.
func NewUnmappedSignificantTermsAggregate ¶
func NewUnmappedSignificantTermsAggregate() *UnmappedSignificantTermsAggregate
NewUnmappedSignificantTermsAggregate returns an UnmappedSignificantTermsAggregate.
func (*UnmappedSignificantTermsAggregate) UnmarshalJSON ¶
func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error
type UnmappedTermsAggregate ¶
type UnmappedTermsAggregate struct { Buckets BucketsVoid `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` SumOtherDocCount *int64 `json:"sum_other_doc_count,omitempty"` }
UnmappedTermsAggregate type.
func NewUnmappedTermsAggregate ¶
func NewUnmappedTermsAggregate() *UnmappedTermsAggregate
NewUnmappedTermsAggregate returns an UnmappedTermsAggregate.
func (*UnmappedTermsAggregate) UnmarshalJSON ¶
func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error
type UnratedDocument ¶
UnratedDocument type.
func NewUnratedDocument ¶
func NewUnratedDocument() *UnratedDocument
NewUnratedDocument returns an UnratedDocument.
type UnsignedLongNumberProperty ¶
type UnsignedLongNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` IgnoreMalformed *bool `json:"ignore_malformed,omitempty"` Index *bool `json:"index,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue uint64 `json:"null_value,omitempty"` OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Script Script `json:"script,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` // TimeSeriesMetric For internal use by Elastic only. Marks the field as a time series metric. // Defaults to false. TimeSeriesMetric *timeseriesmetrictype.TimeSeriesMetricType `json:"time_series_metric,omitempty"` Type string `json:"type,omitempty"` }
UnsignedLongNumberProperty type.
func NewUnsignedLongNumberProperty ¶
func NewUnsignedLongNumberProperty() *UnsignedLongNumberProperty
NewUnsignedLongNumberProperty returns an UnsignedLongNumberProperty.
func (*UnsignedLongNumberProperty) UnmarshalJSON ¶
func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error
type UpdateByQueryRethrottleNode ¶
type UpdateByQueryRethrottleNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` Ip string `json:"ip"` Name string `json:"name"` Roles []noderole.NodeRole `json:"roles,omitempty"` Tasks map[TaskId]TaskInfo `json:"tasks"` TransportAddress string `json:"transport_address"` }
UpdateByQueryRethrottleNode type.
func NewUpdateByQueryRethrottleNode ¶
func NewUpdateByQueryRethrottleNode() *UpdateByQueryRethrottleNode
NewUpdateByQueryRethrottleNode returns an UpdateByQueryRethrottleNode.
type UppercaseProcessor ¶
type UppercaseProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
UppercaseProcessor type.
func NewUppercaseProcessor ¶
func NewUppercaseProcessor() *UppercaseProcessor
NewUppercaseProcessor returns an UppercaseProcessor.
type UppercaseTokenFilter ¶
type UppercaseTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
UppercaseTokenFilter type.
func NewUppercaseTokenFilter ¶
func NewUppercaseTokenFilter() *UppercaseTokenFilter
NewUppercaseTokenFilter returns an UppercaseTokenFilter.
type UrlDecodeProcessor ¶
type UrlDecodeProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
UrlDecodeProcessor type.
func NewUrlDecodeProcessor ¶
func NewUrlDecodeProcessor() *UrlDecodeProcessor
NewUrlDecodeProcessor returns a UrlDecodeProcessor.
type UsageStatsIndex ¶
type UsageStatsIndex struct {
Shards []UsageStatsShards `json:"shards"`
}
UsageStatsIndex type.
func NewUsageStatsIndex ¶
func NewUsageStatsIndex() *UsageStatsIndex
NewUsageStatsIndex returns a UsageStatsIndex.
type UsageStatsShards ¶
type UsageStatsShards struct { Routing ShardRouting `json:"routing"` Stats IndicesShardsStats `json:"stats"` TrackingId string `json:"tracking_id"` TrackingStartedAtMillis int64 `json:"tracking_started_at_millis"` }
UsageStatsShards type.
func NewUsageStatsShards ¶
func NewUsageStatsShards() *UsageStatsShards
NewUsageStatsShards returns a UsageStatsShards.
type User ¶
type User struct { Email string `json:"email,omitempty"` Enabled bool `json:"enabled"` FullName string `json:"full_name,omitempty"` Metadata map[string]json.RawMessage `json:"metadata"` ProfileUid *string `json:"profile_uid,omitempty"` Roles []string `json:"roles"` Username string `json:"username"` }
User type.
type UserAgentProcessor ¶
type UserAgentProcessor struct { Description *string `json:"description,omitempty"` Field string `json:"field"` If *string `json:"if,omitempty"` IgnoreFailure *bool `json:"ignore_failure,omitempty"` IgnoreMissing *bool `json:"ignore_missing,omitempty"` OnFailure []ProcessorContainer `json:"on_failure,omitempty"` Options []useragentproperty.UserAgentProperty `json:"options,omitempty"` RegexFile *string `json:"regex_file,omitempty"` Tag *string `json:"tag,omitempty"` TargetField *string `json:"target_field,omitempty"` }
UserAgentProcessor type.
func NewUserAgentProcessor ¶
func NewUserAgentProcessor() *UserAgentProcessor
NewUserAgentProcessor returns a UserAgentProcessor.
type UserIndicesPrivileges ¶
type UserIndicesPrivileges struct { // AllowRestrictedIndices Set to `true` if using wildcard or regular expressions for patterns that // cover restricted indices. Implicitly, restricted indices have limited // privileges that can cause pattern tests to fail. If restricted indices are // explicitly included in the `names` list, Elasticsearch checks privileges // against these indices regardless of the value set for // `allow_restricted_indices`. AllowRestrictedIndices bool `json:"allow_restricted_indices"` // FieldSecurity The document fields that the owners of the role have read access to. FieldSecurity []FieldSecurity `json:"field_security,omitempty"` // Names A list of indices (or index name patterns) to which the permissions in this // entry apply. Names []string `json:"names"` // Privileges The index level privileges that owners of the role have on the specified // indices. Privileges []indexprivilege.IndexPrivilege `json:"privileges"` // Query Search queries that define the documents the user has access to. A document // within the specified indices must match these queries for it to be accessible // by the owners of the role. Query []IndicesPrivilegesQuery `json:"query,omitempty"` }
UserIndicesPrivileges type.
func NewUserIndicesPrivileges ¶
func NewUserIndicesPrivileges() *UserIndicesPrivileges
NewUserIndicesPrivileges returns a UserIndicesPrivileges.
type UserProfile ¶
type UserProfile struct { Data map[string]json.RawMessage `json:"data"` Enabled *bool `json:"enabled,omitempty"` Labels map[string]json.RawMessage `json:"labels"` Uid string `json:"uid"` User UserProfileUser `json:"user"` }
UserProfile type.
type UserProfileHitMetadata ¶
type UserProfileHitMetadata struct { PrimaryTerm_ int64 `json:"_primary_term"` SeqNo_ int64 `json:"_seq_no"` }
UserProfileHitMetadata type.
func NewUserProfileHitMetadata ¶
func NewUserProfileHitMetadata() *UserProfileHitMetadata
NewUserProfileHitMetadata returns a UserProfileHitMetadata.
type UserProfileUser ¶
type UserProfileUser struct { Email string `json:"email,omitempty"` FullName string `json:"full_name,omitempty"` RealmDomain *string `json:"realm_domain,omitempty"` RealmName string `json:"realm_name"` Roles []string `json:"roles"` Username string `json:"username"` }
UserProfileUser type.
func NewUserProfileUser ¶
func NewUserProfileUser() *UserProfileUser
NewUserProfileUser returns a UserProfileUser.
type UserProfileWithMetadata ¶
type UserProfileWithMetadata struct { Data map[string]json.RawMessage `json:"data"` Doc_ UserProfileHitMetadata `json:"_doc"` Enabled *bool `json:"enabled,omitempty"` Labels map[string]json.RawMessage `json:"labels"` LastSynchronized int64 `json:"last_synchronized"` Uid string `json:"uid"` User UserProfileUser `json:"user"` }
UserProfileWithMetadata type.
func NewUserProfileWithMetadata ¶
func NewUserProfileWithMetadata() *UserProfileWithMetadata
NewUserProfileWithMetadata returns a UserProfileWithMetadata.
type ValidationLoss ¶
type ValidationLoss struct { // FoldValues Validation loss values for every added decision tree during the forest // growing procedure. FoldValues []string `json:"fold_values"` // LossType The type of the loss metric. For example, binomial_logistic. LossType string `json:"loss_type"` }
ValidationLoss type.
func NewValidationLoss ¶
func NewValidationLoss() *ValidationLoss
NewValidationLoss returns a ValidationLoss.
type ValueCountAggregate ¶
type ValueCountAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
ValueCountAggregate type.
func NewValueCountAggregate ¶
func NewValueCountAggregate() *ValueCountAggregate
NewValueCountAggregate returns a ValueCountAggregate.
type ValueCountAggregation ¶
type ValueCountAggregation struct { Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
ValueCountAggregation type.
func NewValueCountAggregation ¶
func NewValueCountAggregation() *ValueCountAggregation
NewValueCountAggregation returns a ValueCountAggregation.
type VariableWidthHistogramAggregate ¶
type VariableWidthHistogramAggregate struct { Buckets BucketsVariableWidthHistogramBucket `json:"buckets"` Meta map[string]json.RawMessage `json:"meta,omitempty"` }
VariableWidthHistogramAggregate type.
func NewVariableWidthHistogramAggregate ¶
func NewVariableWidthHistogramAggregate() *VariableWidthHistogramAggregate
NewVariableWidthHistogramAggregate returns a VariableWidthHistogramAggregate.
func (*VariableWidthHistogramAggregate) UnmarshalJSON ¶
func (s *VariableWidthHistogramAggregate) UnmarshalJSON(data []byte) error
type VariableWidthHistogramAggregation ¶
type VariableWidthHistogramAggregation struct { Buckets *int `json:"buckets,omitempty"` Field *string `json:"field,omitempty"` InitialBuffer *int `json:"initial_buffer,omitempty"` ShardSize *int `json:"shard_size,omitempty"` }
VariableWidthHistogramAggregation type.
func NewVariableWidthHistogramAggregation ¶
func NewVariableWidthHistogramAggregation() *VariableWidthHistogramAggregation
NewVariableWidthHistogramAggregation returns a VariableWidthHistogramAggregation.
type VariableWidthHistogramBucket ¶
type VariableWidthHistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` Key Float64 `json:"key"` KeyAsString *string `json:"key_as_string,omitempty"` Max Float64 `json:"max"` MaxAsString *string `json:"max_as_string,omitempty"` Min Float64 `json:"min"` MinAsString *string `json:"min_as_string,omitempty"` }
VariableWidthHistogramBucket type.
func NewVariableWidthHistogramBucket ¶
func NewVariableWidthHistogramBucket() *VariableWidthHistogramBucket
NewVariableWidthHistogramBucket returns a VariableWidthHistogramBucket.
func (VariableWidthHistogramBucket) MarshalJSON ¶
func (s VariableWidthHistogramBucket) MarshalJSON() ([]byte, error)
MarshalJSON overrides marshalling for types with additional properties
func (*VariableWidthHistogramBucket) UnmarshalJSON ¶
func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error
type Vector ¶
type Vector struct { Available bool `json:"available"` DenseVectorDimsAvgCount int `json:"dense_vector_dims_avg_count"` DenseVectorFieldsCount int `json:"dense_vector_fields_count"` Enabled bool `json:"enabled"` SparseVectorFieldsCount *int `json:"sparse_vector_fields_count,omitempty"` }
Vector type.
type VerifyIndex ¶
type VerifyIndex struct { CheckIndexTime Duration `json:"check_index_time,omitempty"` CheckIndexTimeInMillis int64 `json:"check_index_time_in_millis"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` }
VerifyIndex type.
type VersionProperty ¶
type VersionProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
VersionProperty type.
func NewVersionProperty ¶
func NewVersionProperty() *VersionProperty
NewVersionProperty returns a VersionProperty.
func (*VersionProperty) UnmarshalJSON ¶
func (s *VersionProperty) UnmarshalJSON(data []byte) error
type Vertex ¶
type Vertex struct { Depth int64 `json:"depth"` Field string `json:"field"` Term string `json:"term"` Weight Float64 `json:"weight"` }
Vertex type.
type VertexDefinition ¶
type VertexDefinition struct { Exclude []string `json:"exclude,omitempty"` Field string `json:"field"` Include []VertexInclude `json:"include,omitempty"` MinDocCount *int64 `json:"min_doc_count,omitempty"` ShardMinDocCount *int64 `json:"shard_min_doc_count,omitempty"` Size *int `json:"size,omitempty"` }
VertexDefinition type.
func NewVertexDefinition ¶
func NewVertexDefinition() *VertexDefinition
NewVertexDefinition returns a VertexDefinition.
type VertexInclude ¶
VertexInclude type.
func NewVertexInclude ¶
func NewVertexInclude() *VertexInclude
NewVertexInclude returns a VertexInclude.
type WaitForActiveShards ¶
type WaitForActiveShards interface{}
WaitForActiveShards holds the union for the following types:
int waitforactiveshardoptions.WaitForActiveShardOptions
type WarmerStats ¶
type WarmerStats struct { Current int64 `json:"current"` Total int64 `json:"total"` TotalTime Duration `json:"total_time,omitempty"` TotalTimeInMillis int64 `json:"total_time_in_millis"` }
WarmerStats type.
type Watch ¶
type Watch struct { Actions map[string]WatcherAction `json:"actions"` Condition WatcherCondition `json:"condition"` Input WatcherInput `json:"input"` Metadata map[string]json.RawMessage `json:"metadata,omitempty"` Status *WatchStatus `json:"status,omitempty"` ThrottlePeriod Duration `json:"throttle_period,omitempty"` ThrottlePeriodInMillis *int64 `json:"throttle_period_in_millis,omitempty"` Transform *TransformContainer `json:"transform,omitempty"` Trigger TriggerContainer `json:"trigger"` }
Watch type.
type WatchRecord ¶
type WatchRecord struct { Condition WatcherCondition `json:"condition"` Input WatcherInput `json:"input"` Messages []string `json:"messages"` Metadata map[string]json.RawMessage `json:"metadata,omitempty"` Node string `json:"node"` Result ExecutionResult `json:"result"` State executionstatus.ExecutionStatus `json:"state"` Status *WatchStatus `json:"status,omitempty"` TriggerEvent TriggerEventResult `json:"trigger_event"` User string `json:"user"` WatchId string `json:"watch_id"` }
WatchRecord type.
type WatchRecordQueuedStats ¶
type WatchRecordQueuedStats struct {
ExecutionTime DateTime `json:"execution_time"`
}
WatchRecordQueuedStats type.
func NewWatchRecordQueuedStats ¶
func NewWatchRecordQueuedStats() *WatchRecordQueuedStats
NewWatchRecordQueuedStats returns a WatchRecordQueuedStats.
type WatchRecordStats ¶
type WatchRecordStats struct { ExecutedActions []string `json:"executed_actions,omitempty"` ExecutionPhase executionphase.ExecutionPhase `json:"execution_phase"` ExecutionTime DateTime `json:"execution_time"` TriggeredTime DateTime `json:"triggered_time"` WatchId string `json:"watch_id"` WatchRecordId string `json:"watch_record_id"` }
WatchRecordStats type.
func NewWatchRecordStats ¶
func NewWatchRecordStats() *WatchRecordStats
NewWatchRecordStats returns a WatchRecordStats.
type WatchStatus ¶
type WatchStatus struct { Actions WatcherStatusActions `json:"actions"` ExecutionState *string `json:"execution_state,omitempty"` LastChecked DateTime `json:"last_checked,omitempty"` LastMetCondition DateTime `json:"last_met_condition,omitempty"` State ActivationState `json:"state"` Version int64 `json:"version"` }
WatchStatus type.
type Watcher ¶
type Watcher struct { Available bool `json:"available"` Count Counter `json:"count"` Enabled bool `json:"enabled"` Execution WatcherActions `json:"execution"` Watch WatcherWatch `json:"watch"` }
Watcher type.
type WatcherAction ¶
type WatcherAction struct { ActionType *actiontype.ActionType `json:"action_type,omitempty"` Condition *WatcherCondition `json:"condition,omitempty"` Email *EmailAction `json:"email,omitempty"` Foreach *string `json:"foreach,omitempty"` Index *IndexAction `json:"index,omitempty"` Logging *LoggingAction `json:"logging,omitempty"` MaxIterations *int `json:"max_iterations,omitempty"` Name *string `json:"name,omitempty"` Pagerduty *PagerDutyAction `json:"pagerduty,omitempty"` Slack *SlackAction `json:"slack,omitempty"` ThrottlePeriod Duration `json:"throttle_period,omitempty"` ThrottlePeriodInMillis *int64 `json:"throttle_period_in_millis,omitempty"` Transform *TransformContainer `json:"transform,omitempty"` Webhook *WebhookAction `json:"webhook,omitempty"` }
WatcherAction type.
func NewWatcherAction ¶
func NewWatcherAction() *WatcherAction
NewWatcherAction returns a WatcherAction.
type WatcherActionTotals ¶
type WatcherActionTotals struct { Total Duration `json:"total"` TotalTimeInMs int64 `json:"total_time_in_ms"` }
WatcherActionTotals type.
func NewWatcherActionTotals ¶
func NewWatcherActionTotals() *WatcherActionTotals
NewWatcherActionTotals returns a WatcherActionTotals.
type WatcherActions ¶
type WatcherActions struct {
Actions map[string]WatcherActionTotals `json:"actions"`
}
WatcherActions type.
func NewWatcherActions ¶
func NewWatcherActions() *WatcherActions
NewWatcherActions returns a WatcherActions.
type WatcherCondition ¶
type WatcherCondition struct { Always *AlwaysCondition `json:"always,omitempty"` ArrayCompare map[string]ArrayCompareCondition `json:"array_compare,omitempty"` Compare map[string]map[conditionop.ConditionOp]FieldValue `json:"compare,omitempty"` Never *NeverCondition `json:"never,omitempty"` Script *ScriptCondition `json:"script,omitempty"` }
WatcherCondition type.
func NewWatcherCondition ¶
func NewWatcherCondition() *WatcherCondition
NewWatcherCondition returns a WatcherCondition.
type WatcherInput ¶
type WatcherInput struct { Chain *ChainInput `json:"chain,omitempty"` Http *HttpInput `json:"http,omitempty"` Search *SearchInput `json:"search,omitempty"` Simple map[string]json.RawMessage `json:"simple,omitempty"` }
WatcherInput type.
func NewWatcherInput ¶
func NewWatcherInput() *WatcherInput
NewWatcherInput returns a WatcherInput.
type WatcherNodeStats ¶
type WatcherNodeStats struct { CurrentWatches []WatchRecordStats `json:"current_watches,omitempty"` ExecutionThreadPool ExecutionThreadPool `json:"execution_thread_pool"` NodeId string `json:"node_id"` QueuedWatches []WatchRecordQueuedStats `json:"queued_watches,omitempty"` WatchCount int64 `json:"watch_count"` WatcherState watcherstate.WatcherState `json:"watcher_state"` }
WatcherNodeStats type.
func NewWatcherNodeStats ¶
func NewWatcherNodeStats() *WatcherNodeStats
NewWatcherNodeStats returns a WatcherNodeStats.
type WatcherStatusActions ¶
type WatcherStatusActions map[string]ActionStatus
WatcherStatusActions type alias.
type WatcherWatch ¶
type WatcherWatch struct { Action map[string]Counter `json:"action,omitempty"` Condition map[string]Counter `json:"condition,omitempty"` Input map[string]Counter `json:"input"` Trigger WatcherWatchTrigger `json:"trigger"` }
WatcherWatch type.
func NewWatcherWatch ¶
func NewWatcherWatch() *WatcherWatch
NewWatcherWatch returns a WatcherWatch.
type WatcherWatchTrigger ¶
type WatcherWatchTrigger struct { All_ Counter `json:"_all"` Schedule *WatcherWatchTriggerSchedule `json:"schedule,omitempty"` }
WatcherWatchTrigger type.
func NewWatcherWatchTrigger ¶
func NewWatcherWatchTrigger() *WatcherWatchTrigger
NewWatcherWatchTrigger returns a WatcherWatchTrigger.
type WatcherWatchTriggerSchedule ¶
type WatcherWatchTriggerSchedule struct { Active int64 `json:"active"` All_ Counter `json:"_all"` Cron Counter `json:"cron"` Total int64 `json:"total"` }
WatcherWatchTriggerSchedule type.
func NewWatcherWatchTriggerSchedule ¶
func NewWatcherWatchTriggerSchedule() *WatcherWatchTriggerSchedule
NewWatcherWatchTriggerSchedule returns a WatcherWatchTriggerSchedule.
type WebhookAction ¶
type WebhookAction struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` ConnectionTimeout Duration `json:"connection_timeout,omitempty"` Headers map[string]string `json:"headers,omitempty"` Host *string `json:"host,omitempty"` Method *httpinputmethod.HttpInputMethod `json:"method,omitempty"` Params map[string]string `json:"params,omitempty"` Path *string `json:"path,omitempty"` Port *uint `json:"port,omitempty"` Proxy *HttpInputProxy `json:"proxy,omitempty"` ReadTimeout Duration `json:"read_timeout,omitempty"` Scheme *connectionscheme.ConnectionScheme `json:"scheme,omitempty"` Url *string `json:"url,omitempty"` }
WebhookAction type.
func NewWebhookAction ¶
func NewWebhookAction() *WebhookAction
NewWebhookAction returns a WebhookAction.
type WebhookResult ¶
type WebhookResult struct { Request HttpInputRequestResult `json:"request"` Response *HttpInputResponseResult `json:"response,omitempty"` }
WebhookResult type.
func NewWebhookResult ¶
func NewWebhookResult() *WebhookResult
NewWebhookResult returns a WebhookResult.
type WeightedAverageAggregation ¶
type WeightedAverageAggregation struct { Format *string `json:"format,omitempty"` Meta map[string]json.RawMessage `json:"meta,omitempty"` Name *string `json:"name,omitempty"` Value *WeightedAverageValue `json:"value,omitempty"` ValueType *valuetype.ValueType `json:"value_type,omitempty"` Weight *WeightedAverageValue `json:"weight,omitempty"` }
WeightedAverageAggregation type.
func NewWeightedAverageAggregation ¶
func NewWeightedAverageAggregation() *WeightedAverageAggregation
NewWeightedAverageAggregation returns a WeightedAverageAggregation.
type WeightedAverageValue ¶
type WeightedAverageValue struct { Field *string `json:"field,omitempty"` Missing *Float64 `json:"missing,omitempty"` Script Script `json:"script,omitempty"` }
WeightedAverageValue type.
func NewWeightedAverageValue ¶
func NewWeightedAverageValue() *WeightedAverageValue
NewWeightedAverageValue returns a WeightedAverageValue.
type WeightedAvgAggregate ¶
type WeightedAvgAggregate struct { Meta map[string]json.RawMessage `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. Value Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` }
WeightedAvgAggregate type.
func NewWeightedAvgAggregate ¶
func NewWeightedAvgAggregate() *WeightedAvgAggregate
NewWeightedAvgAggregate returns a WeightedAvgAggregate.
type WhitespaceAnalyzer ¶
type WhitespaceAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
WhitespaceAnalyzer type.
func NewWhitespaceAnalyzer ¶
func NewWhitespaceAnalyzer() *WhitespaceAnalyzer
NewWhitespaceAnalyzer returns a WhitespaceAnalyzer.
type WhitespaceTokenizer ¶
type WhitespaceTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` }
WhitespaceTokenizer type.
func NewWhitespaceTokenizer ¶
func NewWhitespaceTokenizer() *WhitespaceTokenizer
NewWhitespaceTokenizer returns a WhitespaceTokenizer.
type WildcardProperty ¶
type WildcardProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` // Meta Metadata about the field. Meta map[string]string `json:"meta,omitempty"` NullValue *string `json:"null_value,omitempty"` Properties map[string]Property `json:"properties,omitempty"` Similarity *string `json:"similarity,omitempty"` Store *bool `json:"store,omitempty"` Type string `json:"type,omitempty"` }
WildcardProperty type.
func NewWildcardProperty ¶
func NewWildcardProperty() *WildcardProperty
NewWildcardProperty returns a WildcardProperty.
func (*WildcardProperty) UnmarshalJSON ¶
func (s *WildcardProperty) UnmarshalJSON(data []byte) error
type WildcardQuery ¶
type WildcardQuery struct { Boost *float32 `json:"boost,omitempty"` // CaseInsensitive Allows case insensitive matching of the pattern with the indexed field values // when set to true. Default is false which means the case sensitivity of // matching depends on the underlying field’s mapping. CaseInsensitive *bool `json:"case_insensitive,omitempty"` QueryName_ *string `json:"_name,omitempty"` // Rewrite Method used to rewrite the query Rewrite *string `json:"rewrite,omitempty"` // Value Wildcard pattern for terms you wish to find in the provided field. Required, // when wildcard is not set. Value *string `json:"value,omitempty"` // Wildcard Wildcard pattern for terms you wish to find in the provided field. Required, // when value is not set. Wildcard *string `json:"wildcard,omitempty"` }
WildcardQuery type.
func NewWildcardQuery ¶
func NewWildcardQuery() *WildcardQuery
NewWildcardQuery returns a WildcardQuery.
type WktGeoBounds ¶
type WktGeoBounds struct {
Wkt string `json:"wkt"`
}
WktGeoBounds type.
func NewWktGeoBounds ¶
func NewWktGeoBounds() *WktGeoBounds
NewWktGeoBounds returns a WktGeoBounds.
type WordDelimiterGraphTokenFilter ¶
type WordDelimiterGraphTokenFilter struct { AdjustOffsets *bool `json:"adjust_offsets,omitempty"` CatenateAll *bool `json:"catenate_all,omitempty"` CatenateNumbers *bool `json:"catenate_numbers,omitempty"` CatenateWords *bool `json:"catenate_words,omitempty"` GenerateNumberParts *bool `json:"generate_number_parts,omitempty"` GenerateWordParts *bool `json:"generate_word_parts,omitempty"` IgnoreKeywords *bool `json:"ignore_keywords,omitempty"` PreserveOriginal *bool `json:"preserve_original,omitempty"` ProtectedWords []string `json:"protected_words,omitempty"` ProtectedWordsPath *string `json:"protected_words_path,omitempty"` SplitOnCaseChange *bool `json:"split_on_case_change,omitempty"` SplitOnNumerics *bool `json:"split_on_numerics,omitempty"` StemEnglishPossessive *bool `json:"stem_english_possessive,omitempty"` Type string `json:"type,omitempty"` TypeTable []string `json:"type_table,omitempty"` TypeTablePath *string `json:"type_table_path,omitempty"` Version *string `json:"version,omitempty"` }
WordDelimiterGraphTokenFilter type.
func NewWordDelimiterGraphTokenFilter ¶
func NewWordDelimiterGraphTokenFilter() *WordDelimiterGraphTokenFilter
NewWordDelimiterGraphTokenFilter returns a WordDelimiterGraphTokenFilter.
type WordDelimiterTokenFilter ¶
type WordDelimiterTokenFilter struct { CatenateAll *bool `json:"catenate_all,omitempty"` CatenateNumbers *bool `json:"catenate_numbers,omitempty"` CatenateWords *bool `json:"catenate_words,omitempty"` GenerateNumberParts *bool `json:"generate_number_parts,omitempty"` GenerateWordParts *bool `json:"generate_word_parts,omitempty"` PreserveOriginal *bool `json:"preserve_original,omitempty"` ProtectedWords []string `json:"protected_words,omitempty"` ProtectedWordsPath *string `json:"protected_words_path,omitempty"` SplitOnCaseChange *bool `json:"split_on_case_change,omitempty"` SplitOnNumerics *bool `json:"split_on_numerics,omitempty"` StemEnglishPossessive *bool `json:"stem_english_possessive,omitempty"` Type string `json:"type,omitempty"` TypeTable []string `json:"type_table,omitempty"` TypeTablePath *string `json:"type_table_path,omitempty"` Version *string `json:"version,omitempty"` }
WordDelimiterTokenFilter type.
func NewWordDelimiterTokenFilter ¶
func NewWordDelimiterTokenFilter() *WordDelimiterTokenFilter
NewWordDelimiterTokenFilter returns a WordDelimiterTokenFilter.
type WrapperQuery ¶
type WrapperQuery struct { Boost *float32 `json:"boost,omitempty"` // Query A base64 encoded query. The binary data format can be any of JSON, YAML, CBOR // or SMILE encodings Query string `json:"query"` QueryName_ *string `json:"_name,omitempty"` }
WrapperQuery type.
func NewWrapperQuery ¶
func NewWrapperQuery() *WrapperQuery
NewWrapperQuery returns a WrapperQuery.
type XpackDatafeed ¶
type XpackDatafeed struct {
Count int64 `json:"count"`
}
XpackDatafeed type.
func NewXpackDatafeed ¶
func NewXpackDatafeed() *XpackDatafeed
NewXpackDatafeed returns a XpackDatafeed.
type XpackFeature ¶
type XpackFeature struct { Available bool `json:"available"` Description *string `json:"description,omitempty"` Enabled bool `json:"enabled"` NativeCodeInfo *NativeCodeInformation `json:"native_code_info,omitempty"` }
XpackFeature type.
func NewXpackFeature ¶
func NewXpackFeature() *XpackFeature
NewXpackFeature returns an XpackFeature.
type XpackFeatures ¶
type XpackFeatures struct { AggregateMetric XpackFeature `json:"aggregate_metric"` Analytics XpackFeature `json:"analytics"` Archive XpackFeature `json:"archive"` Ccr XpackFeature `json:"ccr"` DataFrame *XpackFeature `json:"data_frame,omitempty"` DataScience *XpackFeature `json:"data_science,omitempty"` DataStreams XpackFeature `json:"data_streams"` DataTiers XpackFeature `json:"data_tiers"` Enrich XpackFeature `json:"enrich"` Eql XpackFeature `json:"eql"` Flattened *XpackFeature `json:"flattened,omitempty"` FrozenIndices XpackFeature `json:"frozen_indices"` Graph XpackFeature `json:"graph"` Ilm XpackFeature `json:"ilm"` Logstash XpackFeature `json:"logstash"` Ml XpackFeature `json:"ml"` Monitoring XpackFeature `json:"monitoring"` Rollup XpackFeature `json:"rollup"` RuntimeFields *XpackFeature `json:"runtime_fields,omitempty"` SearchableSnapshots XpackFeature `json:"searchable_snapshots"` Security XpackFeature `json:"security"` Slm XpackFeature `json:"slm"` Spatial XpackFeature `json:"spatial"` Sql XpackFeature `json:"sql"` Transform XpackFeature `json:"transform"` Vectors *XpackFeature `json:"vectors,omitempty"` VotingOnly XpackFeature `json:"voting_only"` Watcher XpackFeature `json:"watcher"` }
XpackFeatures type.
func NewXpackFeatures ¶
func NewXpackFeatures() *XpackFeatures
NewXpackFeatures returns an XpackFeatures.
type XpackQuery ¶
type XpackQuery struct { Count *int `json:"count,omitempty"` Failed *int `json:"failed,omitempty"` Paging *int `json:"paging,omitempty"` Total *int `json:"total,omitempty"` }
XpackQuery type.
type XpackRealm ¶
type XpackRealm struct { Available bool `json:"available"` Cache []RealmCache `json:"cache,omitempty"` Enabled bool `json:"enabled"` HasAuthorizationRealms []bool `json:"has_authorization_realms,omitempty"` HasDefaultUsernamePattern []bool `json:"has_default_username_pattern,omitempty"` HasTruststore []bool `json:"has_truststore,omitempty"` IsAuthenticationDelegated []bool `json:"is_authentication_delegated,omitempty"` Name []string `json:"name,omitempty"` Order []int64 `json:"order,omitempty"` Size []int64 `json:"size,omitempty"` }
XpackRealm type.
type XpackRoleMapping ¶
XpackRoleMapping type.
func NewXpackRoleMapping ¶
func NewXpackRoleMapping() *XpackRoleMapping
NewXpackRoleMapping returns an XpackRoleMapping.
type XpackRuntimeFieldTypes ¶
type XpackRuntimeFieldTypes struct { Available bool `json:"available"` Enabled bool `json:"enabled"` FieldTypes []RuntimeFieldsType `json:"field_types"` }
XpackRuntimeFieldTypes type.
func NewXpackRuntimeFieldTypes ¶
func NewXpackRuntimeFieldTypes() *XpackRuntimeFieldTypes
NewXpackRuntimeFieldTypes returns an XpackRuntimeFieldTypes.
type ZeroShotClassificationInferenceOptions ¶
type ZeroShotClassificationInferenceOptions struct { // ClassificationLabels The zero shot classification labels indicating entailment, neutral, and // contradiction // Must contain exactly and only entailment, neutral, and contradiction ClassificationLabels []string `json:"classification_labels"` // HypothesisTemplate Hypothesis template used when tokenizing labels for prediction HypothesisTemplate *string `json:"hypothesis_template,omitempty"` // Labels The labels to predict. Labels []string `json:"labels,omitempty"` // MultiLabel Indicates if more than one true label exists. MultiLabel *bool `json:"multi_label,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options to update when inferring Tokenization *TokenizationConfigContainer `json:"tokenization,omitempty"` }
ZeroShotClassificationInferenceOptions type.
func NewZeroShotClassificationInferenceOptions ¶
func NewZeroShotClassificationInferenceOptions() *ZeroShotClassificationInferenceOptions
NewZeroShotClassificationInferenceOptions returns a ZeroShotClassificationInferenceOptions.
type ZeroShotClassificationInferenceUpdateOptions ¶
type ZeroShotClassificationInferenceUpdateOptions struct { // Labels The labels to predict. Labels []string `json:"labels"` // MultiLabel Update the configured multi label option. Indicates if more than one true // label exists. Defaults to the configured value. MultiLabel *bool `json:"multi_label,omitempty"` // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. ResultsField *string `json:"results_field,omitempty"` // Tokenization The tokenization options to update when inferring Tokenization *NlpTokenizationUpdateOptions `json:"tokenization,omitempty"` }
ZeroShotClassificationInferenceUpdateOptions type.
func NewZeroShotClassificationInferenceUpdateOptions ¶
func NewZeroShotClassificationInferenceUpdateOptions() *ZeroShotClassificationInferenceUpdateOptions
NewZeroShotClassificationInferenceUpdateOptions returns a ZeroShotClassificationInferenceUpdateOptions.
Source Files ¶
- acknowledgement.go
- acknowledgestate.go
- actionstatus.go
- activationstate.go
- activationstatus.go
- adaptiveselection.go
- addaction.go
- adjacencymatrixaggregate.go
- adjacencymatrixaggregation.go
- adjacencymatrixbucket.go
- aggregate.go
- aggregatemetricdoubleproperty.go
- aggregateorder.go
- aggregateoutput.go
- aggregation.go
- aggregationbreakdown.go
- aggregationprofile.go
- aggregationprofiledebug.go
- aggregationprofiledelegatedebugfilter.go
- aggregationrange.go
- aggregations.go
- alias.go
- aliasdefinition.go
- aliasesrecord.go
- allfield.go
- allocationdecision.go
- allocationrecord.go
- allocationstore.go
- alwayscondition.go
- analysisconfig.go
- analysisconfigread.go
- analysislimits.go
- analysismemorylimit.go
- analytics.go
- analyticsstatistics.go
- analyzedetail.go
- analyzer.go
- analyzerdetail.go
- analyzetoken.go
- anomaly.go
- anomalycause.go
- anomalydetectors.go
- anomalyexplanation.go
- apikey.go
- apikeyauthorization.go
- appendprocessor.go
- applicationglobaluserprivileges.go
- applicationprivileges.go
- applicationprivilegescheck.go
- applicationsprivileges.go
- archive.go
- arraycomparecondition.go
- arraycompareopparams.go
- arraypercentilesitem.go
- asciifoldingtokenfilter.go
- asyncsearch.go
- attachmentprocessor.go
- audit.go
- authenticateduser.go
- authenticatetoken.go
- authenticationprovider.go
- autodatehistogramaggregate.go
- autodatehistogramaggregation.go
- autofollowedcluster.go
- autofollowpattern.go
- autofollowpatternsummary.go
- autofollowstats.go
- autoscalingcapacity.go
- autoscalingdecider.go
- autoscalingdeciders.go
- autoscalingnode.go
- autoscalingpolicy.go
- autoscalingresources.go
- averageaggregation.go
- averagebucketaggregation.go
- avgaggregate.go
- base.go
- basenode.go
- binaryproperty.go
- booleanproperty.go
- boolquery.go
- boostingquery.go
- boxplotaggregate.go
- boxplotaggregation.go
- breaker.go
- bucketcorrelationaggregation.go
- bucketcorrelationfunction.go
- bucketcorrelationfunctioncountcorrelation.go
- bucketcorrelationfunctioncountcorrelationindicator.go
- bucketinfluencer.go
- bucketksaggregation.go
- bucketmetricvalueaggregate.go
- bucketpathaggregation.go
- bucketsadjacencymatrixbucket.go
- bucketscompositebucket.go
- bucketscriptaggregation.go
- bucketsdatehistogrambucket.go
- bucketsdoubletermsbucket.go
- bucketselectoraggregation.go
- bucketsfiltersbucket.go
- bucketsgeohashgridbucket.go
- bucketsgeohexgridbucket.go
- bucketsgeotilegridbucket.go
- bucketshistogrambucket.go
- bucketsipprefixbucket.go
- bucketsiprangebucket.go
- bucketslongraretermsbucket.go
- bucketslongtermsbucket.go
- bucketsmultitermsbucket.go
- bucketsortaggregation.go
- bucketspath.go
- bucketsquery.go
- bucketsrangebucket.go
- bucketssignificantlongtermsbucket.go
- bucketssignificantstringtermsbucket.go
- bucketsstringraretermsbucket.go
- bucketsstringtermsbucket.go
- bucketsummary.go
- bucketsvariablewidthhistogrambucket.go
- bucketsvoid.go
- buildinformation.go
- bulkindexbyscrollfailure.go
- bulkstats.go
- bytenumberproperty.go
- bytesize.go
- bytesprocessor.go
- cachequeries.go
- cachestats.go
- calendar.go
- calendarevent.go
- cardinalityaggregate.go
- cardinalityaggregation.go
- catanonalydetectorcolumns.go
- catcomponenttemplate.go
- catdatafeedcolumns.go
- catdfacolumns.go
- categorizationanalyzer.go
- categorizationanalyzerdefinition.go
- categorizetextaggregation.go
- categorizetextanalyzer.go
- category.go
- cattrainedmodelscolumns.go
- cattransformcolumns.go
- ccr.go
- ccrshardstats.go
- certificateinformation.go
- cgroup.go
- cgroupcpu.go
- cgroupcpustat.go
- cgroupmemory.go
- chaininput.go
- charfilter.go
- charfilterdefinition.go
- charfilterdetail.go
- charfiltertypes.go
- chargrouptokenizer.go
- checkpointing.go
- checkpointstats.go
- childrenaggregate.go
- childrenaggregation.go
- chisquareheuristic.go
- chunkingconfig.go
- circleprocessor.go
- classificationinferenceoptions.go
- cleanuprepositoryresults.go
- client.go
- closeindexresult.go
- closeshardresult.go
- clusterappliedstats.go
- clustercomponenttemplate.go
- clusterfilesystem.go
- clusterindexingpressure.go
- clusterindices.go
- clusterindicesshards.go
- clusterindicesshardsindex.go
- clusterinfo.go
- clusteringest.go
- clusterjvm.go
- clusterjvmmemory.go
- clusterjvmversion.go
- clusternetworktypes.go
- clusternode.go
- clusternodecount.go
- clusternodes.go
- clusteroperatingsystem.go
- clusteroperatingsystemarchitecture.go
- clusteroperatingsystemname.go
- clusteroperatingsystemprettyname.go
- clusterpressurememory.go
- clusterprocess.go
- clusterprocesscpu.go
- clusterprocessopenfiledescriptors.go
- clusterprocessor.go
- clusterremoteinfo.go
- clusterremoteproxyinfo.go
- clusterremotesniffinfo.go
- clusterruntimefieldtypes.go
- clustershardmetrics.go
- clusterstatequeue.go
- clusterstateupdate.go
- clusterstatistics.go
- collector.go
- column.go
- combinedfieldsquery.go
- command.go
- commandallocateprimaryaction.go
- commandallocatereplicaaction.go
- commandcancelaction.go
- commandmoveaction.go
- commongramstokenfilter.go
- commontermsquery.go
- compactnodeinfo.go
- completioncontext.go
- completionproperty.go
- completionstats.go
- completionsuggest.go
- completionsuggester.go
- completionsuggestoption.go
- componenttemplatenode.go
- componenttemplatesummary.go
- compositeaggregate.go
- compositeaggregatekey.go
- compositeaggregation.go
- compositeaggregationsource.go
- compositebucket.go
- conditiontokenfilter.go
- configuration.go
- configurations.go
- confusionmatrixitem.go
- confusionmatrixprediction.go
- confusionmatrixthreshold.go
- connection.go
- constantkeywordproperty.go
- constantscorequery.go
- context.go
- contextmethod.go
- contextmethodparam.go
- convertprocessor.go
- coordinatorstats.go
- coordsgeobounds.go
- coreknnquery.go
- counter.go
- countrecord.go
- cpu.go
- cpuacct.go
- createdstatus.go
- csvprocessor.go
- cumulativecardinalityaggregate.go
- cumulativecardinalityaggregation.go
- cumulativesumaggregation.go
- currentnode.go
- customanalyzer.go
- customcategorizetextanalyzer.go
- customnormalizer.go
- customsettings.go
- dailyschedule.go
- danglingindex.go
- datacounts.go
- datadescription.go
- dataemailattachment.go
- datafeedauthorization.go
- datafeedconfig.go
- datafeedrunningstate.go
- datafeeds.go
- datafeedsrecord.go
- datafeedstats.go
- datafeedtimingstats.go
- dataframeanalysis.go
- dataframeanalysisanalyzedfields.go
- dataframeanalysisclassification.go
- dataframeanalysiscontainer.go
- dataframeanalysisfeatureprocessor.go
- dataframeanalysisfeatureprocessorfrequencyencoding.go
- dataframeanalysisfeatureprocessormultiencoding.go
- dataframeanalysisfeatureprocessorngramencoding.go
- dataframeanalysisfeatureprocessoronehotencoding.go
- dataframeanalysisfeatureprocessortargetmeanencoding.go
- dataframeanalysisoutlierdetection.go
- dataframeanalysisregression.go
- dataframeanalytics.go
- dataframeanalyticsauthorization.go
- dataframeanalyticsdestination.go
- dataframeanalyticsfieldselection.go
- dataframeanalyticsmemoryestimation.go
- dataframeanalyticsrecord.go
- dataframeanalyticssource.go
- dataframeanalyticsstatscontainer.go
- dataframeanalyticsstatsdatacounts.go
- dataframeanalyticsstatshyperparameters.go
- dataframeanalyticsstatsmemoryusage.go
- dataframeanalyticsstatsoutlierdetection.go
- dataframeanalyticsstatsprogress.go
- dataframeanalyticssummary.go
- dataframeclassificationsummary.go
- dataframeclassificationsummaryaccuracy.go
- dataframeclassificationsummarymulticlassconfusionmatrix.go
- dataframeclassificationsummaryprecision.go
- dataframeclassificationsummaryrecall.go
- dataframeevaluationclass.go
- dataframeevaluationclassification.go
- dataframeevaluationclassificationmetrics.go
- dataframeevaluationclassificationmetricsaucroc.go
- dataframeevaluationcontainer.go
- dataframeevaluationmetrics.go
- dataframeevaluationoutlierdetection.go
- dataframeevaluationoutlierdetectionmetrics.go
- dataframeevaluationregression.go
- dataframeevaluationregressionmetrics.go
- dataframeevaluationregressionmetricshuber.go
- dataframeevaluationregressionmetricsmsle.go
- dataframeevaluationsummaryaucroc.go
- dataframeevaluationsummaryaucroccurveitem.go
- dataframeevaluationvalue.go
- dataframeoutlierdetectionsummary.go
- dataframepreviewconfig.go
- dataframeregressionsummary.go
- datapathstats.go
- datastream.go
- datastreamindex.go
- datastreamnames.go
- datastreams.go
- datastreamsstatsitem.go
- datastreamtimestamp.go
- datastreamtimestampfield.go
- datastreamvisibility.go
- datatierphasestatistics.go
- datatiers.go
- datedecayfunction.go
- datedistancefeaturequery.go
- datehistogramaggregate.go
- datehistogramaggregation.go
- datehistogrambucket.go
- datehistogramgrouping.go
- dateindexnameprocessor.go
- datenanosproperty.go
- dateprocessor.go
- dateproperty.go
- daterangeaggregate.go
- daterangeaggregation.go
- daterangeexpression.go
- daterangeproperty.go
- daterangequery.go
- datetime.go
- decayfunction.go
- decayplacementdatemathduration.go
- decayplacementdoubledouble.go
- decayplacementgeolocationdistance.go
- defaults.go
- definition.go
- delayeddatacheckconfig.go
- delimitedpayloadtokenfilter.go
- densevectorindexoptions.go
- densevectorproperty.go
- deprecation.go
- deprecationindexing.go
- derivativeaggregate.go
- derivativeaggregation.go
- detectionrule.go
- detector.go
- detectorread.go
- dictionarydecompoundertokenfilter.go
- directgenerator.go
- discovery.go
- discoverynode.go
- diskusage.go
- dismaxquery.go
- dissectprocessor.go
- distancefeaturequery.go
- distancefeaturequerybasedatemathduration.go
- distancefeaturequerybasegeolocationdistance.go
- diversifiedsampleraggregation.go
- docstats.go
- document.go
- documentrating.go
- documentsimulation.go
- dotexpanderprocessor.go
- doublenumberproperty.go
- doublerangeproperty.go
- doubletermsaggregate.go
- doubletermsbucket.go
- downsampleconfig.go
- dropprocessor.go
- duration.go
- durationvalueunitfloatmillis.go
- durationvalueunitmillis.go
- durationvalueunitnanos.go
- durationvalueunitseconds.go
- dutchanalyzer.go
- dynamicproperty.go
- dynamictemplate.go
- edgengramtokenfilter.go
- edgengramtokenizer.go
- elasticsearcherror.go
- elasticsearchversioninfo.go
- elisiontokenfilter.go
- email.go
- emailaction.go
- emailattachmentcontainer.go
- emailbody.go
- emailresult.go
- emptyobject.go
- enrichpolicy.go
- enrichprocessor.go
- ensemble.go
- epochtimeunitmillis.go
- epochtimeunitseconds.go
- eql.go
- eqlfeatures.go
- eqlfeaturesjoin.go
- eqlfeatureskeys.go
- eqlfeaturespipes.go
- eqlfeaturessequences.go
- eqlhits.go
- errorcause.go
- ewmamodelsettings.go
- ewmamovingaverageaggregation.go
- executeenrichpolicystatus.go
- executingpolicy.go
- executionresult.go
- executionresultaction.go
- executionresultcondition.go
- executionresultinput.go
- executionstate.go
- executionthreadpool.go
- existsquery.go
- expandwildcards.go
- explainanalyzetoken.go
- explanation.go
- explanationdetail.go
- explorecontrols.go
- extendedboundsdouble.go
- extendedboundsfielddatemath.go
- extendedmemorystats.go
- extendedstatsaggregate.go
- extendedstatsaggregation.go
- extendedstatsbucketaggregate.go
- extendedstatsbucketaggregation.go
- failprocessor.go
- feature.go
- features.go
- featuretoggle.go
- fetchprofile.go
- fetchprofilebreakdown.go
- fetchprofiledebug.go
- fieldaliasproperty.go
- fieldandformat.go
- fieldcapability.go
- fieldcollapse.go
- fielddatafrequencyfilter.go
- fielddatarecord.go
- fielddatastats.go
- fielddatemath.go
- fieldlookup.go
- fieldmapping.go
- fieldmemoryusage.go
- fieldmetric.go
- fieldnamesfield.go
- fieldrule.go
- fields.go
- fieldsecurity.go
- fieldsizeusage.go
- fieldsort.go
- fieldstatistics.go
- fieldsuggester.go
- fieldsummary.go
- fieldsusagebody.go
- fieldtypes.go
- fieldtypesmappings.go
- fieldvalue.go
- fieldvaluefactorscorefunction.go
- filecountsnapshotstats.go
- filedetails.go
- filesystem.go
- filesystemtotal.go
- fillmaskinferenceoptions.go
- fillmaskinferenceupdateoptions.go
- filteraggregate.go
- filterref.go
- filtersaggregate.go
- filtersaggregation.go
- filtersbucket.go
- fingerprintanalyzer.go
- fingerprinttokenfilter.go
- flattened.go
- flattenedproperty.go
- float64.go
- floatnumberproperty.go
- floatrangeproperty.go
- flushstats.go
- followerindex.go
- followerindexparameters.go
- followindexstats.go
- followstats.go
- forcemergeconfiguration.go
- forcemergeresponsebody.go
- foreachprocessor.go
- formattablemetricaggregation.go
- foundstatus.go
- frequencyencodingpreprocessor.go
- frozenindices.go
- functionscore.go
- functionscorequery.go
- fuzziness.go
- fuzzyquery.go
- garbagecollector.go
- garbagecollectortotal.go
- geoboundingboxquery.go
- geobounds.go
- geoboundsaggregate.go
- geoboundsaggregation.go
- geocentroidaggregate.go
- geocentroidaggregation.go
- geodecayfunction.go
- geodistanceaggregate.go
- geodistanceaggregation.go
- geodistancefeaturequery.go
- geodistancequery.go
- geodistancesort.go
- geohashgridaggregate.go
- geohashgridaggregation.go
- geohashgridbucket.go
- geohashlocation.go
- geohashprecision.go
- geohexgridaggregate.go
- geohexgridaggregation.go
- geohexgridbucket.go
- geoipdownloadstatistics.go
- geoipnodedatabasename.go
- geoipnodedatabases.go
- geoipprocessor.go
- geoline.go
- geolineaggregate.go
- geolineaggregation.go
- geolinepoint.go
- geolinesort.go
- geolocation.go
- geopointproperty.go
- geopolygonpoints.go
- geopolygonquery.go
- georesults.go
- geoshape.go
- geoshapefieldquery.go
- geoshapeproperty.go
- geoshapequery.go
- geotilegridaggregate.go
- geotilegridaggregation.go
- geotilegridbucket.go
- getmigrationfeature.go
- getresult.go
- getscriptcontext.go
- getstats.go
- getuserprofileerrors.go
- globalaggregate.go
- globalaggregation.go
- globalprivilege.go
- googlenormalizeddistanceheuristic.go
- grantapikey.go
- grokprocessor.go
- groupings.go
- gsubprocessor.go
- halffloatnumberproperty.go
- haschildquery.go
- hasparentquery.go
- hasprivilegesuserprofileerrors.go
- hdrmethod.go
- hdrpercentileranksaggregate.go
- hdrpercentilesaggregate.go
- healthrecord.go
- healthstatistics.go
- helprecord.go
- highlight.go
- highlightfield.go
- hint.go
- histogramaggregate.go
- histogramaggregation.go
- histogrambucket.go
- histogramgrouping.go
- histogramproperty.go
- hit.go
- hitsevent.go
- hitsmetadata.go
- hitssequence.go
- holtlinearmodelsettings.go
- holtmovingaverageaggregation.go
- holtwintersmodelsettings.go
- holtwintersmovingaverageaggregation.go
- hop.go
- hotthread.go
- hourandminute.go
- hourlyschedule.go
- htmlstripcharfilter.go
- http.go
- httpemailattachment.go
- httpheaders.go
- httpinput.go
- httpinputauthentication.go
- httpinputbasicauthentication.go
- httpinputproxy.go
- httpinputrequestdefinition.go
- httpinputrequestresult.go
- httpinputresponseresult.go
- hunspelltokenfilter.go
- hyperparameter.go
- hyperparameters.go
- hyphenationdecompoundertokenfilter.go
- icuanalyzer.go
- icucollationtokenfilter.go
- icufoldingtokenfilter.go
- icunormalizationcharfilter.go
- icunormalizationtokenfilter.go
- icutokenizer.go
- icutransformtokenfilter.go
- ids.go
- idsquery.go
- ilm.go
- ilmactions.go
- ilmpolicy.go
- ilmpolicystatistics.go
- indexaction.go
- indexaliases.go
- indexanddatastreamaction.go
- indexcapabilities.go
- indexdetails.go
- indexfield.go
- indexhealthstats.go
- indexingpressurememorysummary.go
- indexingstats.go
- indexmappingrecord.go
- indexpatterns.go
- indexprivilegescheck.go
- indexresult.go
- indexresultsummary.go
- indexrouting.go
- indexroutingallocation.go
- indexroutingallocationdisk.go
- indexroutingallocationinclude.go
- indexroutingallocationinitialrecovery.go
- indexroutingrebalance.go
- indexsegment.go
- indexsegmentsort.go
- indexsettingblocks.go
- indexsettings.go
- indexsettingsanalysis.go
- indexsettingslifecycle.go
- indexsettingslifecyclestep.go
- indexsettingstimeseries.go
- indexstate.go
- indexstats.go
- indextemplate.go
- indextemplatedatastreamconfiguration.go
- indextemplateitem.go
- indextemplatemapping.go
- indextemplatesummary.go
- indexversioning.go
- indices.go
- indicesaction.go
- indicesblockstatus.go
- indicesindexingpressure.go
- indicesindexingpressurememory.go
- indicesmodifyaction.go
- indicesoptions.go
- indicesprivileges.go
- indicesprivilegesquery.go
- indicesrecord.go
- indicesshardsstats.go
- indicesshardstats.go
- indicesshardstores.go
- indicesstats.go
- indicesvalidationexplanation.go
- indicesversions.go
- inferenceaggregate.go
- inferenceaggregation.go
- inferenceclassimportance.go
- inferenceconfig.go
- inferenceconfigclassification.go
- inferenceconfigcontainer.go
- inferenceconfigcreatecontainer.go
- inferenceconfigregression.go
- inferenceconfigupdatecontainer.go
- inferencefeatureimportance.go
- inferenceprocessor.go
- inferenceresponseresult.go
- inferencetopclassentry.go
- influence.go
- influencer.go
- infofeaturestate.go
- ingestpipeline.go
- ingesttotal.go
- inlineget.go
- inlinegetdictuserdefined.go
- inlinescript.go
- innerhits.go
- innerhitsresult.go
- inprogress.go
- input.go
- integernumberproperty.go
- integerrangeproperty.go
- intervals.go
- intervalsallof.go
- intervalsanyof.go
- intervalsfilter.go
- intervalsfuzzy.go
- intervalsmatch.go
- intervalsprefix.go
- intervalsquery.go
- intervalswildcard.go
- invertedindex.go
- invocation.go
- invocations.go
- iostatdevice.go
- iostats.go
- ipfilter.go
- ipprefixaggregate.go
- ipprefixaggregation.go
- ipprefixbucket.go
- ipproperty.go
- iprangeaggregate.go
- iprangeaggregation.go
- iprangeaggregationrange.go
- iprangebucket.go
- iprangeproperty.go
- job.go
- jobblocked.go
- jobconfig.go
- jobforecaststatistics.go
- jobsrecord.go
- jobstatistics.go
- jobstats.go
- jobtimingstats.go
- jobusage.go
- joinprocessor.go
- joinproperty.go
- jsonprocessor.go
- jvm.go
- jvmclasses.go
- jvmmemorystats.go
- jvmstats.go
- jvmthreads.go
- keeptypestokenfilter.go
- keepwordstokenfilter.go
- keyedpercentiles.go
- keyedprocessor.go
- keyvalueprocessor.go
- keywordanalyzer.go
- keywordmarkertokenfilter.go
- keywordproperty.go
- keywordtokenizer.go
- kibanatoken.go
- knnquery.go
- kstemtokenfilter.go
- kuromojianalyzer.go
- kuromojiiterationmarkcharfilter.go
- kuromojipartofspeechtokenfilter.go
- kuromojireadingformtokenfilter.go
- kuromojistemmertokenfilter.go
- kuromojitokenizer.go
- languageanalyzer.go
- languagecontext.go
- laplacesmoothingmodel.go
- latest.go
- latlongeolocation.go
- lengthtokenfilter.go
- lettertokenizer.go
- license.go
- licenseinformation.go
- lifecycle.go
- lifecycleexplain.go
- lifecycleexplainmanaged.go
- lifecycleexplainphaseexecution.go
- lifecycleexplainunmanaged.go
- like.go
- likedocument.go
- limits.go
- limittokencounttokenfilter.go
- linearinterpolationsmoothingmodel.go
- linearmovingaverageaggregation.go
- loggingaction.go
- loggingresult.go
- logstashpipeline.go
- longnumberproperty.go
- longrangeproperty.go
- longraretermsaggregate.go
- longraretermsbucket.go
- longtermsaggregate.go
- longtermsbucket.go
- lowercasenormalizer.go
- lowercaseprocessor.go
- lowercasetokenfilter.go
- lowercasetokenizer.go
- machinelearning.go
- manageuserprivileges.go
- mapboxvectortiles.go
- mappingcharfilter.go
- mappinglimitsettings.go
- mappinglimitsettingsdepth.go
- mappinglimitsettingsdimensionfields.go
- mappinglimitsettingsfieldnamelength.go
- mappinglimitsettingsnestedfields.go
- mappinglimitsettingsnestedobjects.go
- mappinglimitsettingstotalfields.go
- mappingstats.go
- masterrecord.go
- matchallquery.go
- matchboolprefixquery.go
- matchnonequery.go
- matchonlytextproperty.go
- matchphraseprefixquery.go
- matchphrasequery.go
- matchquery.go
- matrixaggregation.go
- matrixstatsaggregate.go
- matrixstatsaggregation.go
- matrixstatsfields.go
- maxaggregate.go
- maxaggregation.go
- maxbucketaggregation.go
- medianabsolutedeviationaggregate.go
- medianabsolutedeviationaggregation.go
- memmlstats.go
- memory.go
- memorystats.go
- memstats.go
- merge.go
- mergescheduler.go
- mergesstats.go
- metadata.go
- metrics.go
- mgetoperation.go
- migrationfeatureindexinfo.go
- minaggregate.go
- minaggregation.go
- minbucketaggregation.go
- minimallicenseinformation.go
- minimumshouldmatch.go
- missing.go
- missingaggregate.go
- missingaggregation.go
- mlcounter.go
- mldatafeed.go
- mldataframeanalyticsjobs.go
- mldataframeanalyticsjobsanalysis.go
- mldataframeanalyticsjobscount.go
- mldataframeanalyticsjobsmemory.go
- mlfilter.go
- mlinference.go
- mlinferencedeployments.go
- mlinferencedeploymentstimems.go
- mlinferenceingestprocessor.go
- mlinferenceingestprocessorcount.go
- mlinferencetrainedmodels.go
- mlinferencetrainedmodelscount.go
- mljobforecasts.go
- modelplotconfig.go
- modelsizestats.go
- modelsnapshot.go
- modelsnapshotupgrade.go
- monitoring.go
- morelikethisquery.go
- mountedsnapshot.go
- movingaverageaggregation.go
- movingfunctionaggregation.go
- movingpercentilesaggregation.go
- mtermvectorsoperation.go
- multibucketaggregatebaseadjacencymatrixbucket.go
- multibucketaggregatebasecompositebucket.go
- multibucketaggregatebasedatehistogrambucket.go
- multibucketaggregatebasedoubletermsbucket.go
- multibucketaggregatebasefiltersbucket.go
- multibucketaggregatebasegeohashgridbucket.go
- multibucketaggregatebasegeohexgridbucket.go
- multibucketaggregatebasegeotilegridbucket.go
- multibucketaggregatebasehistogrambucket.go
- multibucketaggregatebaseipprefixbucket.go
- multibucketaggregatebaseiprangebucket.go
- multibucketaggregatebaselongraretermsbucket.go
- multibucketaggregatebaselongtermsbucket.go
- multibucketaggregatebasemultitermsbucket.go
- multibucketaggregatebaserangebucket.go
- multibucketaggregatebasesignificantlongtermsbucket.go
- multibucketaggregatebasesignificantstringtermsbucket.go
- multibucketaggregatebasestringraretermsbucket.go
- multibucketaggregatebasestringtermsbucket.go
- multibucketaggregatebasevariablewidthhistogrambucket.go
- multibucketaggregatebasevoid.go
- multigeterror.go
- multimatchquery.go
- multiplexertokenfilter.go
- multitermlookup.go
- multitermsaggregate.go
- multitermsaggregation.go
- multitermsbucket.go
- murmur3hashproperty.go
- mutualinformationheuristic.go
- names.go
- nativecode.go
- nativecodeinformation.go
- nerinferenceoptions.go
- nerinferenceupdateoptions.go
- nestedaggregate.go
- nestedaggregation.go
- nestedidentity.go
- nestedproperty.go
- nestedquery.go
- nestedsortvalue.go
- nevercondition.go
- ngramtokenfilter.go
- ngramtokenizer.go
- nlpberttokenizationconfig.go
- nlprobertatokenizationconfig.go
- nlptokenizationupdateoptions.go
- node.go
- nodeallocationexplanation.go
- nodeattributes.go
- nodeattributesrecord.go
- nodebufferpool.go
- nodediskusage.go
- nodeids.go
- nodeinfo.go
- nodeinfoaction.go
- nodeinfoaggregation.go
- nodeinfobootstrap.go
- nodeinfoclient.go
- nodeinfodiscover.go
- nodeinfohttp.go
- nodeinfoingest.go
- nodeinfoingestdownloader.go
- nodeinfoingestinfo.go
- nodeinfoingestprocessor.go
- nodeinfojvmmemory.go
- nodeinfomemory.go
- nodeinfonetwork.go
- nodeinfonetworkinterface.go
- nodeinfooscpu.go
- nodeinfopath.go
- nodeinforepositories.go
- nodeinforepositoriesurl.go
- nodeinfoscript.go
- nodeinfosearch.go
- nodeinfosearchremote.go
- nodeinfosettings.go
- nodeinfosettingscluster.go
- nodeinfosettingsclusterelection.go
- nodeinfosettingshttp.go
- nodeinfosettingshttptype.go
- nodeinfosettingsingest.go
- nodeinfosettingsnetwork.go
- nodeinfosettingsnode.go
- nodeinfosettingstransport.go
- nodeinfosettingstransportfeatures.go
- nodeinfosettingstransporttype.go
- nodeinfotransport.go
- nodeinfoxpack.go
- nodeinfoxpacklicense.go
- nodeinfoxpacklicensetype.go
- nodeinfoxpacksecurity.go
- nodeinfoxpacksecurityauthc.go
- nodeinfoxpacksecurityauthcrealms.go
- nodeinfoxpacksecurityauthcrealmsstatus.go
- nodeinfoxpacksecurityauthctoken.go
- nodeinfoxpacksecurityssl.go
- nodejvminfo.go
- nodeoperatingsysteminfo.go
- nodepackagingtype.go
- nodeprocessinfo.go
- nodereloaderror.go
- nodereloadresult.go
- noderoles.go
- nodescontext.go
- nodescredentials.go
- nodescredentialsfiletoken.go
- nodeshard.go
- nodeshutdownstatus.go
- nodesindexingpressure.go
- nodesindexingpressurememory.go
- nodesingest.go
- nodesrecord.go
- nodestatistics.go
- nodetasks.go
- nodethreadpoolinfo.go
- nodeusage.go
- norianalyzer.go
- noripartofspeechtokenfilter.go
- noritokenizer.go
- normalizeaggregation.go
- normalizer.go
- numberrangequery.go
- numericdecayfunction.go
- numericfielddata.go
- objectproperty.go
- onehotencodingpreprocessor.go
- operatingsystem.go
- operatingsystemmemoryinfo.go
- outlierdetectionparameters.go
- overallbucket.go
- overallbucketjob.go
- overlapping.go
- page.go
- pagerdutyaction.go
- pagerdutycontext.go
- pagerdutyevent.go
- pagerdutyeventproxy.go
- pagerdutyresult.go
- painlesscontextsetup.go
- parentaggregate.go
- parentaggregation.go
- parentidquery.go
- parenttaskinfo.go
- passthroughinferenceoptions.go
- passthroughinferenceupdateoptions.go
- pathhierarchytokenizer.go
- patternanalyzer.go
- patterncapturetokenfilter.go
- patternreplacecharfilter.go
- patternreplacetokenfilter.go
- patterntokenizer.go
- pendingtask.go
- pendingtasksrecord.go
- percentage.go
- percentagescoreheuristic.go
- percentileranksaggregation.go
- percentiles.go
- percentilesaggregation.go
- percentilesbucketaggregate.go
- percentilesbucketaggregation.go
- percolatequery.go
- percolatorproperty.go
- perpartitioncategorization.go
- persistenttaskstatus.go
- phase.go
- phases.go
- phonetictokenfilter.go
- phrasesuggest.go
- phrasesuggestcollate.go
- phrasesuggestcollatequery.go
- phrasesuggester.go
- phrasesuggesthighlight.go
- phrasesuggestoption.go
- pinneddoc.go
- pinnedquery.go
- pipelineconfig.go
- pipelinemetadata.go
- pipelineprocessor.go
- pipelinesettings.go
- pipelinesimulation.go
- pivot.go
- pivotgroupbycontainer.go
- pluginsrecord.go
- pluginsstatus.go
- pluginstats.go
- pointintimereference.go
- pointproperty.go
- pool.go
- porterstemtokenfilter.go
- postmigrationfeature.go
- predicatetokenfilter.go
- predictedvalue.go
- prefixquery.go
- preprocessor.go
- pressurememory.go
- privileges.go
- privilegesactions.go
- privilegescheck.go
- process.go
- processor.go
- processorcontainer.go
- profile.go
- property.go
- publishedclusterstates.go
- queries.go
- query.go
- querybreakdown.go
- querycachestats.go
- queryprofile.go
- querystringquery.go
- queryvector.go
- queryvectorbuilder.go
- querywatch.go
- questionansweringinferenceoptions.go
- questionansweringinferenceupdateoptions.go
- randomscorefunction.go
- rangeaggregate.go
- rangeaggregation.go
- rangebucket.go
- rangequery.go
- rankevalhit.go
- rankevalhititem.go
- rankevalmetric.go
- rankevalmetricdetail.go
- rankevalmetricdiscountedcumulativegain.go
- rankevalmetricexpectedreciprocalrank.go
- rankevalmetricmeanreciprocalrank.go
- rankevalmetricprecision.go
- rankevalmetricratingtreshold.go
- rankevalmetricrecall.go
- rankevalquery.go
- rankevalrequestitem.go
- rankfeaturefunction.go
- rankfeaturefunctionlinear.go
- rankfeaturefunctionlogarithm.go
- rankfeaturefunctionsaturation.go
- rankfeaturefunctionsigmoid.go
- rankfeatureproperty.go
- rankfeaturequery.go
- rankfeaturesproperty.go
- raretermsaggregation.go
- rateaggregate.go
- rateaggregation.go
- readexception.go
- realmcache.go
- realminfo.go
- recording.go
- recoverybytes.go
- recoveryfiles.go
- recoveryindexstatus.go
- recoveryorigin.go
- recoveryrecord.go
- recoverystartstatus.go
- recoverystats.go
- recoverystatus.go
- refreshstats.go
- regexpquery.go
- regressioninferenceoptions.go
- reindexdestination.go
- reindexnode.go
- reindexsource.go
- reindexstatus.go
- reindextask.go
- reloaddetails.go
- relocationfailureinfo.go
- remotesource.go
- removeaction.go
- removeduplicatestokenfilter.go
- removeindexaction.go
- removeprocessor.go
- renameprocessor.go
- reportingemailattachment.go
- repositoriesrecord.go
- repository.go
- repositorylocation.go
- repositorymeteringinformation.go
- repositorysettings.go
- requestcachestats.go
- requestcounts.go
- reroutedecision.go
- rerouteexplanation.go
- rerouteparameters.go
- rescore.go
- rescorequery.go
- reservedsize.go
- resolveindexaliasitem.go
- resolveindexdatastreamsitem.go
- resolveindexitem.go
- resourceprivileges.go
- responsebody.go
- responseitem.go
- retention.go
- retentionlease.go
- retentionpolicy.go
- retentionpolicycontainer.go
- retries.go
- reversenestedaggregate.go
- reversenestedaggregation.go
- reversetokenfilter.go
- role.go
- roledescriptor.go
- roledescriptorread.go
- roledescriptorwrapper.go
- rolemappingrule.go
- roletemplate.go
- roletemplateinlinequery.go
- roletemplateinlinescript.go
- roletemplatequery.go
- roletemplatescript.go
- rolloverconditions.go
- rollupcapabilities.go
- rollupcapabilitysummary.go
- rollupfieldsummary.go
- rollupjob.go
- rollupjobconfiguration.go
- rollupjobstats.go
- rollupjobstatus.go
- rollupjobsummary.go
- rollupjobsummaryfield.go
- routingfield.go
- row.go
- rulecondition.go
- runningstatesearchinterval.go
- runtimefield.go
- runtimefieldfetchfields.go
- runtimefields.go
- runtimefieldstype.go
- samplediversity.go
- sampleraggregate.go
- sampleraggregation.go
- scaledfloatnumberproperty.go
- schedulecontainer.go
- scheduletimeofday.go
- scheduletriggerevent.go
- scoresort.go
- script.go
- scriptcache.go
- scriptcondition.go
- scriptedheuristic.go
- scriptedmetricaggregate.go
- scriptedmetricaggregation.go
- scriptfield.go
- scripting.go
- scriptquery.go
- scriptscorefunction.go
- scriptscorequery.go
- scriptsort.go
- scripttransform.go
- scrollids.go
- searchablesnapshots.go
- searchasyoutypeproperty.go
- searchidle.go
- searchinput.go
- searchinputrequestbody.go
- searchinputrequestdefinition.go
- searchprofile.go
- searchstats.go
- searchtemplaterequestbody.go
- searchtransform.go
- security.go
- securityrealm.go
- securityrolemapping.go
- securityroles.go
- securityrolesdls.go
- securityrolesdlsbitsetcache.go
- securityrolesfile.go
- securityrolesnative.go
- segment.go
- segmentsrecord.go
- segmentsstats.go
- serialdifferencingaggregation.go
- serializedclusterstate.go
- serializedclusterstatedetail.go
- servicetoken.go
- setprocessor.go
- setsecurityuserprocessor.go
- settings.go
- settingsanalyze.go
- settingshighlight.go
- settingsquerystring.go
- settingssearch.go
- settingssimilarity.go
- settingssimilaritybm25.go
- settingssimilaritydfi.go
- settingssimilaritydfr.go
- settingssimilarityib.go
- settingssimilaritylmd.go
- settingssimilaritylmj.go
- settingssimilarityscriptedtfidf.go
- shapefieldquery.go
- shapeproperty.go
- shapequery.go
- shardcommit.go
- shardfailure.go
- shardfilesizeinfo.go
- shardhealthstats.go
- shardlease.go
- shardmigrationstatus.go
- shardpath.go
- shardprofile.go
- shardquerycache.go
- shardrecovery.go
- shardretentionleases.go
- shardrouting.go
- shardsegmentrouting.go
- shardsequencenumber.go
- shardsrecord.go
- shardssegment.go
- shardsstatssummary.go
- shardsstatssummaryitem.go
- shardstatistics.go
- shardstore.go
- shardstoreexception.go
- shardstoreindex.go
- shardstorenode.go
- shardstorewrapper.go
- shardstotalstats.go
- shared.go
- shingletokenfilter.go
- shortnumberproperty.go
- shrinkconfiguration.go
- significantlongtermsaggregate.go
- significantlongtermsbucket.go
- significantstringtermsaggregate.go
- significantstringtermsbucket.go
- significanttermsaggregatebasesignificantlongtermsbucket.go
- significanttermsaggregatebasesignificantstringtermsbucket.go
- significanttermsaggregatebasevoid.go
- significanttermsaggregation.go
- significanttextaggregation.go
- simpleanalyzer.go
- simplemovingaverageaggregation.go
- simplequerystringflags.go
- simplequerystringquery.go
- simplevalueaggregate.go
- simulatedactions.go
- simulateingest.go
- sizefield.go
- slackaction.go
- slackattachment.go
- slackattachmentfield.go
- slackdynamicattachment.go
- slackmessage.go
- slackresult.go
- slicedscroll.go
- slices.go
- slm.go
- slmpolicy.go
- slowlogsettings.go
- slowlogtresholdlevels.go
- slowlogtresholds.go
- smoothingmodelcontainer.go
- snapshotindexstats.go
- snapshotinfo.go
- snapshotlifecycle.go
- snapshotresponseitem.go
- snapshotrestore.go
- snapshotshardfailure.go
- snapshotshardsstats.go
- snapshotshardsstatus.go
- snapshotsrecord.go
- snapshotstats.go
- snowballanalyzer.go
- snowballtokenfilter.go
- softdeletes.go
- sort.go
- sortcombinations.go
- sortoptions.go
- sortprocessor.go
- sortresults.go
- sourceconfig.go
- sourceconfigparam.go
- sourcefield.go
- sourcefilter.go
- spancontainingquery.go
- spanfieldmaskingquery.go
- spanfirstquery.go
- spangapquery.go
- spanmultitermquery.go
- spannearquery.go
- spannotquery.go
- spanorquery.go
- spanquery.go
- spantermquery.go
- spanwithinquery.go
- splitprocessor.go
- sql.go
- ssl.go
- standardanalyzer.go
- standarddeviationbounds.go
- standarddeviationboundsasstring.go
- standardtokenizer.go
- statistics.go
- stats.go
- statsaggregate.go
- statsaggregation.go
- statsbucketaggregate.go
- statsbucketaggregation.go
- status.go
- stemmeroverridetokenfilter.go
- stemmertokenfilter.go
- stepkey.go
- stopanalyzer.go
- stoptokenfilter.go
- stopwords.go
- storage.go
- storedscript.go
- storedscriptid.go
- storestats.go
- stringifiedepochtimeunitmillis.go
- stringifiedepochtimeunitseconds.go
- stringifiedversionnumber.go
- stringraretermsaggregate.go
- stringraretermsbucket.go
- stringstatsaggregate.go
- stringstatsaggregation.go
- stringtermsaggregate.go
- stringtermsbucket.go
- stupidbackoffsmoothingmodel.go
- suggest.go
- suggestcontext.go
- suggester.go
- suggestfuzziness.go
- sumaggregate.go
- sumaggregation.go
- sumbucketaggregation.go
- summary.go
- synccontainer.go
- synonymgraphtokenfilter.go
- synonymtokenfilter.go
- targetmeanencodingpreprocessor.go
- taskfailure.go
- taskid.go
- taskinfo.go
- taskinfos.go
- tasksrecord.go
- taskstatus.go
- tdigest.go
- tdigestpercentileranksaggregate.go
- tdigestpercentilesaggregate.go
- template.go
- templatemapping.go
- templatesrecord.go
- term.go
- termquery.go
- termsaggregatebasedoubletermsbucket.go
- termsaggregatebaselongtermsbucket.go
- termsaggregatebasemultitermsbucket.go
- termsaggregatebasestringtermsbucket.go
- termsaggregatebasevoid.go
- termsaggregation.go
- termsexclude.go
- termsgrouping.go
- termsinclude.go
- termslookup.go
- termspartition.go
- termsquery.go
- termsqueryfield.go
- termssetquery.go
- termsuggest.go
- termsuggester.go
- termsuggestoption.go
- termvector.go
- termvectorsfilter.go
- termvectorsresult.go
- termvectorstoken.go
- testpopulation.go
- textclassificationinferenceoptions.go
- textclassificationinferenceupdateoptions.go
- textembedding.go
- textembeddinginferenceoptions.go
- textembeddinginferenceupdateoptions.go
- textindexprefixes.go
- textproperty.go
- texttoanalyze.go
- threadcount.go
- threadpoolrecord.go
- throttlestate.go
- timeofmonth.go
- timeofweek.go
- timeofyear.go
- timesync.go
- timingstats.go
- tokencountproperty.go
- tokendetail.go
- tokenfilter.go
- tokenfilterdefinition.go
- tokenizationconfigcontainer.go
- tokenizer.go
- tokenizerdefinition.go
- topclassentry.go
- tophitsaggregate.go
- tophitsaggregation.go
- topleftbottomrightgeobounds.go
- topmetrics.go
- topmetricsaggregate.go
- topmetricsaggregation.go
- topmetricsvalue.go
- toprightbottomleftgeobounds.go
- totalfeatureimportance.go
- totalfeatureimportanceclass.go
- totalfeatureimportancestatistics.go
- totalhits.go
- totalhits_unmarshaler.go
- totaluserprofiles.go
- trackhits.go
- trainedmodel.go
- trainedmodelassignment.go
- trainedmodelassignmentroutingtable.go
- trainedmodelassignmenttaskparameters.go
- trainedmodelconfig.go
- trainedmodelconfiginput.go
- trainedmodelconfigmetadata.go
- trainedmodeldeploymentallocationstatus.go
- trainedmodeldeploymentnodesstats.go
- trainedmodeldeploymentstats.go
- trainedmodelentities.go
- trainedmodelinferenceclassimportance.go
- trainedmodelinferencefeatureimportance.go
- trainedmodelinferencestats.go
- trainedmodellocation.go
- trainedmodellocationindex.go
- trainedmodelsizestats.go
- trainedmodelsrecord.go
- trainedmodelstats.go
- trainedmodeltree.go
- trainedmodeltreenode.go
- transformauthorization.go
- transformcontainer.go
- transformdestination.go
- transformindexerstats.go
- transformprogress.go
- transformsource.go
- transformsrecord.go
- transformstats.go
- transformstatshealth.go
- transformsummary.go
- transientmetadataconfig.go
- translog.go
- translogretention.go
- translogstats.go
- translogstatus.go
- transport.go
- transporthistogram.go
- triggercontainer.go
- triggereventcontainer.go
- triggereventresult.go
- trimprocessor.go
- trimtokenfilter.go
- truncatetokenfilter.go
- ttestaggregate.go
- ttestaggregation.go
- typefieldmappings.go
- typemapping.go
- typequery.go
- uaxemailurltokenizer.go
- unassignedinformation.go
- uniquetokenfilter.go
- unmappedraretermsaggregate.go
- unmappedsampleraggregate.go
- unmappedsignificanttermsaggregate.go
- unmappedtermsaggregate.go
- unrateddocument.go
- unsignedlongnumberproperty.go
- updatebyqueryrethrottlenode.go
- uppercaseprocessor.go
- uppercasetokenfilter.go
- urldecodeprocessor.go
- usagestatsindex.go
- usagestatsshards.go
- user.go
- useragentprocessor.go
- userindicesprivileges.go
- userprofile.go
- userprofilehitmetadata.go
- userprofileuser.go
- userprofilewithmetadata.go
- userrealm.go
- validationloss.go
- valuecountaggregate.go
- valuecountaggregation.go
- variablewidthhistogramaggregate.go
- variablewidthhistogramaggregation.go
- variablewidthhistogrambucket.go
- vector.go
- verifyindex.go
- versionproperty.go
- vertex.go
- vertexdefinition.go
- vertexinclude.go
- vocabulary.go
- waitforactiveshards.go
- warmerstats.go
- watch.go
- watcher.go
- watcheraction.go
- watcheractions.go
- watcheractiontotals.go
- watchercondition.go
- watcherinput.go
- watchernodestats.go
- watcherstatusactions.go
- watcherwatch.go
- watcherwatchtrigger.go
- watcherwatchtriggerschedule.go
- watchrecord.go
- watchrecordqueuedstats.go
- watchrecordstats.go
- watchstatus.go
- webhookaction.go
- webhookresult.go
- weightedaverageaggregation.go
- weightedaveragevalue.go
- weightedavgaggregate.go
- weights.go
- whitespaceanalyzer.go
- whitespacetokenizer.go
- wildcardproperty.go
- wildcardquery.go
- wktgeobounds.go
- worddelimitergraphtokenfilter.go
- worddelimitertokenfilter.go
- wrapperquery.go
- xpackdatafeed.go
- xpackfeature.go
- xpackfeatures.go
- xpackquery.go
- xpackrealm.go
- xpackrolemapping.go
- xpackruntimefieldtypes.go
- zeroshotclassificationinferenceoptions.go
- zeroshotclassificationinferenceupdateoptions.go
Directories ¶
Path | Synopsis |
---|---|
enums
|
|
accesstokengranttype
Package accesstokengranttype
|
Package accesstokengranttype |
acknowledgementoptions
Package acknowledgementoptions
|
Package acknowledgementoptions |
actionexecutionmode
Package actionexecutionmode
|
Package actionexecutionmode |
actionstatusoptions
Package actionstatusoptions
|
Package actionstatusoptions |
actiontype
Package actiontype
|
Package actiontype |
allocationexplaindecision
Package allocationexplaindecision
|
Package allocationexplaindecision |
apikeygranttype
Package apikeygranttype
|
Package apikeygranttype |
appliesto
Package appliesto
|
Package appliesto |
boundaryscanner
Package boundaryscanner
|
Package boundaryscanner |
bytes
Package bytes
|
Package bytes |
calendarinterval
Package calendarinterval
|
Package calendarinterval |
cardinalityexecutionmode
Package cardinalityexecutionmode
|
Package cardinalityexecutionmode |
catanomalydetectorcolumn
Package catanomalydetectorcolumn
|
Package catanomalydetectorcolumn |
catdatafeedcolumn
Package catdatafeedcolumn
|
Package catdatafeedcolumn |
catdfacolumn
Package catdfacolumn
|
Package catdfacolumn |
categorizationstatus
Package categorizationstatus
|
Package categorizationstatus |
cattrainedmodelscolumn
Package cattrainedmodelscolumn
|
Package cattrainedmodelscolumn |
cattransformcolumn
Package cattransformcolumn
|
Package cattransformcolumn |
childscoremode
Package childscoremode
|
Package childscoremode |
chunkingmode
Package chunkingmode
|
Package chunkingmode |
clusterprivilege
Package clusterprivilege
|
Package clusterprivilege |
combinedfieldsoperator
Package combinedfieldsoperator
|
Package combinedfieldsoperator |
combinedfieldszeroterms
Package combinedfieldszeroterms
|
Package combinedfieldszeroterms |
conditionop
Package conditionop
|
Package conditionop |
conditionoperator
Package conditionoperator
|
Package conditionoperator |
conditiontype
Package conditiontype
|
Package conditiontype |
conflicts
Package conflicts
|
Package conflicts |
connectionscheme
Package connectionscheme
|
Package connectionscheme |
converttype
Package converttype
|
Package converttype |
dataattachmentformat
Package dataattachmentformat
|
Package dataattachmentformat |
datafeedstate
Package datafeedstate
|
Package datafeedstate |
dataframestate
Package dataframestate
|
Package dataframestate |
day
Package day
|
Package day |
decision
Package decision
|
Package decision |
delimitedpayloadencoding
Package delimitedpayloadencoding
|
Package delimitedpayloadencoding |
deploymentallocationstate
Package deploymentallocationstate
|
Package deploymentallocationstate |
deploymentassignmentstate
Package deploymentassignmentstate
|
Package deploymentassignmentstate |
deploymentstate
Package deploymentstate
|
Package deploymentstate |
deprecationlevel
Package deprecationlevel
|
Package deprecationlevel |
dfiindependencemeasure
Package dfiindependencemeasure
|
Package dfiindependencemeasure |
dfraftereffect
Package dfraftereffect
|
Package dfraftereffect |
dfrbasicmodel
Package dfrbasicmodel
|
Package dfrbasicmodel |
distanceunit
Package distanceunit
|
Package distanceunit |
dynamicmapping
Package dynamicmapping
|
Package dynamicmapping |
edgengramside
Package edgengramside
|
Package edgengramside |
emailpriority
Package emailpriority
|
Package emailpriority |
enrichpolicyphase
Package enrichpolicyphase
|
Package enrichpolicyphase |
excludefrequent
Package excludefrequent
|
Package excludefrequent |
executionphase
Package executionphase
|
Package executionphase |
executionstatus
Package executionstatus
|
Package executionstatus |
expandwildcard
Package expandwildcard
|
Package expandwildcard |
feature
Package feature
|
Package feature |
fieldsortnumerictype
Package fieldsortnumerictype
|
Package fieldsortnumerictype |
fieldtype
Package fieldtype
|
Package fieldtype |
fieldvaluefactormodifier
Package fieldvaluefactormodifier
|
Package fieldvaluefactormodifier |
filtertype
Package filtertype
|
Package filtertype |
followerindexstatus
Package followerindexstatus
|
Package followerindexstatus |
functionboostmode
Package functionboostmode
|
Package functionboostmode |
functionscoremode
Package functionscoremode
|
Package functionscoremode |
gappolicy
Package gappolicy
|
Package gappolicy |
geodistancetype
Package geodistancetype
|
Package geodistancetype |
geoexecution
Package geoexecution
|
Package geoexecution |
geoorientation
Package geoorientation
|
Package geoorientation |
geoshaperelation
Package geoshaperelation
|
Package geoshaperelation |
geostrategy
Package geostrategy
|
Package geostrategy |
geovalidationmethod
Package geovalidationmethod
|
Package geovalidationmethod |
granttype
Package granttype
|
Package granttype |
gridaggregationtype
Package gridaggregationtype
|
Package gridaggregationtype |
gridtype
Package gridtype
|
Package gridtype |
groupby
Package groupby
|
Package groupby |
healthstatus
Package healthstatus
|
Package healthstatus |
highlighterencoder
Package highlighterencoder
|
Package highlighterencoder |
highlighterfragmenter
Package highlighterfragmenter
|
Package highlighterfragmenter |
highlighterorder
Package highlighterorder
|
Package highlighterorder |
highlightertagsschema
Package highlightertagsschema
|
Package highlightertagsschema |
highlightertype
Package highlightertype
|
Package highlightertype |
holtwinterstype
Package holtwinterstype
|
Package holtwinterstype |
httpinputmethod
Package httpinputmethod
|
Package httpinputmethod |
ibdistribution
Package ibdistribution
|
Package ibdistribution |
iblambda
Package iblambda
|
Package iblambda |
icucollationalternate
Package icucollationalternate
|
Package icucollationalternate |
icucollationcasefirst
Package icucollationcasefirst
|
Package icucollationcasefirst |
icucollationdecomposition
Package icucollationdecomposition
|
Package icucollationdecomposition |
icucollationstrength
Package icucollationstrength
|
Package icucollationstrength |
icunormalizationmode
Package icunormalizationmode
|
Package icunormalizationmode |
icunormalizationtype
Package icunormalizationtype
|
Package icunormalizationtype |
icutransformdirection
Package icutransformdirection
|
Package icutransformdirection |
include
Package include
|
Package include |
indexcheckonstartup
Package indexcheckonstartup
|
Package indexcheckonstartup |
indexingjobstate
Package indexingjobstate
|
Package indexingjobstate |
indexmetadatastate
Package indexmetadatastate
|
Package indexmetadatastate |
indexoptions
Package indexoptions
|
Package indexoptions |
indexprivilege
Package indexprivilege
|
Package indexprivilege |
indexroutingallocationoptions
Package indexroutingallocationoptions
|
Package indexroutingallocationoptions |
indexroutingrebalanceoptions
Package indexroutingrebalanceoptions
|
Package indexroutingrebalanceoptions |
indicesblockoptions
Package indicesblockoptions
|
Package indicesblockoptions |
inputtype
Package inputtype
|
Package inputtype |
jobblockedreason
Package jobblockedreason
|
Package jobblockedreason |
jobstate
Package jobstate
|
Package jobstate |
jsonprocessorconflictstrategy
Package jsonprocessorconflictstrategy
|
Package jsonprocessorconflictstrategy |
keeptypesmode
Package keeptypesmode
|
Package keeptypesmode |
kuromojitokenizationmode
Package kuromojitokenizationmode
|
Package kuromojitokenizationmode |
language
Package language
|
Package language |
level
Package level
|
Package level |
licensestatus
Package licensestatus
|
Package licensestatus |
licensetype
Package licensetype
|
Package licensetype |
lifecycleoperationmode
Package lifecycleoperationmode
|
Package lifecycleoperationmode |
matchtype
Package matchtype
|
Package matchtype |
memorystatus
Package memorystatus
|
Package memorystatus |
metric
Package metric
|
Package metric |
migrationstatus
Package migrationstatus
|
Package migrationstatus |
minimuminterval
Package minimuminterval
|
Package minimuminterval |
missingorder
Package missingorder
|
Package missingorder |
month
Package month
|
Package month |
multivaluemode
Package multivaluemode
|
Package multivaluemode |
noderole
Package noderole
|
Package noderole |
noridecompoundmode
Package noridecompoundmode
|
Package noridecompoundmode |
normalization
Package normalization
|
Package normalization |
normalizemethod
Package normalizemethod
|
Package normalizemethod |
numericfielddataformat
Package numericfielddataformat
|
Package numericfielddataformat |
onscripterror
Package onscripterror
|
Package onscripterror |
operator
Package operator
|
Package operator |
optype
Package optype
|
Package optype |
pagerdutycontexttype
Package pagerdutycontexttype
|
Package pagerdutycontexttype |
pagerdutyeventtype
Package pagerdutyeventtype
|
Package pagerdutyeventtype |
phoneticencoder
Package phoneticencoder
|
Package phoneticencoder |
phoneticlanguage
Package phoneticlanguage
|
Package phoneticlanguage |
phoneticnametype
Package phoneticnametype
|
Package phoneticnametype |
phoneticruletype
Package phoneticruletype
|
Package phoneticruletype |
policytype
Package policytype
|
Package policytype |
quantifier
Package quantifier
|
Package quantifier |
rangerelation
Package rangerelation
|
Package rangerelation |
ratemode
Package ratemode
|
Package ratemode |
refresh
Package refresh
|
Package refresh |
responsecontenttype
Package responsecontenttype
|
Package responsecontenttype |
result
Package result
|
Package result |
resultposition
Package resultposition
|
Package resultposition |
routingstate
Package routingstate
|
Package routingstate |
ruleaction
Package ruleaction
|
Package ruleaction |
runtimefieldtype
Package runtimefieldtype
|
Package runtimefieldtype |
sampleraggregationexecutionhint
Package sampleraggregationexecutionhint
|
Package sampleraggregationexecutionhint |
scoremode
Package scoremode
|
Package scoremode |
scriptlanguage
Package scriptlanguage
|
Package scriptlanguage |
scriptsorttype
Package scriptsorttype
|
Package scriptsorttype |
searchtype
Package searchtype
|
Package searchtype |
segmentsortmissing
Package segmentsortmissing
|
Package segmentsortmissing |
segmentsortmode
Package segmentsortmode
|
Package segmentsortmode |
segmentsortorder
Package segmentsortorder
|
Package segmentsortorder |
shapetype
Package shapetype
|
Package shapetype |
shardroutingstate
Package shardroutingstate
|
Package shardroutingstate |
shardsstatsstage
Package shardsstatsstage
|
Package shardsstatsstage |
shardstoreallocation
Package shardstoreallocation
|
Package shardstoreallocation |
shardstorestatus
Package shardstorestatus
|
Package shardstorestatus |
shutdownstatus
Package shutdownstatus
|
Package shutdownstatus |
shutdowntype
Package shutdowntype
|
Package shutdowntype |
simplequerystringflag
Package simplequerystringflag
|
Package simplequerystringflag |
slicescalculation
Package slicescalculation
|
Package slicescalculation |
snapshotsort
Package snapshotsort
|
Package snapshotsort |
snapshotupgradestate
Package snapshotupgradestate
|
Package snapshotupgradestate |
snowballlanguage
Package snowballlanguage
|
Package snowballlanguage |
sortmode
Package sortmode
|
Package sortmode |
sortorder
Package sortorder
|
Package sortorder |
sourcefieldmode
Package sourcefieldmode
|
Package sourcefieldmode |
statslevel
Package statslevel
|
Package statslevel |
storagetype
Package storagetype
|
Package storagetype |
stringdistance
Package stringdistance
|
Package stringdistance |
suggestmode
Package suggestmode
|
Package suggestmode |
suggestsort
Package suggestsort
|
Package suggestsort |
synonymformat
Package synonymformat
|
Package synonymformat |
templateformat
Package templateformat
|
Package templateformat |
termsaggregationcollectmode
Package termsaggregationcollectmode
|
Package termsaggregationcollectmode |
termsaggregationexecutionhint
Package termsaggregationexecutionhint
|
Package termsaggregationexecutionhint |
termvectoroption
Package termvectoroption
|
Package termvectoroption |
textquerytype
Package textquerytype
|
Package textquerytype |
threadtype
Package threadtype
|
Package threadtype |
timeseriesmetrictype
Package timeseriesmetrictype
|
Package timeseriesmetrictype |
timeunit
Package timeunit
|
Package timeunit |
tokenchar
Package tokenchar
|
Package tokenchar |
tokenizationtruncate
Package tokenizationtruncate
|
Package tokenizationtruncate |
totalhitsrelation
Package totalhitsrelation
|
Package totalhitsrelation |
trainedmodeltype
Package trainedmodeltype
|
Package trainedmodeltype |
trainingpriority
Package trainingpriority
|
Package trainingpriority |
translogdurability
Package translogdurability
|
Package translogdurability |
ttesttype
Package ttesttype
|
Package ttesttype |
type_
Package type_
|
Package type_ |
unassignedinformationreason
Package unassignedinformationreason
|
Package unassignedinformationreason |
useragentproperty
Package useragentproperty
|
Package useragentproperty |
valuetype
Package valuetype
|
Package valuetype |
versiontype
Package versiontype
|
Package versiontype |
waitforactiveshardoptions
Package waitforactiveshardoptions
|
Package waitforactiveshardoptions |
waitforevents
Package waitforevents
|
Package waitforevents |
watchermetric
Package watchermetric
|
Package watchermetric |
watcherstate
Package watcherstate
|
Package watcherstate |
zerotermsquery
Package zerotermsquery
|
Package zerotermsquery |