Documentation ¶
Index ¶
- Constants
- func Load(data []byte) (builder.Query, error)
- type AnalysisType
- type Base
- func (b *Base) SetContext(context map[string]interface{}) *Base
- func (b *Base) SetDataSource(dataSource builder.DataSource) *Base
- func (b *Base) SetID(ID string) *Base
- func (b *Base) SetIntervals(intervals builder.Intervals) *Base
- func (b *Base) SetQueryType(queryType builder.ComponentType) *Base
- func (b *Base) Type() builder.ComponentType
- func (b *Base) UnmarshalJSON(data []byte) error
- type DataSourceMetadata
- type GroupBy
- func (g *GroupBy) SetAggregations(aggregations []builder.Aggregator) *GroupBy
- func (g *GroupBy) SetContext(context map[string]interface{}) *GroupBy
- func (g *GroupBy) SetDataSource(dataSource builder.DataSource) *GroupBy
- func (g *GroupBy) SetDimensions(dimensions []builder.Dimension) *GroupBy
- func (g *GroupBy) SetFilter(filter builder.Filter) *GroupBy
- func (g *GroupBy) SetGranularity(granularity builder.Granularity) *GroupBy
- func (g *GroupBy) SetHaving(having builder.HavingSpec) *GroupBy
- func (g *GroupBy) SetIntervals(intervals builder.Intervals) *GroupBy
- func (g *GroupBy) SetLimitSpec(limitSpec builder.LimitSpec) *GroupBy
- func (g *GroupBy) SetPostAggregations(postAggregations []builder.PostAggregator) *GroupBy
- func (g *GroupBy) SetSubtotalsSpec(subtotalsSpec [][]string) *GroupBy
- func (g *GroupBy) SetVirtualColumns(virtualColumns []builder.VirtualColumn) *GroupBy
- func (g *GroupBy) UnmarshalJSON(data []byte) error
- type Order
- type SQL
- type SQLParameter
- type Scan
- func (s *Scan) SetBatchSize(batchSize int64) *Scan
- func (s *Scan) SetColumns(columns []string) *Scan
- func (s *Scan) SetContext(context map[string]interface{}) *Scan
- func (s *Scan) SetDataSource(dataSource builder.DataSource) *Scan
- func (s *Scan) SetFilter(filter builder.Filter) *Scan
- func (s *Scan) SetIntervals(intervals builder.Intervals) *Scan
- func (s *Scan) SetLegacy(legacy bool) *Scan
- func (s *Scan) SetLimit(limit int64) *Scan
- func (s *Scan) SetOffset(offset int64) *Scan
- func (s *Scan) SetOrder(order Order) *Scan
- func (s *Scan) SetResultFormat(resultFormat string) *Scan
- func (s *Scan) SetVirtualColumns(virtualColumns []builder.VirtualColumn) *Scan
- func (s *Scan) UnmarshalJSON(data []byte) error
- type Search
- func (s *Search) SetContext(context map[string]interface{}) *Search
- func (s *Search) SetDataSource(dataSource builder.DataSource) *Search
- func (s *Search) SetFilter(filter builder.Filter) *Search
- func (s *Search) SetGranularity(granularity builder.Granularity) *Search
- func (s *Search) SetIntervals(intervals builder.Intervals) *Search
- func (s *Search) SetLimit(limit int64) *Search
- func (s *Search) SetQuery(q builder.SearchQuerySpec) *Search
- func (s *Search) SetSearchDimensions(searchDimensions []builder.Dimension) *Search
- func (s *Search) SetSort(sort *SearchSortSpec) *Search
- func (s *Search) UnmarshalJSON(data []byte) error
- type SearchSortSpec
- type SegmentMetadata
- func (s *SegmentMetadata) SetAnalysisTypes(analysisTypes []AnalysisType) *SegmentMetadata
- func (s *SegmentMetadata) SetContext(context map[string]interface{}) *SegmentMetadata
- func (s *SegmentMetadata) SetDataSource(dataSource builder.DataSource) *SegmentMetadata
- func (s *SegmentMetadata) SetIntervals(intervals builder.Intervals) *SegmentMetadata
- func (s *SegmentMetadata) SetLenientAggregatorMerge(lenientAggregatorMerge bool) *SegmentMetadata
- func (s *SegmentMetadata) SetMerge(merge bool) *SegmentMetadata
- func (s *SegmentMetadata) SetToInclude(toInclude builder.ToInclude) *SegmentMetadata
- func (s *SegmentMetadata) SetUsingDefaultInterval(usingDefaultInterval bool) *SegmentMetadata
- func (s *SegmentMetadata) UnmarshalJSON(data []byte) error
- type TimeBoundary
- func (t *TimeBoundary) SetBound(bound string) *TimeBoundary
- func (t *TimeBoundary) SetContext(context map[string]interface{}) *TimeBoundary
- func (t *TimeBoundary) SetDataSource(dataSource builder.DataSource) *TimeBoundary
- func (t *TimeBoundary) SetFilter(filter builder.Filter) *TimeBoundary
- func (t *TimeBoundary) SetIntervals(intervals builder.Intervals) *TimeBoundary
- func (t *TimeBoundary) UnmarshalJSON(data []byte) error
- type Timeseries
- func (t *Timeseries) SetAggregations(aggregations []builder.Aggregator) *Timeseries
- func (t *Timeseries) SetContext(context map[string]interface{}) *Timeseries
- func (t *Timeseries) SetDataSource(dataSource builder.DataSource) *Timeseries
- func (t *Timeseries) SetDescending(descending bool) *Timeseries
- func (t *Timeseries) SetFilter(filter builder.Filter) *Timeseries
- func (t *Timeseries) SetGranularity(granularity builder.Granularity) *Timeseries
- func (t *Timeseries) SetIntervals(intervals builder.Intervals) *Timeseries
- func (t *Timeseries) SetLimit(limit int64) *Timeseries
- func (t *Timeseries) SetPostAggregations(postAggregations []builder.PostAggregator) *Timeseries
- func (t *Timeseries) SetVirtualColumns(virtualColumns []builder.VirtualColumn) *Timeseries
- func (t *Timeseries) UnmarshalJSON(data []byte) error
- type TopN
- func (t *TopN) SetAggregations(aggregations []builder.Aggregator) *TopN
- func (t *TopN) SetContext(context map[string]interface{}) *TopN
- func (t *TopN) SetDataSource(dataSource builder.DataSource) *TopN
- func (t *TopN) SetDimension(dimension builder.Dimension) *TopN
- func (t *TopN) SetFilter(filter builder.Filter) *TopN
- func (t *TopN) SetGranularity(granularity builder.Granularity) *TopN
- func (t *TopN) SetIntervals(intervals builder.Intervals) *TopN
- func (t *TopN) SetMetric(metric builder.TopNMetric) *TopN
- func (t *TopN) SetPostAggregations(postAggregations []builder.PostAggregator) *TopN
- func (t *TopN) SetThreshold(threshold int64) *TopN
- func (t *TopN) SetVirtualColumns(virtualColumns []builder.VirtualColumn) *TopN
- func (t *TopN) UnmarshalJSON(data []byte) error
Constants ¶
View Source
const (
	Ascending  Order = "ASCENDING"
	Descending       = "DESCENDING"
	None             = "NONE"
)
View Source
const (
	Cardinality      AnalysisType = "CARDINALITY"
	Size                          = "SIZE"
	Interval                      = "INTERVAL"
	Aggregators                   = "AGGREGATORS"
	MinMax                        = "MINMAX"
	TimestampSpec                 = "TIMESTAMPSPEC"
	QueryGranularity              = "QUERYGRANULARITY"
	Rollup                        = "ROLLUP"
)
Variables ¶
This section is empty.
Functions ¶
Types ¶
type AnalysisType ¶
type AnalysisType string
type Base ¶
type Base struct {
	ID         string                 `json:"ID,omitempty"`
	QueryType  builder.ComponentType  `json:"queryType,omitempty"`
	DataSource builder.DataSource     `json:"dataSource,omitempty"`
	Intervals  builder.Intervals      `json:"intervals,omitempty"`
	Context    map[string]interface{} `json:"context,omitempty"`
}
func (*Base) SetContext ¶
func (*Base) SetDataSource ¶
func (b *Base) SetDataSource(dataSource builder.DataSource) *Base
func (*Base) SetQueryType ¶
func (b *Base) SetQueryType(queryType builder.ComponentType) *Base
func (*Base) Type ¶
func (b *Base) Type() builder.ComponentType
func (*Base) UnmarshalJSON ¶
type DataSourceMetadata ¶
type DataSourceMetadata struct {
Base
}
func NewDataSourceMetadata ¶
func NewDataSourceMetadata() *DataSourceMetadata
func (*DataSourceMetadata) SetContext ¶
func (d *DataSourceMetadata) SetContext(context map[string]interface{}) *DataSourceMetadata
func (*DataSourceMetadata) SetDataSource ¶
func (d *DataSourceMetadata) SetDataSource(dataSource builder.DataSource) *DataSourceMetadata
func (*DataSourceMetadata) SetIntervals ¶
func (d *DataSourceMetadata) SetIntervals(intervals builder.Intervals) *DataSourceMetadata
type GroupBy ¶
type GroupBy struct {
	Base
	Dimensions       []builder.Dimension      `json:"dimensions,omitempty"`
	VirtualColumns   []builder.VirtualColumn  `json:"virtualColumns,omitempty"`
	Filter           builder.Filter           `json:"filter,omitempty"`
	Granularity      builder.Granularity      `json:"granularity,omitempty"`
	Aggregations     []builder.Aggregator     `json:"aggregations,omitempty"`
	PostAggregations []builder.PostAggregator `json:"postAggregations,omitempty"`
	Having           builder.HavingSpec       `json:"having,omitempty"`
	LimitSpec        builder.LimitSpec        `json:"limitSpec,omitempty"`
	SubtotalsSpec    [][]string               `json:"subtotalsSpec,omitempty"`
}
func NewGroupBy ¶
func NewGroupBy() *GroupBy
func (*GroupBy) SetAggregations ¶
func (g *GroupBy) SetAggregations(aggregations []builder.Aggregator) *GroupBy
func (*GroupBy) SetContext ¶
func (*GroupBy) SetDataSource ¶
func (g *GroupBy) SetDataSource(dataSource builder.DataSource) *GroupBy
func (*GroupBy) SetDimensions ¶
func (*GroupBy) SetGranularity ¶
func (g *GroupBy) SetGranularity(granularity builder.Granularity) *GroupBy
func (*GroupBy) SetPostAggregations ¶
func (g *GroupBy) SetPostAggregations(postAggregations []builder.PostAggregator) *GroupBy
func (*GroupBy) SetSubtotalsSpec ¶
func (*GroupBy) SetVirtualColumns ¶
func (g *GroupBy) SetVirtualColumns(virtualColumns []builder.VirtualColumn) *GroupBy
func (*GroupBy) UnmarshalJSON ¶
type SQL ¶
type SQL struct {
	Base
	Query        string         `json:"query,omitempty"`
	ResultFormat string         `json:"resultFormat,omitempty"`
	Header       bool           `json:"header,omitempty"`
	Parameters   []SQLParameter `json:"parameters,omitempty"`
}
func (*SQL) SetParameters ¶
func (s *SQL) SetParameters(parameters []SQLParameter) *SQL
func (*SQL) SetResultFormat ¶
func (*SQL) UnmarshalJSON ¶
type SQLParameter ¶
type SQLParameter struct {
	Type  string `json:"type,omitempty"`
	Value string `json:"value,omitempty"`
}
func NewSQLParameter ¶
func NewSQLParameter() *SQLParameter
type Scan ¶
type Scan struct {
	Base
	VirtualColumns []builder.VirtualColumn `json:"virtualColumns,omitempty"`
	ResultFormat   string                  `json:"resultFormat,omitempty"`
	BatchSize      int64                   `json:"batchSize,omitempty"`
	Limit          int64                   `json:"limit,omitempty"`
	Offset         int64                   `json:"offset,omitempty"`
	Order          Order                   `json:"order,omitempty"`
	Filter         builder.Filter          `json:"filter,omitempty"`
	Columns        []string                `json:"columns,omitempty"`
	Legacy         bool                    `json:"legacy,omitempty"`
}
Scan query returns raw Apache Druid rows in streaming mode. https://druid.apache.org/docs/latest/querying/scan-query.html
func NewScan ¶
func NewScan() *Scan
NewScan returns a *Scan which can be used to build a scan query. For example:
table := datasource.NewTable().SetName("table-name")
now := time.Now()
i := intervals.NewInterval().SetInterval(now.Add(-60*time.Minute), now)
is := intervals.NewIntervals().SetIntervals([]*intervals.Interval{i})
filter1 := filter.NewSelector().SetDimension("key1").SetValue("val1")
filter2 := filter.NewSelector().SetDimension("key2").SetValue("val2")
filters := filter.NewAnd().SetFields([]builder.Filter{filter1, filter2})
ts := query.NewScan().SetDataSource(table).SetIntervals(is).SetFilter(filters).SetResultFormat("compactedList").SetLimit(10)
func (*Scan) SetBatchSize ¶
SetBatchSize sets the batch size.
func (*Scan) SetContext ¶
SetContext sets the context.
func (*Scan) SetDataSource ¶
func (s *Scan) SetDataSource(dataSource builder.DataSource) *Scan
SetDataSource sets data source.
func (*Scan) SetIntervals ¶
SetIntervals sets the intervals.
func (*Scan) SetResultFormat ¶
SetResultFormat sets the result format.
func (*Scan) SetVirtualColumns ¶
func (s *Scan) SetVirtualColumns(virtualColumns []builder.VirtualColumn) *Scan
SetVirtualColumns sets virtual columns.
func (*Scan) UnmarshalJSON ¶
UnmarshalJSON unmarshals a Druid scan native query JSON string into the builder type.
type Search ¶
type Search struct {
	Base
	Filter           builder.Filter          `json:"filter,omitempty"`
	Granularity      builder.Granularity     `json:"granularity,omitempty"`
	Limit            int64                   `json:"limit,omitempty"`
	SearchDimensions []builder.Dimension     `json:"searchDimensions,omitempty"`
	Query            builder.SearchQuerySpec `json:"query,omitempty"`
	Sort             *SearchSortSpec         `json:"sort,omitempty"`
}
func (*Search) SetContext ¶
func (*Search) SetDataSource ¶
func (s *Search) SetDataSource(dataSource builder.DataSource) *Search
func (*Search) SetGranularity ¶
func (s *Search) SetGranularity(granularity builder.Granularity) *Search
func (*Search) SetSearchDimensions ¶
func (*Search) SetSort ¶
func (s *Search) SetSort(sort *SearchSortSpec) *Search
func (*Search) UnmarshalJSON ¶
type SearchSortSpec ¶
type SearchSortSpec struct {
Type types.StringComparator `json:"type,omitempty"`
}
type SegmentMetadata ¶
type SegmentMetadata struct {
	Base
	ToInclude              builder.ToInclude `json:"toInclude,omitempty"`
	Merge                  bool              `json:"merge,omitempty"`
	AnalysisTypes          []AnalysisType    `json:"analysisTypes,omitempty"`
	UsingDefaultInterval   bool              `json:"usingDefaultInterval,omitempty"`
	LenientAggregatorMerge bool              `json:"lenientAggregatorMerge,omitempty"`
}
func NewSegmentMetadata ¶
func NewSegmentMetadata() *SegmentMetadata
func (*SegmentMetadata) SetAnalysisTypes ¶
func (s *SegmentMetadata) SetAnalysisTypes(analysisTypes []AnalysisType) *SegmentMetadata
func (*SegmentMetadata) SetContext ¶
func (s *SegmentMetadata) SetContext(context map[string]interface{}) *SegmentMetadata
func (*SegmentMetadata) SetDataSource ¶
func (s *SegmentMetadata) SetDataSource(dataSource builder.DataSource) *SegmentMetadata
func (*SegmentMetadata) SetIntervals ¶
func (s *SegmentMetadata) SetIntervals(intervals builder.Intervals) *SegmentMetadata
func (*SegmentMetadata) SetLenientAggregatorMerge ¶
func (s *SegmentMetadata) SetLenientAggregatorMerge(lenientAggregatorMerge bool) *SegmentMetadata
func (*SegmentMetadata) SetMerge ¶
func (s *SegmentMetadata) SetMerge(merge bool) *SegmentMetadata
func (*SegmentMetadata) SetToInclude ¶
func (s *SegmentMetadata) SetToInclude(toInclude builder.ToInclude) *SegmentMetadata
func (*SegmentMetadata) SetUsingDefaultInterval ¶
func (s *SegmentMetadata) SetUsingDefaultInterval(usingDefaultInterval bool) *SegmentMetadata
func (*SegmentMetadata) UnmarshalJSON ¶
func (s *SegmentMetadata) UnmarshalJSON(data []byte) error
type TimeBoundary ¶
type TimeBoundary struct {
	Base
	Bound  string         `json:"bound,omitempty"`
	Filter builder.Filter `json:"filter,omitempty"`
}
func NewTimeBoundary ¶
func NewTimeBoundary() *TimeBoundary
func (*TimeBoundary) SetBound ¶
func (t *TimeBoundary) SetBound(bound string) *TimeBoundary
func (*TimeBoundary) SetContext ¶
func (t *TimeBoundary) SetContext(context map[string]interface{}) *TimeBoundary
func (*TimeBoundary) SetDataSource ¶
func (t *TimeBoundary) SetDataSource(dataSource builder.DataSource) *TimeBoundary
func (*TimeBoundary) SetFilter ¶
func (t *TimeBoundary) SetFilter(filter builder.Filter) *TimeBoundary
func (*TimeBoundary) SetIntervals ¶
func (t *TimeBoundary) SetIntervals(intervals builder.Intervals) *TimeBoundary
func (*TimeBoundary) UnmarshalJSON ¶
func (t *TimeBoundary) UnmarshalJSON(data []byte) error
type Timeseries ¶
type Timeseries struct {
	Base
	Descending       bool                     `json:"descending,omitempty"`
	VirtualColumns   []builder.VirtualColumn  `json:"virtualColumns,omitempty"`
	Filter           builder.Filter           `json:"filter,omitempty"`
	Granularity      builder.Granularity      `json:"granularity,omitempty"`
	Aggregations     []builder.Aggregator     `json:"aggregations,omitempty"`
	PostAggregations []builder.PostAggregator `json:"postAggregations,omitempty"`
	Limit            int64                    `json:"limit,omitempty"`
}
func NewTimeseries ¶
func NewTimeseries() *Timeseries
func (*Timeseries) SetAggregations ¶
func (t *Timeseries) SetAggregations(aggregations []builder.Aggregator) *Timeseries
func (*Timeseries) SetContext ¶
func (t *Timeseries) SetContext(context map[string]interface{}) *Timeseries
func (*Timeseries) SetDataSource ¶
func (t *Timeseries) SetDataSource(dataSource builder.DataSource) *Timeseries
func (*Timeseries) SetDescending ¶
func (t *Timeseries) SetDescending(descending bool) *Timeseries
func (*Timeseries) SetFilter ¶
func (t *Timeseries) SetFilter(filter builder.Filter) *Timeseries
func (*Timeseries) SetGranularity ¶
func (t *Timeseries) SetGranularity(granularity builder.Granularity) *Timeseries
func (*Timeseries) SetIntervals ¶
func (t *Timeseries) SetIntervals(intervals builder.Intervals) *Timeseries
func (*Timeseries) SetLimit ¶
func (t *Timeseries) SetLimit(limit int64) *Timeseries
func (*Timeseries) SetPostAggregations ¶
func (t *Timeseries) SetPostAggregations(postAggregations []builder.PostAggregator) *Timeseries
func (*Timeseries) SetVirtualColumns ¶
func (t *Timeseries) SetVirtualColumns(virtualColumns []builder.VirtualColumn) *Timeseries
func (*Timeseries) UnmarshalJSON ¶
func (t *Timeseries) UnmarshalJSON(data []byte) error
type TopN ¶
type TopN struct {
	Base
	VirtualColumns   []builder.VirtualColumn  `json:"virtualColumns,omitempty"`
	Dimension        builder.Dimension        `json:"dimension,omitempty"`
	Metric           builder.TopNMetric       `json:"metric,omitempty"`
	Threshold        int64                    `json:"threshold,omitempty"`
	Filter           builder.Filter           `json:"filter,omitempty"`
	Granularity      builder.Granularity      `json:"granularity,omitempty"`
	Aggregations     []builder.Aggregator     `json:"aggregations,omitempty"`
	PostAggregations []builder.PostAggregator `json:"postAggregations,omitempty"`
}
func (*TopN) SetAggregations ¶
func (t *TopN) SetAggregations(aggregations []builder.Aggregator) *TopN
func (*TopN) SetContext ¶
func (*TopN) SetDataSource ¶
func (t *TopN) SetDataSource(dataSource builder.DataSource) *TopN
func (*TopN) SetGranularity ¶
func (t *TopN) SetGranularity(granularity builder.Granularity) *TopN
func (*TopN) SetPostAggregations ¶
func (t *TopN) SetPostAggregations(postAggregations []builder.PostAggregator) *TopN
func (*TopN) SetThreshold ¶
func (*TopN) SetVirtualColumns ¶
func (t *TopN) SetVirtualColumns(virtualColumns []builder.VirtualColumn) *TopN
func (*TopN) UnmarshalJSON ¶
Click to show internal directories.
Click to hide internal directories.