Documentation
Overview
Package transformations contains the implementations for the builtin transformation functions.
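Most of the New*Transformation constructors listed in the index below follow the same wiring: a create function decodes the procedure spec, builds a table builder cache and dataset from the execute.Administration, and hands all three to the constructor. The sketch below illustrates that pattern for sort. It is an illustration only, assuming the flux execute API (execute.NewTableBuilderCache, execute.NewDataset); the package's real create functions may differ in detail.

package transformations

import (
	"fmt"

	"github.com/influxdata/flux/execute"
	"github.com/influxdata/flux/plan"
)

// createSortTransformation sketches the common wiring for a transformation
// constructor: decode the procedure spec, allocate a table builder cache and
// dataset, and pass both to NewSortTransformation. Illustrative only.
func createSortTransformation(id execute.DatasetID, mode execute.AccumulationMode, spec plan.ProcedureSpec, a execute.Administration) (execute.Transformation, execute.Dataset, error) {
	s, ok := spec.(*SortProcedureSpec)
	if !ok {
		return nil, nil, fmt.Errorf("invalid spec type %T", spec)
	}
	cache := execute.NewTableBuilderCache(a.Allocator())
	d := execute.NewDataset(id, mode, cache)
	t := NewSortTransformation(d, cache, s)
	return t, d, nil
}

Constructors that also take a context.Context or return an error (for example NewFilterTransformation or NewMapTransformation) follow the same idea, with those extras handled inside their create functions.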
Index
- Constants
- Variables
- func MakeContainsFunc() values.Function
- func MakeLengthFunc() values.Function
- func NewChandeMomentumOscillatorTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *chandeMomentumOscillatorTransformation
- func NewColumnsTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ColumnsProcedureSpec) *columnsTransformation
- func NewCumulativeSumTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *cumulativeSumTransformation
- func NewDerivativeTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *derivativeTransformation
- func NewDifferenceTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *differenceTransformation
- func NewDistinctTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *distinctTransformation
- func NewElapsedTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ElapsedProcedureSpec) *elapsedTransformation
- func NewExponentialMovingAverageTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *exponentialMovingAverageTransformation
- func NewFillTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *FillProcedureSpec) *fillTransformation
- func NewFilterTransformation(ctx context.Context, spec *FilterProcedureSpec, id execute.DatasetID, ...) (execute.Transformation, execute.Dataset, error)
- func NewFixedWindowTransformation(d execute.Dataset, cache execute.TableBuilderCache, bounds execute.Bounds, ...) execute.Transformation
- func NewGetColumnFunction() values.Value
- func NewGetRecordFunction() values.Value
- func NewGroupTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *GroupProcedureSpec) *groupTransformation
- func NewHistogramTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *histogramTransformation
- func NewHistorgramQuantileTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) execute.Transformation
- func NewHoltWintersTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *holtWintersTransformation
- func NewHourSelectionTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *hourSelectionTransformation
- func NewIntegralTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *integralTransformation
- func NewKeyValuesTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *keyValuesTransformation
- func NewKeysTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *KeysProcedureSpec) *keysTransformation
- func NewLimitTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *LimitProcedureSpec) *limitTransformation
- func NewMapTransformation(ctx context.Context, spec *MapProcedureSpec, d execute.Dataset, ...) (*mapTransformation, error)
- func NewMergeJoinTransformation(d execute.Dataset, cache *MergeJoinCache, spec *MergeJoinProcedureSpec, ...) *mergeJoinTransformation
- func NewModeTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ModeProcedureSpec) *modeTransformation
- func NewMovingAverageTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *movingAverageTransformation
- func NewPivotTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *PivotProcedureSpec) *pivotTransformation
- func NewRangeTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *RangeProcedureSpec, ...) (*rangeTransformation, error)
- func NewReduceTransformation(ctx context.Context, spec *ReduceProcedureSpec, d execute.Dataset, ...) (*reduceTransformation, error)
- func NewRelativeStrengthIndexTransformation(d execute.Dataset, cache execute.TableBuilderCache, ...) *relativeStrengthIndexTransformation
- func NewSchemaMutationTransformation(ctx context.Context, spec plan.ProcedureSpec, d execute.Dataset, ...) (*schemaMutationTransformation, error)
- func NewSetTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *SetProcedureSpec) execute.Transformation
- func NewShiftTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ShiftProcedureSpec) *shiftTransformation
- func NewSortTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *SortProcedureSpec) *sortTransformation
- func NewStateTrackingTransformation(ctx context.Context, spec *StateTrackingProcedureSpec, d execute.Dataset, ...) (*stateTrackingTransformation, error)
- func NewTableFindFunction() values.Value
- func NewTailTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *TailProcedureSpec) *tailTransformation
- func NewTripleExponentialDerivativeTransformation(d execute.Dataset, cache execute.TableBuilderCache, alloc *memory.Allocator, ...) *tripleExponentialDerivativeTransformation
- func NewUnionTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *UnionProcedureSpec, ...) *unionTransformation
- func NewUniqueTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *UniqueProcedureSpec) *uniqueTransformation
- func NewkamaTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *KamaProcedureSpec) *kamaTransformation
- type BuilderContext
- type ChandeMomentumOscillatorOpSpec
- type ChandeMomentumOscillatorProcedureSpec
- type ColumnsOpSpec
- type ColumnsProcedureSpec
- type CountAgg
- func (a *CountAgg) DoBool(vs *array.Boolean)
- func (a *CountAgg) DoFloat(vs *array.Float64)
- func (a *CountAgg) DoInt(vs *array.Int64)
- func (a *CountAgg) DoString(vs *array.Binary)
- func (a *CountAgg) DoUInt(vs *array.Uint64)
- func (a *CountAgg) IsNull() bool
- func (a *CountAgg) NewBoolAgg() execute.DoBoolAgg
- func (a *CountAgg) NewFloatAgg() execute.DoFloatAgg
- func (a *CountAgg) NewIntAgg() execute.DoIntAgg
- func (a *CountAgg) NewStringAgg() execute.DoStringAgg
- func (a *CountAgg) NewUIntAgg() execute.DoUIntAgg
- func (a *CountAgg) Type() flux.ColType
- func (a *CountAgg) ValueInt() int64
- type CountOpSpec
- type CountProcedureSpec
- type CovarianceOpSpec
- type CovarianceProcedureSpec
- type CovarianceTransformation
- func (t *CovarianceTransformation) DoFloat(xs, ys *array.Float64)
- func (t *CovarianceTransformation) Finish(id execute.DatasetID, err error)
- func (t *CovarianceTransformation) Process(id execute.DatasetID, tbl flux.Table) error
- func (t *CovarianceTransformation) RetractTable(id execute.DatasetID, key flux.GroupKey) error
- func (t *CovarianceTransformation) UpdateProcessingTime(id execute.DatasetID, pt execute.Time) error
- func (t *CovarianceTransformation) UpdateWatermark(id execute.DatasetID, mark execute.Time) error
- type CumulativeSumOpSpec
- type CumulativeSumProcedureSpec
- type DerivativeOpSpec
- type DerivativeProcedureSpec
- type DifferenceOpSpec
- type DifferenceProcedureSpec
- type DistinctOpSpec
- type DistinctProcedureSpec
- type DropKeepMutator
- type DropOpSpec
- type DuplicateMutator
- type DuplicateOpSpec
- type ElapsedOpSpec
- type ElapsedProcedureSpec
- type ExactQuantileAgg
- func (a *ExactQuantileAgg) Copy() *ExactQuantileAgg
- func (a *ExactQuantileAgg) DoFloat(vs *array.Float64)
- func (a *ExactQuantileAgg) IsNull() bool
- func (a *ExactQuantileAgg) NewBoolAgg() execute.DoBoolAgg
- func (a *ExactQuantileAgg) NewFloatAgg() execute.DoFloatAgg
- func (a *ExactQuantileAgg) NewIntAgg() execute.DoIntAgg
- func (a *ExactQuantileAgg) NewStringAgg() execute.DoStringAgg
- func (a *ExactQuantileAgg) NewUIntAgg() execute.DoUIntAgg
- func (a *ExactQuantileAgg) Type() flux.ColType
- func (a *ExactQuantileAgg) ValueFloat() float64
- type ExactQuantileAggProcedureSpec
- type ExactQuantileSelectProcedureSpec
- type ExactQuantileSelectorTransformation
- func (t *ExactQuantileSelectorTransformation) Finish(id execute.DatasetID, err error)
- func (t *ExactQuantileSelectorTransformation) Process(id execute.DatasetID, tbl flux.Table) error
- func (t *ExactQuantileSelectorTransformation) RetractTable(id execute.DatasetID, key flux.GroupKey) error
- func (t *ExactQuantileSelectorTransformation) UpdateProcessingTime(id execute.DatasetID, pt execute.Time) error
- func (t *ExactQuantileSelectorTransformation) UpdateWatermark(id execute.DatasetID, mark execute.Time) error
- type ExponentialMovingAverageOpSpec
- type ExponentialMovingAverageProcedureSpec
- type FillOpSpec
- type FillProcedureSpec
- type FilterOpSpec
- type FilterProcedureSpec
- type FirstOpSpec
- type FirstProcedureSpec
- type FirstSelector
- func (s *FirstSelector) DoBool(vs *array.Boolean) []int
- func (s *FirstSelector) DoFloat(vs *array.Float64) []int
- func (s *FirstSelector) DoInt(vs *array.Int64) []int
- func (s *FirstSelector) DoString(vs *array.Binary) []int
- func (s *FirstSelector) DoTime(vs *array.Int64) []int
- func (s *FirstSelector) DoUInt(vs *array.Uint64) []int
- func (s *FirstSelector) NewBoolSelector() execute.DoBoolIndexSelector
- func (s *FirstSelector) NewFloatSelector() execute.DoFloatIndexSelector
- func (s *FirstSelector) NewIntSelector() execute.DoIntIndexSelector
- func (s *FirstSelector) NewStringSelector() execute.DoStringIndexSelector
- func (s *FirstSelector) NewTimeSelector() execute.DoTimeIndexSelector
- func (s *FirstSelector) NewUIntSelector() execute.DoUIntIndexSelector
- type GroupOpSpec
- type GroupProcedureSpec
- type HistogramOpSpec
- type HistogramProcedureSpec
- type HistogramQuantileOpSpec
- type HistogramQuantileProcedureSpec
- type HoltWintersOpSpec
- type HoltWintersProcedureSpec
- type HourSelectionOpSpec
- type HourSelectionProcedureSpec
- type IntegralOpSpec
- type IntegralProcedureSpec
- type JoinOpSpec
- type KamaOpSpec
- type KamaProcedureSpec
- type KeepOpSpec
- type KeyValuesOpSpec
- type KeyValuesProcedureSpec
- type KeysOpSpec
- type KeysProcedureSpec
- type LastOpSpec
- type LastProcedureSpec
- type LastSelector
- func (s *LastSelector) DoBool(vs *array.Boolean, cr flux.ColReader)
- func (s *LastSelector) DoFloat(vs *array.Float64, cr flux.ColReader)
- func (s *LastSelector) DoInt(vs *array.Int64, cr flux.ColReader)
- func (s *LastSelector) DoString(vs *array.Binary, cr flux.ColReader)
- func (s *LastSelector) DoTime(vs *array.Int64, cr flux.ColReader)
- func (s *LastSelector) DoUInt(vs *array.Uint64, cr flux.ColReader)
- func (s *LastSelector) NewBoolSelector() execute.DoBoolRowSelector
- func (s *LastSelector) NewFloatSelector() execute.DoFloatRowSelector
- func (s *LastSelector) NewIntSelector() execute.DoIntRowSelector
- func (s *LastSelector) NewStringSelector() execute.DoStringRowSelector
- func (s *LastSelector) NewTimeSelector() execute.DoTimeRowSelector
- func (s *LastSelector) NewUIntSelector() execute.DoUIntRowSelector
- func (s *LastSelector) Rows() []execute.Row
- type LimitOpSpec
- type LimitProcedureSpec
- type MapOpSpec
- type MapProcedureSpec
- type MaxFloatSelector
- type MaxIntSelector
- type MaxOpSpec
- type MaxProcedureSpec
- type MaxSelector
- func (s *MaxSelector) NewBoolSelector() execute.DoBoolRowSelector
- func (s *MaxSelector) NewFloatSelector() execute.DoFloatRowSelector
- func (s *MaxSelector) NewIntSelector() execute.DoIntRowSelector
- func (s *MaxSelector) NewStringSelector() execute.DoStringRowSelector
- func (s *MaxSelector) NewTimeSelector() execute.DoTimeRowSelector
- func (s *MaxSelector) NewUIntSelector() execute.DoUIntRowSelector
- func (s *MaxSelector) Rows() []execute.Row
- type MaxTimeSelector
- type MaxUIntSelector
- type MeanAgg
- func (a *MeanAgg) DoFloat(vs *array.Float64)
- func (a *MeanAgg) DoInt(vs *array.Int64)
- func (a *MeanAgg) DoUInt(vs *array.Uint64)
- func (a *MeanAgg) IsNull() bool
- func (a *MeanAgg) NewBoolAgg() execute.DoBoolAgg
- func (a *MeanAgg) NewFloatAgg() execute.DoFloatAgg
- func (a *MeanAgg) NewIntAgg() execute.DoIntAgg
- func (a *MeanAgg) NewStringAgg() execute.DoStringAgg
- func (a *MeanAgg) NewUIntAgg() execute.DoUIntAgg
- func (a *MeanAgg) Type() flux.ColType
- func (a *MeanAgg) ValueFloat() float64
- type MeanOpSpec
- type MeanProcedureSpec
- type MergeGroupRule
- type MergeJoinCache
- func (c *MergeJoinCache) DiscardTable(key flux.GroupKey)
- func (c *MergeJoinCache) ExpireTable(key flux.GroupKey)
- func (c *MergeJoinCache) ForEach(f func(flux.GroupKey))
- func (c *MergeJoinCache) ForEachWithContext(f func(flux.GroupKey, execute.Trigger, execute.TableContext))
- func (c *MergeJoinCache) SetTriggerSpec(spec plan.TriggerSpec)
- func (c *MergeJoinCache) Table(key flux.GroupKey) (flux.Table, error)
- type MergeJoinProcedureSpec
- type MinFloatSelector
- type MinIntSelector
- type MinOpSpec
- type MinProcedureSpec
- type MinSelector
- func (s *MinSelector) NewBoolSelector() execute.DoBoolRowSelector
- func (s *MinSelector) NewFloatSelector() execute.DoFloatRowSelector
- func (s *MinSelector) NewIntSelector() execute.DoIntRowSelector
- func (s *MinSelector) NewStringSelector() execute.DoStringRowSelector
- func (s *MinSelector) NewTimeSelector() execute.DoTimeRowSelector
- func (s *MinSelector) NewUIntSelector() execute.DoUIntRowSelector
- func (s *MinSelector) Rows() []execute.Row
- type MinTimeSelector
- type MinUIntSelector
- type ModeOpSpec
- type ModeProcedureSpec
- type MovingAverageOpSpec
- type MovingAverageProcedureSpec
- type MutationRegistrar
- type PivotOpSpec
- type PivotProcedureSpec
- type QuantileAgg
- func (a *QuantileAgg) Copy() *QuantileAgg
- func (a *QuantileAgg) DoFloat(vs *array.Float64)
- func (a *QuantileAgg) IsNull() bool
- func (a *QuantileAgg) NewBoolAgg() execute.DoBoolAgg
- func (a *QuantileAgg) NewFloatAgg() execute.DoFloatAgg
- func (a *QuantileAgg) NewIntAgg() execute.DoIntAgg
- func (a *QuantileAgg) NewStringAgg() execute.DoStringAgg
- func (a *QuantileAgg) NewUIntAgg() execute.DoUIntAgg
- func (a *QuantileAgg) Type() flux.ColType
- func (a *QuantileAgg) ValueFloat() float64
- type QuantileOpSpec
- type RangeOpSpec
- type RangeProcedureSpec
- type ReduceOpSpec
- type ReduceProcedureSpec
- type RelativeStrengthIndexOpSpec
- type RelativeStrengthIndexProcedureSpec
- type RemoveTrivialFilterRule
- type RenameMutator
- type RenameOpSpec
- type SampleOpSpec
- type SampleProcedureSpec
- type SampleSelector
- func (s *SampleSelector) DoBool(vs *array.Boolean) []int
- func (s *SampleSelector) DoFloat(vs *array.Float64) []int
- func (s *SampleSelector) DoInt(vs *array.Int64) []int
- func (s *SampleSelector) DoString(vs *array.Binary) []int
- func (s *SampleSelector) DoTime(vs *array.Int64) []int
- func (s *SampleSelector) DoUInt(vs *array.Uint64) []int
- func (s *SampleSelector) NewBoolSelector() execute.DoBoolIndexSelector
- func (s *SampleSelector) NewFloatSelector() execute.DoFloatIndexSelector
- func (s *SampleSelector) NewIntSelector() execute.DoIntIndexSelector
- func (s *SampleSelector) NewStringSelector() execute.DoStringIndexSelector
- func (s *SampleSelector) NewTimeSelector() execute.DoTimeIndexSelector
- func (s *SampleSelector) NewUIntSelector() execute.DoUIntIndexSelector
- type SchemaMutation
- type SchemaMutationProcedureSpec
- type SchemaMutator
- type SetOpSpec
- type SetProcedureSpec
- type ShiftOpSpec
- type ShiftProcedureSpec
- type SkewAgg
- func (a *SkewAgg) DoFloat(vs *array.Float64)
- func (a *SkewAgg) DoInt(vs *array.Int64)
- func (a *SkewAgg) DoUInt(vs *array.Uint64)
- func (a *SkewAgg) IsNull() bool
- func (a *SkewAgg) NewBoolAgg() execute.DoBoolAgg
- func (a *SkewAgg) NewFloatAgg() execute.DoFloatAgg
- func (a *SkewAgg) NewIntAgg() execute.DoIntAgg
- func (a *SkewAgg) NewStringAgg() execute.DoStringAgg
- func (a *SkewAgg) NewUIntAgg() execute.DoUIntAgg
- func (a *SkewAgg) Type() flux.ColType
- func (a *SkewAgg) ValueFloat() float64
- type SkewOpSpec
- type SkewProcedureSpec
- type SortOpSpec
- type SortProcedureSpec
- type SpreadAgg
- type SpreadFloatAgg
- type SpreadIntAgg
- type SpreadOpSpec
- type SpreadProcedureSpec
- type SpreadUIntAgg
- type StateTrackingOpSpec
- type StateTrackingProcedureSpec
- type StddevAgg
- func (a *StddevAgg) DoFloat(vs *array.Float64)
- func (a *StddevAgg) DoInt(vs *array.Int64)
- func (a *StddevAgg) DoUInt(vs *array.Uint64)
- func (a *StddevAgg) IsNull() bool
- func (a *StddevAgg) NewBoolAgg() execute.DoBoolAgg
- func (a *StddevAgg) NewFloatAgg() execute.DoFloatAgg
- func (a *StddevAgg) NewIntAgg() execute.DoIntAgg
- func (a *StddevAgg) NewStringAgg() execute.DoStringAgg
- func (a *StddevAgg) NewUIntAgg() execute.DoUIntAgg
- func (a *StddevAgg) Type() flux.ColType
- func (a *StddevAgg) ValueFloat() float64
- type StddevOpSpec
- type StddevProcedureSpec
- type SumAgg
- type SumFloatAgg
- type SumIntAgg
- type SumOpSpec
- type SumProcedureSpec
- type SumUIntAgg
- type TDigestQuantileProcedureSpec
- type TailOpSpec
- type TailProcedureSpec
- type TripleExponentialDerivativeOpSpec
- type TripleExponentialDerivativeProcedureSpec
- type UnionOpSpec
- type UnionProcedureSpec
- type UniqueOpSpec
- type UniqueProcedureSpec
- type WindowOpSpec
- type WindowProcedureSpec
- type WindowTriggerPhysicalRule
- type YieldOpSpec
- type YieldProcedureSpec
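The aggregate types in the index above (CountAgg, MeanAgg, SkewAgg, StddevAgg, and the quantile aggregates) share a small protocol: the New<Type>Agg methods return the per-column aggregate state, the Do<Type> methods consume an Arrow array of column values, and Value<Type>/IsNull report the result. Below is a minimal sketch of driving CountAgg by hand; it assumes the array types come from the Apache Arrow Go module flux uses, and that the state returned by NewFloatAgg is itself a *CountAgg. The helper is hypothetical, not part of the package.

package transformations

import (
	"github.com/apache/arrow/go/arrow/array"
)

// countFloats drives a CountAgg over a single column of float values.
// Hypothetical helper for illustration; in practice execute's aggregate
// transformation machinery calls these methods for each column of each table.
func countFloats(vs *array.Float64) int64 {
	var c CountAgg
	fa := c.NewFloatAgg().(*CountAgg) // assumption: the returned DoFloatAgg state is a *CountAgg
	fa.DoFloat(vs)                    // accumulate the column
	return fa.ValueInt()              // number of values counted
}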
Constants
const ChandeMomentumOscillatorKind = "chandeMomentumOscillator"
const ColumnsKind = "columns"
const CountKind = "count"
const CovarianceKind = "covariance"
const CumulativeSumKind = "cumulativeSum"
const DefaultUpperBoundColumnLabel = "le"
const DerivativeKind = "derivative"
const DifferenceKind = "difference"
const DistinctKind = "distinct"
const DropKind = "drop"
const DuplicateKind = "duplicate"
const ElapsedKind = "elapsed"
const ExactQuantileAggKind = "exact-quantile-aggregate"
const ExactQuantileSelectKind = "exact-quantile-selector"
const ExponentialMovingAverageKind = "exponentialMovingAverage"
const FillKind = "fill"
const FilterKind = "filter"
const FirstKind = "first"
const GroupKind = "group"
const HistogramKind = "histogram"
const HistogramQuantileKind = "histogramQuantile"
const HoltWintersKind = "holtWinters"
const HourSelectionKind = "hourSelection"
const IntegralKind = "integral"
const JoinKind = "join"
const KeepKind = "keep"
const KeyValuesKind = "keyValues"
const KeysKind = "keys"
const LastKind = "last"
const LimitKind = "limit"
const MapKind = "map"
const MaxKind = "max"
const MeanKind = "mean"
const MergeJoinKind = "merge-join"
const MinKind = "min"
const ModeKind = "mode"
const MovingAverageKind = "movingAverage"
const (
	PivotKind = "pivot"
)
const QuantileKind = "quantile"
const RangeKind = "range"
const ReduceKind = "reduce"
const RelativeStrengthIndexKind = "relativeStrengthIndex"
const RenameKind = "rename"
const SampleKind = "sample"
const SchemaMutationKind = "SchemaMutation"
The base kind for SchemaMutations
const SetKind = "set"
const ShiftKind = "timeShift"
const SkewKind = "skew"
const SortKind = "sort"
const SpreadKind = "spread"
SpreadKind is the registration name for Flux, query, plan, and execution.
const StateTrackingKind = "stateTracking"
const (
	StddevKind = "stddev"
)
const SumKind = "sum"
const TailKind = "tail"
const TripleExponentialDerivativeKind = "tripleExponentialDerivative"
const UnionKind = "union"
const UniqueKind = "unique"
const WindowKind = "window"
const YieldKind = "yield"
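The *Kind constants above are the names under which each operation is registered with the Flux runtime: the same string keys the operation spec, the procedure spec, and the transformation. The sketch below shows that registration pattern using the older flux registry API (flux.RegisterOpSpec, plan.RegisterProcedureSpec, execute.RegisterTransformation). The closure bodies and the createCountTransformation name are illustrative assumptions, and the package's own init already performs the real registrations, so this is illustration only.

package transformations

import (
	"github.com/influxdata/flux"
	"github.com/influxdata/flux/plan"
)

func init() {
	// The operation spec and procedure spec for count both register under the
	// same CountKind string; the transformation is keyed by it as well.
	flux.RegisterOpSpec(CountKind, func() flux.OperationSpec { return new(CountOpSpec) })
	plan.RegisterProcedureSpec(CountKind, func(qs flux.OperationSpec, pa plan.Administration) (plan.ProcedureSpec, error) {
		return new(CountProcedureSpec), nil
	}, CountKind)
	// execute.RegisterTransformation(CountKind, createCountTransformation)
	// createCountTransformation (hypothetical name) would follow the wiring
	// sketched under Overview.
}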
Variables
var FluxTestPackages = []*ast.Package{ ... }

FluxTestPackages holds generated ASTs of this package's Flux test files. The (truncated) value shown on the original page corresponds to join_missing_on_col_test.flux, which defines inData/outData CSV fixtures and the following test function:

t_join_missing_on_col = (tables=<-) => {
    lhs = tables
        |> range(start: 2019-01-01T00:00:00Z)
        |> filter(fn: (r) => r._field == "resp_bytes")
        |> keep(columns: ["_time", "org_id", "_value"])
        |> sum()
        |> rename(columns: {_value: "dataout"})
    rhs = tables
        |> range(start: 2019-01-01T00:00:00Z)
        |> filter(fn: (r) => r._field == "req_bytes")
        |> keep(columns: ["_time", "org_id", "_value"])
        |> sum()
        |> rename(columns: {_value: "datain"})
    return join(tables: {lhs: lhs, rhs: rhs}, on: ["org_id"])
}

test _join_missing_on_col = () =>
    ({input: testing.loadStorage(csv: inData), want: testing.loadMem(csv: outData), fn: t_join_missing_on_col})
&ast.PipeLiteral{BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 35, Line: 44, }, File: "join_missing_on_col_test.flux", Source: "<-", Start: ast.Position{ Column: 33, Line: 44, }, }, }}, }}, }, }, &ast.TestStatement{ Assignment: &ast.VariableAssignment{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 112, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "_join_missing_on_col = () =>\n ({input: testing.loadStorage(csv: inData), want: testing.loadMem(csv: outData), fn: t_join_missing_on_col})", Start: ast.Position{ Column: 6, Line: 60, }, }, }, ID: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 26, Line: 60, }, File: "join_missing_on_col_test.flux", Source: "_join_missing_on_col", Start: ast.Position{ Column: 6, Line: 60, }, }, }, Name: "_join_missing_on_col", }, Init: &ast.FunctionExpression{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 112, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "() =>\n ({input: testing.loadStorage(csv: inData), want: testing.loadMem(csv: outData), fn: t_join_missing_on_col})", Start: ast.Position{ Column: 29, Line: 60, }, }, }, Body: &ast.ParenExpression{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 112, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "({input: testing.loadStorage(csv: inData), want: testing.loadMem(csv: outData), fn: t_join_missing_on_col})", Start: ast.Position{ Column: 5, Line: 61, }, }, }, Expression: &ast.ObjectExpression{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 111, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "{input: testing.loadStorage(csv: inData), want: testing.loadMem(csv: outData), fn: t_join_missing_on_col}", Start: ast.Position{ Column: 6, Line: 61, }, }, }, Properties: []*ast.Property{&ast.Property{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 46, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "input: testing.loadStorage(csv: inData)", Start: ast.Position{ Column: 7, Line: 61, }, }, }, Key: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 12, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "input", Start: ast.Position{ Column: 7, Line: 61, }, }, }, Name: "input", }, Value: &ast.CallExpression{ Arguments: []ast.Expression{&ast.ObjectExpression{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 45, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "csv: inData", Start: ast.Position{ Column: 34, Line: 61, }, }, }, Properties: []*ast.Property{&ast.Property{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 45, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "csv: inData", Start: ast.Position{ Column: 34, Line: 61, }, }, }, Key: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 37, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "csv", Start: ast.Position{ Column: 34, Line: 61, }, }, }, Name: "csv", }, Value: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 45, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "inData", Start: ast.Position{ 
Column: 39, Line: 61, }, }, }, Name: "inData", }, }}, With: nil, }}, BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 46, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "testing.loadStorage(csv: inData)", Start: ast.Position{ Column: 14, Line: 61, }, }, }, Callee: &ast.MemberExpression{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 33, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "testing.loadStorage", Start: ast.Position{ Column: 14, Line: 61, }, }, }, Object: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 21, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "testing", Start: ast.Position{ Column: 14, Line: 61, }, }, }, Name: "testing", }, Property: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 33, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "loadStorage", Start: ast.Position{ Column: 22, Line: 61, }, }, }, Name: "loadStorage", }, }, }, }, &ast.Property{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 83, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "want: testing.loadMem(csv: outData)", Start: ast.Position{ Column: 48, Line: 61, }, }, }, Key: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 52, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "want", Start: ast.Position{ Column: 48, Line: 61, }, }, }, Name: "want", }, Value: &ast.CallExpression{ Arguments: []ast.Expression{&ast.ObjectExpression{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 82, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "csv: outData", Start: ast.Position{ Column: 70, Line: 61, }, }, }, Properties: []*ast.Property{&ast.Property{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 82, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "csv: outData", Start: ast.Position{ Column: 70, Line: 61, }, }, }, Key: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 73, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "csv", Start: ast.Position{ Column: 70, Line: 61, }, }, }, Name: "csv", }, Value: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 82, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "outData", Start: ast.Position{ Column: 75, Line: 61, }, }, }, Name: "outData", }, }}, With: nil, }}, BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 83, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "testing.loadMem(csv: outData)", Start: ast.Position{ Column: 54, Line: 61, }, }, }, Callee: &ast.MemberExpression{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 69, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "testing.loadMem", Start: ast.Position{ Column: 54, Line: 61, }, }, }, Object: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 61, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "testing", Start: ast.Position{ Column: 54, Line: 61, }, }, }, Name: "testing", }, Property: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: 
&ast.SourceLocation{ End: ast.Position{ Column: 69, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "loadMem", Start: ast.Position{ Column: 62, Line: 61, }, }, }, Name: "loadMem", }, }, }, }, &ast.Property{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 110, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "fn: t_join_missing_on_col", Start: ast.Position{ Column: 85, Line: 61, }, }, }, Key: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 87, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "fn", Start: ast.Position{ Column: 85, Line: 61, }, }, }, Name: "fn", }, Value: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 110, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "t_join_missing_on_col", Start: ast.Position{ Column: 89, Line: 61, }, }, }, Name: "t_join_missing_on_col", }, }}, With: nil, }, }, Params: nil, }, }, BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 112, Line: 61, }, File: "join_missing_on_col_test.flux", Source: "test _join_missing_on_col = () =>\n ({input: testing.loadStorage(csv: inData), want: testing.loadMem(csv: outData), fn: t_join_missing_on_col})", Start: ast.Position{ Column: 1, Line: 60, }, }, }, }}, Imports: []*ast.ImportDeclaration{&ast.ImportDeclaration{ As: nil, BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 17, Line: 3, }, File: "join_missing_on_col_test.flux", Source: "import \"testing\"", Start: ast.Position{ Column: 1, Line: 3, }, }, }, Path: &ast.StringLiteral{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 17, Line: 3, }, File: "join_missing_on_col_test.flux", Source: "\"testing\"", Start: ast.Position{ Column: 8, Line: 3, }, }, }, Value: "testing", }, }}, Name: "join_missing_on_col_test.flux", Package: &ast.PackageClause{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 22, Line: 1, }, File: "join_missing_on_col_test.flux", Source: "package universe_test", Start: ast.Position{ Column: 1, Line: 1, }, }, }, Name: &ast.Identifier{ BaseNode: ast.BaseNode{ Errors: nil, Loc: &ast.SourceLocation{ End: ast.Position{ Column: 22, Line: 1, }, File: "join_missing_on_col_test.flux", Source: "universe_test", Start: ast.Position{ Column: 9, Line: 1, }, }, }, Name: "main", }, }, }}, Package: "main", Path: "", }}
var Registrars = []MutationRegistrar{
    {
        Kind: RenameKind,
        Args: map[string]semantic.PolyType{
            "columns": semantic.Object,
            "fn": semantic.NewFunctionPolyType(semantic.FunctionPolySignature{
                Parameters: map[string]semantic.PolyType{
                    "column": semantic.String,
                },
                Required: semantic.LabelSet{"column"},
                Return:   semantic.String,
            }),
        },
        Create: createRenameOpSpec,
        New:    newRenameOp,
    },
    {
        Kind: DropKind,
        Args: map[string]semantic.PolyType{
            "columns": semantic.NewArrayPolyType(semantic.String),
            "fn": semantic.NewFunctionPolyType(semantic.FunctionPolySignature{
                Parameters: map[string]semantic.PolyType{
                    "column": semantic.String,
                },
                Required: semantic.LabelSet{"column"},
                Return:   semantic.Bool,
            }),
        },
        Create: createDropOpSpec,
        New:    newDropOp,
    },
    {
        Kind: KeepKind,
        Args: map[string]semantic.PolyType{
            "columns": semantic.NewArrayPolyType(semantic.String),
            "fn": semantic.NewFunctionPolyType(semantic.FunctionPolySignature{
                Parameters: map[string]semantic.PolyType{
                    "column": semantic.String,
                },
                Required: semantic.LabelSet{"column"},
                Return:   semantic.Bool,
            }),
        },
        Create: createKeepOpSpec,
        New:    newKeepOp,
    },
    {
        Kind: DuplicateKind,
        Args: map[string]semantic.PolyType{
            "column": semantic.String,
            "as":     semantic.String,
        },
        Create: createDuplicateOpSpec,
        New:    newDuplicateOp,
    },
}
A list of all MutationRegistrars to register. To register a new mutation, add an entry to this list.
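As an illustration only, a new entry would follow the same shape as the existing ones; the names SwapKind, createSwapOpSpec, and newSwapOp below are hypothetical placeholders, not identifiers defined by this package:

{
    Kind: SwapKind, // hypothetical operation kind for the new mutation
    Args: map[string]semantic.PolyType{
        "column": semantic.String,
    },
    Create: createSwapOpSpec, // hypothetical spec constructor, analogous to createRenameOpSpec
    New:    newSwapOp,        // hypothetical factory, analogous to newRenameOp
},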
var SchemaMutationOps = []flux.OperationKind{}
A list of all operations that should map to the SchemaMutationProcedure. It is appended to dynamically upon calls to `Register()`.
Functions ¶
func MakeContainsFunc ¶ added in v0.19.0
MakeContainsFunc will construct the "contains()" function.
Contains will test whether a given value is a member of the given set array.
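For example, the Flux call `contains(value: 1, set: [1, 2, 3])` evaluates to true.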
func MakeLengthFunc ¶ added in v0.35.0
MakeLengthFunc creates the "length()" function.
Length will return the length of the given arr array.
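For example, the Flux call `length(arr: [1, 2, 3])` returns 3.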
func NewChandeMomentumOscillatorTransformation ¶ added in v0.39.0
func NewChandeMomentumOscillatorTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ChandeMomentumOscillatorProcedureSpec) *chandeMomentumOscillatorTransformation
func NewColumnsTransformation ¶
func NewColumnsTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ColumnsProcedureSpec) *columnsTransformation
func NewCumulativeSumTransformation ¶
func NewCumulativeSumTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *CumulativeSumProcedureSpec) *cumulativeSumTransformation
func NewDerivativeTransformation ¶
func NewDerivativeTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *DerivativeProcedureSpec) *derivativeTransformation
func NewDifferenceTransformation ¶
func NewDifferenceTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *DifferenceProcedureSpec) *differenceTransformation
func NewDistinctTransformation ¶
func NewDistinctTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *DistinctProcedureSpec) *distinctTransformation
func NewElapsedTransformation ¶ added in v0.36.0
func NewElapsedTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ElapsedProcedureSpec) *elapsedTransformation
func NewExponentialMovingAverageTransformation ¶ added in v0.37.0
func NewExponentialMovingAverageTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ExponentialMovingAverageProcedureSpec) *exponentialMovingAverageTransformation
func NewFillTransformation ¶
func NewFillTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *FillProcedureSpec) *fillTransformation
func NewFilterTransformation ¶
func NewGetColumnFunction ¶ added in v0.29.0
func NewGetRecordFunction ¶ added in v0.29.0
func NewGroupTransformation ¶
func NewGroupTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *GroupProcedureSpec) *groupTransformation
func NewHistogramTransformation ¶
func NewHistogramTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *HistogramProcedureSpec) *histogramTransformation
func NewHistorgramQuantileTransformation ¶
func NewHistorgramQuantileTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *HistogramQuantileProcedureSpec) execute.Transformation
func NewHoltWintersTransformation ¶ added in v0.38.0
func NewHoltWintersTransformation(d execute.Dataset, cache execute.TableBuilderCache, alloc *fluxmemory.Allocator, spec *HoltWintersProcedureSpec) *holtWintersTransformation
func NewHourSelectionTransformation ¶ added in v0.39.0
func NewHourSelectionTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *HourSelectionProcedureSpec) *hourSelectionTransformation
func NewIntegralTransformation ¶
func NewIntegralTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *IntegralProcedureSpec) *integralTransformation
func NewKeyValuesTransformation ¶
func NewKeyValuesTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *KeyValuesProcedureSpec) *keyValuesTransformation
func NewKeysTransformation ¶
func NewKeysTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *KeysProcedureSpec) *keysTransformation
func NewLimitTransformation ¶
func NewLimitTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *LimitProcedureSpec) *limitTransformation
func NewMapTransformation ¶
func NewMapTransformation(ctx context.Context, spec *MapProcedureSpec, d execute.Dataset, cache execute.TableBuilderCache) (*mapTransformation, error)
func NewMergeJoinTransformation ¶
func NewMergeJoinTransformation(d execute.Dataset, cache *MergeJoinCache, spec *MergeJoinProcedureSpec, parents []execute.DatasetID, tableNames map[execute.DatasetID]string) *mergeJoinTransformation
func NewModeTransformation ¶ added in v0.36.0
func NewModeTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ModeProcedureSpec) *modeTransformation
func NewMovingAverageTransformation ¶ added in v0.36.0
func NewMovingAverageTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *MovingAverageProcedureSpec) *movingAverageTransformation
func NewPivotTransformation ¶
func NewPivotTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *PivotProcedureSpec) *pivotTransformation
func NewRangeTransformation ¶
func NewRangeTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *RangeProcedureSpec, absolute execute.Bounds) (*rangeTransformation, error)
func NewReduceTransformation ¶ added in v0.23.0
func NewReduceTransformation(ctx context.Context, spec *ReduceProcedureSpec, d execute.Dataset, cache execute.TableBuilderCache) (*reduceTransformation, error)
func NewRelativeStrengthIndexTransformation ¶ added in v0.38.0
func NewRelativeStrengthIndexTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *RelativeStrengthIndexProcedureSpec) *relativeStrengthIndexTransformation
func NewSchemaMutationTransformation ¶
func NewSchemaMutationTransformation(ctx context.Context, spec plan.ProcedureSpec, d execute.Dataset, cache execute.TableBuilderCache) (*schemaMutationTransformation, error)
func NewSetTransformation ¶
func NewSetTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *SetProcedureSpec) execute.Transformation
func NewShiftTransformation ¶
func NewShiftTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ShiftProcedureSpec) *shiftTransformation
func NewSortTransformation ¶
func NewSortTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *SortProcedureSpec) *sortTransformation
func NewStateTrackingTransformation ¶
func NewStateTrackingTransformation(ctx context.Context, spec *StateTrackingProcedureSpec, d execute.Dataset, cache execute.TableBuilderCache) (*stateTrackingTransformation, error)
func NewTableFindFunction ¶ added in v0.29.0
func NewTailTransformation ¶ added in v0.39.0
func NewTailTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *TailProcedureSpec) *tailTransformation
func NewTripleExponentialDerivativeTransformation ¶ added in v0.40.0
func NewTripleExponentialDerivativeTransformation(d execute.Dataset, cache execute.TableBuilderCache, alloc *memory.Allocator, spec *TripleExponentialDerivativeProcedureSpec) *tripleExponentialDerivativeTransformation
func NewUnionTransformation ¶ added in v0.15.0
func NewUnionTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *UnionProcedureSpec, parents []execute.DatasetID) *unionTransformation
func NewUniqueTransformation ¶
func NewUniqueTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *UniqueProcedureSpec) *uniqueTransformation
func NewkamaTransformation ¶ added in v0.40.0
func NewkamaTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *KamaProcedureSpec) *kamaTransformation
Types ¶
type BuilderContext ¶
func NewBuilderContext ¶
func NewBuilderContext(tbl flux.Table) *BuilderContext
func (*BuilderContext) ColMap ¶
func (b *BuilderContext) ColMap() []int
func (*BuilderContext) Cols ¶
func (b *BuilderContext) Cols() []flux.ColMeta
func (*BuilderContext) Key ¶
func (b *BuilderContext) Key() flux.GroupKey
type ChandeMomentumOscillatorOpSpec ¶ added in v0.39.0
func (*ChandeMomentumOscillatorOpSpec) Kind ¶ added in v0.39.0
func (s *ChandeMomentumOscillatorOpSpec) Kind() flux.OperationKind
type ChandeMomentumOscillatorProcedureSpec ¶ added in v0.39.0
type ChandeMomentumOscillatorProcedureSpec struct {
    plan.DefaultCost
    N int64 `json:"n"`
    Columns []string `json:"columns"`
}
func (*ChandeMomentumOscillatorProcedureSpec) Copy ¶ added in v0.39.0
func (s *ChandeMomentumOscillatorProcedureSpec) Copy() plan.ProcedureSpec
func (*ChandeMomentumOscillatorProcedureSpec) Kind ¶ added in v0.39.0
func (s *ChandeMomentumOscillatorProcedureSpec) Kind() plan.ProcedureKind
func (*ChandeMomentumOscillatorProcedureSpec) TriggerSpec ¶ added in v0.39.0
func (s *ChandeMomentumOscillatorProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type ColumnsOpSpec ¶
type ColumnsOpSpec struct {
Column string `json:"column"`
}
func (*ColumnsOpSpec) Kind ¶
func (s *ColumnsOpSpec) Kind() flux.OperationKind
type ColumnsProcedureSpec ¶
type ColumnsProcedureSpec struct { plan.DefaultCost Column string }
func (*ColumnsProcedureSpec) Copy ¶
func (s *ColumnsProcedureSpec) Copy() plan.ProcedureSpec
func (*ColumnsProcedureSpec) Kind ¶
func (s *ColumnsProcedureSpec) Kind() plan.ProcedureKind
type CountAgg ¶
type CountAgg struct {
// contains filtered or unexported fields
}
func (*CountAgg) NewBoolAgg ¶
func (*CountAgg) NewFloatAgg ¶
func (a *CountAgg) NewFloatAgg() execute.DoFloatAgg
func (*CountAgg) NewStringAgg ¶
func (a *CountAgg) NewStringAgg() execute.DoStringAgg
func (*CountAgg) NewUIntAgg ¶
type CountOpSpec ¶
type CountOpSpec struct {
execute.AggregateConfig
}
func (*CountOpSpec) Kind ¶
func (s *CountOpSpec) Kind() flux.OperationKind
type CountProcedureSpec ¶
type CountProcedureSpec struct {
execute.AggregateConfig
}
func (*CountProcedureSpec) AggregateMethod ¶
func (s *CountProcedureSpec) AggregateMethod() string
func (*CountProcedureSpec) Copy ¶
func (s *CountProcedureSpec) Copy() plan.ProcedureSpec
func (*CountProcedureSpec) Kind ¶
func (s *CountProcedureSpec) Kind() plan.ProcedureKind
func (*CountProcedureSpec) ReAggregateSpec ¶
func (s *CountProcedureSpec) ReAggregateSpec() plan.ProcedureSpec
func (*CountProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *CountProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type CovarianceOpSpec ¶
type CovarianceOpSpec struct {
    PearsonCorrelation bool `json:"pearsonr"`
    ValueDst string `json:"valueDst"`
    Columns []string `json:"column"`
}
func (*CovarianceOpSpec) Kind ¶
func (s *CovarianceOpSpec) Kind() flux.OperationKind
type CovarianceProcedureSpec ¶
type CovarianceProcedureSpec struct {
    plan.DefaultCost
    PearsonCorrelation bool
    ValueLabel string
    Columns []string
}
func (*CovarianceProcedureSpec) Copy ¶
func (s *CovarianceProcedureSpec) Copy() plan.ProcedureSpec
func (*CovarianceProcedureSpec) Kind ¶
func (s *CovarianceProcedureSpec) Kind() plan.ProcedureKind
func (*CovarianceProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *CovarianceProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type CovarianceTransformation ¶
type CovarianceTransformation struct {
// contains filtered or unexported fields
}
func NewCovarianceTransformation ¶
func NewCovarianceTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *CovarianceProcedureSpec) *CovarianceTransformation
func (*CovarianceTransformation) DoFloat ¶
func (t *CovarianceTransformation) DoFloat(xs, ys *array.Float64)
func (*CovarianceTransformation) Finish ¶
func (t *CovarianceTransformation) Finish(id execute.DatasetID, err error)
func (*CovarianceTransformation) RetractTable ¶
func (*CovarianceTransformation) UpdateProcessingTime ¶
func (*CovarianceTransformation) UpdateWatermark ¶
type CumulativeSumOpSpec ¶
type CumulativeSumOpSpec struct {
Columns []string `json:"columns"`
}
func (*CumulativeSumOpSpec) Kind ¶
func (s *CumulativeSumOpSpec) Kind() flux.OperationKind
type CumulativeSumProcedureSpec ¶
type CumulativeSumProcedureSpec struct { plan.DefaultCost Columns []string }
func (*CumulativeSumProcedureSpec) Copy ¶
func (s *CumulativeSumProcedureSpec) Copy() plan.ProcedureSpec
func (*CumulativeSumProcedureSpec) Kind ¶
func (s *CumulativeSumProcedureSpec) Kind() plan.ProcedureKind
func (*CumulativeSumProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *CumulativeSumProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type DerivativeOpSpec ¶
type DerivativeOpSpec struct {
    Unit flux.Duration `json:"unit"`
    NonNegative bool `json:"nonNegative"`
    Columns []string `json:"columns"`
    TimeColumn string `json:"timeColumn"`
}
func (*DerivativeOpSpec) Kind ¶
func (s *DerivativeOpSpec) Kind() flux.OperationKind
type DerivativeProcedureSpec ¶
type DerivativeProcedureSpec struct {
    plan.DefaultCost
    Unit flux.Duration `json:"unit"`
    NonNegative bool `json:"non_negative"`
    Columns []string `json:"columns"`
    TimeColumn string `json:"timeColumn"`
}
func (*DerivativeProcedureSpec) Copy ¶
func (s *DerivativeProcedureSpec) Copy() plan.ProcedureSpec
func (*DerivativeProcedureSpec) Kind ¶
func (s *DerivativeProcedureSpec) Kind() plan.ProcedureKind
func (*DerivativeProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *DerivativeProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type DifferenceOpSpec ¶
type DifferenceOpSpec struct {
    NonNegative bool `json:"nonNegative"`
    Columns []string `json:"columns"`
    KeepFirst bool `json:"keepFirst"`
}
func (*DifferenceOpSpec) Kind ¶
func (s *DifferenceOpSpec) Kind() flux.OperationKind
type DifferenceProcedureSpec ¶
type DifferenceProcedureSpec struct {
    plan.DefaultCost
    NonNegative bool `json:"non_negative"`
    Columns []string `json:"columns"`
    KeepFirst bool `json:"keepFirst"`
}
func (*DifferenceProcedureSpec) Copy ¶
func (s *DifferenceProcedureSpec) Copy() plan.ProcedureSpec
func (*DifferenceProcedureSpec) Kind ¶
func (s *DifferenceProcedureSpec) Kind() plan.ProcedureKind
func (*DifferenceProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *DifferenceProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type DistinctOpSpec ¶
type DistinctOpSpec struct {
Column string `json:"column"`
}
func (*DistinctOpSpec) Kind ¶
func (s *DistinctOpSpec) Kind() flux.OperationKind
type DistinctProcedureSpec ¶
type DistinctProcedureSpec struct { plan.DefaultCost Column string }
func (*DistinctProcedureSpec) Copy ¶
func (s *DistinctProcedureSpec) Copy() plan.ProcedureSpec
func (*DistinctProcedureSpec) Kind ¶
func (s *DistinctProcedureSpec) Kind() plan.ProcedureKind
func (*DistinctProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *DistinctProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type DropKeepMutator ¶
type DropKeepMutator struct {
    KeepCols map[string]bool
    DropCols map[string]bool
    Predicate compiler.Func
    FlipPredicate bool
    ParamName string
    Input values.Object
}
func NewDropKeepMutator ¶
func NewDropKeepMutator(qs flux.OperationSpec) (*DropKeepMutator, error)
func (*DropKeepMutator) Mutate ¶
func (m *DropKeepMutator) Mutate(ctx context.Context, bctx *BuilderContext) error
type DropOpSpec ¶
type DropOpSpec struct {
    Columns []string `json:"columns"`
    Predicate interpreter.ResolvedFunction `json:"fn"`
}
func (*DropOpSpec) Copy ¶
func (s *DropOpSpec) Copy() SchemaMutation
func (*DropOpSpec) Kind ¶
func (s *DropOpSpec) Kind() flux.OperationKind
func (*DropOpSpec) Mutator ¶
func (s *DropOpSpec) Mutator() (SchemaMutator, error)
type DuplicateMutator ¶
func NewDuplicateMutator ¶
func NewDuplicateMutator(qs flux.OperationSpec) (*DuplicateMutator, error)
TODO: figure out what we'd like to do with the context and dependencies here
func (*DuplicateMutator) Mutate ¶
func (m *DuplicateMutator) Mutate(ctx context.Context, bctx *BuilderContext) error
type DuplicateOpSpec ¶
func (*DuplicateOpSpec) Copy ¶
func (s *DuplicateOpSpec) Copy() SchemaMutation
func (*DuplicateOpSpec) Kind ¶
func (s *DuplicateOpSpec) Kind() flux.OperationKind
func (*DuplicateOpSpec) Mutator ¶
func (s *DuplicateOpSpec) Mutator() (SchemaMutator, error)
type ElapsedOpSpec ¶ added in v0.36.0
type ElapsedOpSpec struct {
    Unit flux.Duration `json:"unit"`
    TimeColumn string `json:"timeColumn"`
    ColumnName string `json:"columnName"`
}
func (*ElapsedOpSpec) Kind ¶ added in v0.36.0
func (s *ElapsedOpSpec) Kind() flux.OperationKind
type ElapsedProcedureSpec ¶ added in v0.36.0
type ElapsedProcedureSpec struct {
    plan.DefaultCost
    Unit flux.Duration `json:"unit"`
    TimeColumn string `json:"timeColumn"`
    ColumnName string `json:"columnName"`
}
func (*ElapsedProcedureSpec) Copy ¶ added in v0.36.0
func (s *ElapsedProcedureSpec) Copy() plan.ProcedureSpec
func (*ElapsedProcedureSpec) Kind ¶ added in v0.36.0
func (s *ElapsedProcedureSpec) Kind() plan.ProcedureKind
type ExactQuantileAgg ¶ added in v0.24.0
type ExactQuantileAgg struct {
    Quantile float64
    // contains filtered or unexported fields
}
func (*ExactQuantileAgg) Copy ¶ added in v0.24.0
func (a *ExactQuantileAgg) Copy() *ExactQuantileAgg
func (*ExactQuantileAgg) DoFloat ¶ added in v0.24.0
func (a *ExactQuantileAgg) DoFloat(vs *array.Float64)
func (*ExactQuantileAgg) IsNull ¶ added in v0.24.0
func (a *ExactQuantileAgg) IsNull() bool
func (*ExactQuantileAgg) NewBoolAgg ¶ added in v0.24.0
func (a *ExactQuantileAgg) NewBoolAgg() execute.DoBoolAgg
func (*ExactQuantileAgg) NewFloatAgg ¶ added in v0.24.0
func (a *ExactQuantileAgg) NewFloatAgg() execute.DoFloatAgg
func (*ExactQuantileAgg) NewIntAgg ¶ added in v0.24.0
func (a *ExactQuantileAgg) NewIntAgg() execute.DoIntAgg
func (*ExactQuantileAgg) NewStringAgg ¶ added in v0.24.0
func (a *ExactQuantileAgg) NewStringAgg() execute.DoStringAgg
func (*ExactQuantileAgg) NewUIntAgg ¶ added in v0.24.0
func (a *ExactQuantileAgg) NewUIntAgg() execute.DoUIntAgg
func (*ExactQuantileAgg) Type ¶ added in v0.24.0
func (a *ExactQuantileAgg) Type() flux.ColType
func (*ExactQuantileAgg) ValueFloat ¶ added in v0.24.0
func (a *ExactQuantileAgg) ValueFloat() float64
type ExactQuantileAggProcedureSpec ¶ added in v0.24.0
type ExactQuantileAggProcedureSpec struct {
    Quantile float64 `json:"quantile"`
    execute.AggregateConfig
}
func (*ExactQuantileAggProcedureSpec) Copy ¶ added in v0.24.0
func (s *ExactQuantileAggProcedureSpec) Copy() plan.ProcedureSpec
func (*ExactQuantileAggProcedureSpec) Kind ¶ added in v0.24.0
func (s *ExactQuantileAggProcedureSpec) Kind() plan.ProcedureKind
func (*ExactQuantileAggProcedureSpec) TriggerSpec ¶ added in v0.24.0
func (s *ExactQuantileAggProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type ExactQuantileSelectProcedureSpec ¶ added in v0.24.0
type ExactQuantileSelectProcedureSpec struct {
    Quantile float64 `json:"quantile"`
    execute.SelectorConfig
}
func (*ExactQuantileSelectProcedureSpec) Copy ¶ added in v0.24.0
func (s *ExactQuantileSelectProcedureSpec) Copy() plan.ProcedureSpec
func (*ExactQuantileSelectProcedureSpec) Kind ¶ added in v0.24.0
func (s *ExactQuantileSelectProcedureSpec) Kind() plan.ProcedureKind
func (*ExactQuantileSelectProcedureSpec) TriggerSpec ¶ added in v0.24.0
func (s *ExactQuantileSelectProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type ExactQuantileSelectorTransformation ¶ added in v0.24.0
type ExactQuantileSelectorTransformation struct {
// contains filtered or unexported fields
}
func NewExactQuantileSelectorTransformation ¶ added in v0.24.0
func NewExactQuantileSelectorTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ExactQuantileSelectProcedureSpec, a *memory.Allocator) *ExactQuantileSelectorTransformation
func (*ExactQuantileSelectorTransformation) Finish ¶ added in v0.24.0
func (t *ExactQuantileSelectorTransformation) Finish(id execute.DatasetID, err error)
func (*ExactQuantileSelectorTransformation) RetractTable ¶ added in v0.24.0
func (*ExactQuantileSelectorTransformation) UpdateProcessingTime ¶ added in v0.24.0
func (*ExactQuantileSelectorTransformation) UpdateWatermark ¶ added in v0.24.0
type ExponentialMovingAverageOpSpec ¶ added in v0.37.0
type ExponentialMovingAverageOpSpec struct {
N int64 `json:"n"`
}
func (*ExponentialMovingAverageOpSpec) Kind ¶ added in v0.37.0
func (s *ExponentialMovingAverageOpSpec) Kind() flux.OperationKind
type ExponentialMovingAverageProcedureSpec ¶ added in v0.37.0
type ExponentialMovingAverageProcedureSpec struct {
    plan.DefaultCost
    N int64 `json:"n"`
}
func (*ExponentialMovingAverageProcedureSpec) Copy ¶ added in v0.37.0
func (s *ExponentialMovingAverageProcedureSpec) Copy() plan.ProcedureSpec
func (*ExponentialMovingAverageProcedureSpec) Kind ¶ added in v0.37.0
func (s *ExponentialMovingAverageProcedureSpec) Kind() plan.ProcedureKind
func (*ExponentialMovingAverageProcedureSpec) TriggerSpec ¶ added in v0.37.0
func (s *ExponentialMovingAverageProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type FillOpSpec ¶
type FillOpSpec struct {
    Column string `json:"column"`
    Type string `json:"type"`
    Value string `json:"value"`
    UsePrevious bool `json:"use_previous"`
}
func (*FillOpSpec) Kind ¶
func (s *FillOpSpec) Kind() flux.OperationKind
type FillProcedureSpec ¶
type FillProcedureSpec struct {
    plan.DefaultCost
    Column string
    Value values.Value
    UsePrevious bool
}
func (*FillProcedureSpec) Copy ¶
func (s *FillProcedureSpec) Copy() plan.ProcedureSpec
func (*FillProcedureSpec) Kind ¶
func (s *FillProcedureSpec) Kind() plan.ProcedureKind
type FilterOpSpec ¶
type FilterOpSpec struct {
Fn interpreter.ResolvedFunction `json:"fn"`
}
func (*FilterOpSpec) Kind ¶
func (s *FilterOpSpec) Kind() flux.OperationKind
type FilterProcedureSpec ¶
type FilterProcedureSpec struct { plan.DefaultCost Fn interpreter.ResolvedFunction }
func (*FilterProcedureSpec) Copy ¶
func (s *FilterProcedureSpec) Copy() plan.ProcedureSpec
func (*FilterProcedureSpec) Kind ¶
func (s *FilterProcedureSpec) Kind() plan.ProcedureKind
func (*FilterProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *FilterProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type FirstOpSpec ¶
type FirstOpSpec struct {
execute.SelectorConfig
}
func (*FirstOpSpec) Kind ¶
func (s *FirstOpSpec) Kind() flux.OperationKind
type FirstProcedureSpec ¶
type FirstProcedureSpec struct {
execute.SelectorConfig
}
func (*FirstProcedureSpec) Copy ¶
func (s *FirstProcedureSpec) Copy() plan.ProcedureSpec
func (*FirstProcedureSpec) Kind ¶
func (s *FirstProcedureSpec) Kind() plan.ProcedureKind
func (*FirstProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *FirstProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type FirstSelector ¶
type FirstSelector struct {
// contains filtered or unexported fields
}
func (*FirstSelector) DoTime ¶ added in v0.38.0
func (s *FirstSelector) DoTime(vs *array.Int64) []int
func (*FirstSelector) NewBoolSelector ¶
func (s *FirstSelector) NewBoolSelector() execute.DoBoolIndexSelector
func (*FirstSelector) NewFloatSelector ¶
func (s *FirstSelector) NewFloatSelector() execute.DoFloatIndexSelector
func (*FirstSelector) NewIntSelector ¶
func (s *FirstSelector) NewIntSelector() execute.DoIntIndexSelector
func (*FirstSelector) NewStringSelector ¶
func (s *FirstSelector) NewStringSelector() execute.DoStringIndexSelector
func (*FirstSelector) NewTimeSelector ¶ added in v0.38.0
func (s *FirstSelector) NewTimeSelector() execute.DoTimeIndexSelector
func (*FirstSelector) NewUIntSelector ¶
func (s *FirstSelector) NewUIntSelector() execute.DoUIntIndexSelector
type GroupOpSpec ¶
func (*GroupOpSpec) Kind ¶
func (s *GroupOpSpec) Kind() flux.OperationKind
type GroupProcedureSpec ¶
type GroupProcedureSpec struct {
    plan.DefaultCost
    GroupMode flux.GroupMode
    GroupKeys []string
}
func (*GroupProcedureSpec) Copy ¶
func (s *GroupProcedureSpec) Copy() plan.ProcedureSpec
func (*GroupProcedureSpec) Kind ¶
func (s *GroupProcedureSpec) Kind() plan.ProcedureKind
type HistogramOpSpec ¶
type HistogramOpSpec struct {
    Column string `json:"column"`
    UpperBoundColumn string `json:"upperBoundColumn"`
    CountColumn string `json:"countColumn"`
    Bins []float64 `json:"bins"`
    Normalize bool `json:"normalize"`
}
func (*HistogramOpSpec) Kind ¶
func (s *HistogramOpSpec) Kind() flux.OperationKind
type HistogramProcedureSpec ¶
type HistogramProcedureSpec struct { plan.DefaultCost HistogramOpSpec }
func (*HistogramProcedureSpec) Copy ¶
func (s *HistogramProcedureSpec) Copy() plan.ProcedureSpec
func (*HistogramProcedureSpec) Kind ¶
func (s *HistogramProcedureSpec) Kind() plan.ProcedureKind
type HistogramQuantileOpSpec ¶
type HistogramQuantileOpSpec struct {
    Quantile float64 `json:"quantile"`
    CountColumn string `json:"countColumn"`
    UpperBoundColumn string `json:"upperBoundColumn"`
    ValueColumn string `json:"valueColumn"`
    MinValue float64 `json:"minValue"`
}
func (*HistogramQuantileOpSpec) Kind ¶
func (s *HistogramQuantileOpSpec) Kind() flux.OperationKind
type HistogramQuantileProcedureSpec ¶
type HistogramQuantileProcedureSpec struct {
    plan.DefaultCost
    Quantile float64 `json:"quantile"`
    CountColumn string `json:"countColumn"`
    UpperBoundColumn string `json:"upperBoundColumn"`
    ValueColumn string `json:"valueColumn"`
    MinValue float64 `json:"minValue"`
}
func (*HistogramQuantileProcedureSpec) Copy ¶
func (s *HistogramQuantileProcedureSpec) Copy() plan.ProcedureSpec
func (*HistogramQuantileProcedureSpec) Kind ¶
func (s *HistogramQuantileProcedureSpec) Kind() plan.ProcedureKind
type HoltWintersOpSpec ¶ added in v0.38.0
type HoltWintersOpSpec struct {
    WithFit bool `json:"with_fit"`
    Column string `json:"column"`
    TimeColumn string `json:"time_column"`
    N int64 `json:"n"`
    S int64 `json:"s"`
    Interval flux.Duration `json:"interval"`
}
func (*HoltWintersOpSpec) Kind ¶ added in v0.38.0
func (s *HoltWintersOpSpec) Kind() flux.OperationKind
type HoltWintersProcedureSpec ¶ added in v0.38.0
type HoltWintersProcedureSpec struct {
    plan.DefaultCost
    WithFit bool
    Column string
    TimeColumn string
    N int64
    S int64
    Interval flux.Duration
}
func (*HoltWintersProcedureSpec) Copy ¶ added in v0.38.0
func (s *HoltWintersProcedureSpec) Copy() plan.ProcedureSpec
func (*HoltWintersProcedureSpec) Kind ¶ added in v0.38.0
func (s *HoltWintersProcedureSpec) Kind() plan.ProcedureKind
func (*HoltWintersProcedureSpec) TriggerSpec ¶ added in v0.38.0
func (s *HoltWintersProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type HourSelectionOpSpec ¶ added in v0.39.0
type HourSelectionOpSpec struct {
    Start int64 `json:"start"`
    Stop int64 `json:"stop"`
    TimeColumn string `json:"timeColumn"`
}
func (*HourSelectionOpSpec) Kind ¶ added in v0.39.0
func (s *HourSelectionOpSpec) Kind() flux.OperationKind
type HourSelectionProcedureSpec ¶ added in v0.39.0
type HourSelectionProcedureSpec struct {
    plan.DefaultCost
    Start int64 `json:"start"`
    Stop int64 `json:"stop"`
    TimeColumn string `json:"timeColumn"`
}
func (*HourSelectionProcedureSpec) Copy ¶ added in v0.39.0
func (s *HourSelectionProcedureSpec) Copy() plan.ProcedureSpec
func (*HourSelectionProcedureSpec) Kind ¶ added in v0.39.0
func (s *HourSelectionProcedureSpec) Kind() plan.ProcedureKind
func (*HourSelectionProcedureSpec) TriggerSpec ¶ added in v0.39.0
func (s *HourSelectionProcedureSpec) TriggerSpec() plan.TriggerSpec
type IntegralOpSpec ¶
type IntegralOpSpec struct {
    Unit flux.Duration `json:"unit"`
    TimeColumn string `json:"timeColumn"`
    execute.AggregateConfig
}
func (*IntegralOpSpec) Kind ¶
func (s *IntegralOpSpec) Kind() flux.OperationKind
type IntegralProcedureSpec ¶
type IntegralProcedureSpec struct {
    Unit flux.Duration `json:"unit"`
    TimeColumn string `json:"timeColumn"`
    execute.AggregateConfig
}
func (*IntegralProcedureSpec) Copy ¶
func (s *IntegralProcedureSpec) Copy() plan.ProcedureSpec
func (*IntegralProcedureSpec) Kind ¶
func (s *IntegralProcedureSpec) Kind() plan.ProcedureKind
func (*IntegralProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *IntegralProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type JoinOpSpec ¶
type JoinOpSpec struct {
    TableNames map[flux.OperationID]string `json:"tableNames"`
    On []string `json:"on"`
    Method string `json:"method"`
    // contains filtered or unexported fields
}
JoinOpSpec specifies a particular join operation
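For example, the embedded test script above builds a call of the form `join(tables: {lhs: lhs, rhs: rhs}, on: ["org_id"])`, which is the kind of operation this spec describes.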
func (*JoinOpSpec) IDer ¶
func (t *JoinOpSpec) IDer(ider flux.IDer)
func (*JoinOpSpec) Kind ¶
func (s *JoinOpSpec) Kind() flux.OperationKind
type KamaOpSpec ¶ added in v0.40.0
func (*KamaOpSpec) Kind ¶ added in v0.40.0
func (s *KamaOpSpec) Kind() flux.OperationKind
type KamaProcedureSpec ¶ added in v0.40.0
type KamaProcedureSpec struct {
    plan.DefaultCost
    N int64 `json:"n"`
    Column string `json:"column"`
}
func (*KamaProcedureSpec) Copy ¶ added in v0.40.0
func (s *KamaProcedureSpec) Copy() plan.ProcedureSpec
func (*KamaProcedureSpec) Kind ¶ added in v0.40.0
func (s *KamaProcedureSpec) Kind() plan.ProcedureKind
func (*KamaProcedureSpec) TriggerSpec ¶ added in v0.40.0
func (s *KamaProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type KeepOpSpec ¶
type KeepOpSpec struct {
    Columns []string `json:"columns"`
    Predicate interpreter.ResolvedFunction `json:"fn"`
}
func (*KeepOpSpec) Copy ¶
func (s *KeepOpSpec) Copy() SchemaMutation
func (*KeepOpSpec) Kind ¶
func (s *KeepOpSpec) Kind() flux.OperationKind
func (*KeepOpSpec) Mutator ¶
func (s *KeepOpSpec) Mutator() (SchemaMutator, error)
type KeyValuesOpSpec ¶
type KeyValuesOpSpec struct {
    KeyColumns []string `json:"keyColumns"`
    PredicateFn interpreter.ResolvedFunction `json:"fn"`
}
func (*KeyValuesOpSpec) Kind ¶
func (s *KeyValuesOpSpec) Kind() flux.OperationKind
type KeyValuesProcedureSpec ¶
type KeyValuesProcedureSpec struct {
    plan.DefaultCost
    KeyColumns []string `json:"keyColumns"`
    Predicate interpreter.ResolvedFunction `json:"fn"`
}
func (*KeyValuesProcedureSpec) Copy ¶
func (s *KeyValuesProcedureSpec) Copy() plan.ProcedureSpec
func (*KeyValuesProcedureSpec) Kind ¶
func (s *KeyValuesProcedureSpec) Kind() plan.ProcedureKind
func (*KeyValuesProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *KeyValuesProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type KeysOpSpec ¶
type KeysOpSpec struct {
Column string `json:"column"`
}
func (*KeysOpSpec) Kind ¶
func (s *KeysOpSpec) Kind() flux.OperationKind
type KeysProcedureSpec ¶
type KeysProcedureSpec struct { plan.DefaultCost Column string }
func (*KeysProcedureSpec) Copy ¶
func (s *KeysProcedureSpec) Copy() plan.ProcedureSpec
func (*KeysProcedureSpec) Kind ¶
func (s *KeysProcedureSpec) Kind() plan.ProcedureKind
func (*KeysProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *KeysProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type LastOpSpec ¶
type LastOpSpec struct {
execute.SelectorConfig
}
func (*LastOpSpec) Kind ¶
func (s *LastOpSpec) Kind() flux.OperationKind
type LastProcedureSpec ¶
type LastProcedureSpec struct {
execute.SelectorConfig
}
func (*LastProcedureSpec) Copy ¶
func (s *LastProcedureSpec) Copy() plan.ProcedureSpec
func (*LastProcedureSpec) Kind ¶
func (s *LastProcedureSpec) Kind() plan.ProcedureKind
func (*LastProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *LastProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type LastSelector ¶
type LastSelector struct {
// contains filtered or unexported fields
}
LastSelector selects the last row from a Flux table. Note that while 'last' and 'first' are conceptually similar, one is a row selector (last) while the other is an index selector (first). The reason for this is that it was easier to ensure a correct implementation of 'last' by defining it as a row selector when using multiple column readers to iterate over a Flux table.
func (*LastSelector) DoString ¶
func (s *LastSelector) DoString(vs *array.Binary, cr flux.ColReader)
func (*LastSelector) DoTime ¶ added in v0.38.0
func (s *LastSelector) DoTime(vs *array.Int64, cr flux.ColReader)
func (*LastSelector) NewBoolSelector ¶
func (s *LastSelector) NewBoolSelector() execute.DoBoolRowSelector
func (*LastSelector) NewFloatSelector ¶
func (s *LastSelector) NewFloatSelector() execute.DoFloatRowSelector
func (*LastSelector) NewIntSelector ¶
func (s *LastSelector) NewIntSelector() execute.DoIntRowSelector
func (*LastSelector) NewStringSelector ¶
func (s *LastSelector) NewStringSelector() execute.DoStringRowSelector
func (*LastSelector) NewTimeSelector ¶ added in v0.38.0
func (s *LastSelector) NewTimeSelector() execute.DoTimeRowSelector
func (*LastSelector) NewUIntSelector ¶
func (s *LastSelector) NewUIntSelector() execute.DoUIntRowSelector
func (*LastSelector) Rows ¶
func (s *LastSelector) Rows() []execute.Row
type LimitOpSpec ¶
LimitOpSpec limits the number of rows returned per table.
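For example, the Flux call `limit(n: 10)` keeps at most the first 10 rows of each input table.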
func (*LimitOpSpec) Kind ¶
func (s *LimitOpSpec) Kind() flux.OperationKind
type LimitProcedureSpec ¶
type LimitProcedureSpec struct {
    plan.DefaultCost
    N int64 `json:"n"`
    Offset int64 `json:"offset"`
}
func (*LimitProcedureSpec) Copy ¶
func (s *LimitProcedureSpec) Copy() plan.ProcedureSpec
func (*LimitProcedureSpec) Kind ¶
func (s *LimitProcedureSpec) Kind() plan.ProcedureKind
func (*LimitProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *LimitProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type MapOpSpec ¶
type MapOpSpec struct {
    Fn interpreter.ResolvedFunction `json:"fn"`
    MergeKey bool `json:"mergeKey"`
}
func (*MapOpSpec) Kind ¶
func (s *MapOpSpec) Kind() flux.OperationKind
type MapProcedureSpec ¶
type MapProcedureSpec struct {
    plan.DefaultCost
    Fn interpreter.ResolvedFunction `json:"fn"`
    MergeKey bool
}
func (*MapProcedureSpec) Copy ¶
func (s *MapProcedureSpec) Copy() plan.ProcedureSpec
func (*MapProcedureSpec) Kind ¶
func (s *MapProcedureSpec) Kind() plan.ProcedureKind
type MaxFloatSelector ¶
type MaxFloatSelector struct {
    MaxSelector
    // contains filtered or unexported fields
}
type MaxIntSelector ¶
type MaxIntSelector struct {
    MaxSelector
    // contains filtered or unexported fields
}
type MaxOpSpec ¶
type MaxOpSpec struct {
execute.SelectorConfig
}
func (*MaxOpSpec) Kind ¶
func (s *MaxOpSpec) Kind() flux.OperationKind
type MaxProcedureSpec ¶
type MaxProcedureSpec struct {
execute.SelectorConfig
}
func (*MaxProcedureSpec) Copy ¶
func (s *MaxProcedureSpec) Copy() plan.ProcedureSpec
func (*MaxProcedureSpec) Kind ¶
func (s *MaxProcedureSpec) Kind() plan.ProcedureKind
func (*MaxProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *MaxProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type MaxSelector ¶
type MaxSelector struct {
// contains filtered or unexported fields
}
func (*MaxSelector) NewBoolSelector ¶
func (s *MaxSelector) NewBoolSelector() execute.DoBoolRowSelector
func (*MaxSelector) NewFloatSelector ¶
func (s *MaxSelector) NewFloatSelector() execute.DoFloatRowSelector
func (*MaxSelector) NewIntSelector ¶
func (s *MaxSelector) NewIntSelector() execute.DoIntRowSelector
func (*MaxSelector) NewStringSelector ¶
func (s *MaxSelector) NewStringSelector() execute.DoStringRowSelector
func (*MaxSelector) NewTimeSelector ¶ added in v0.38.0
func (s *MaxSelector) NewTimeSelector() execute.DoTimeRowSelector
func (*MaxSelector) NewUIntSelector ¶
func (s *MaxSelector) NewUIntSelector() execute.DoUIntRowSelector
func (*MaxSelector) Rows ¶
func (s *MaxSelector) Rows() []execute.Row
type MaxTimeSelector ¶ added in v0.38.0
type MaxTimeSelector struct {
MaxIntSelector
}
type MaxUIntSelector ¶
type MaxUIntSelector struct {
    MaxSelector
    // contains filtered or unexported fields
}
type MeanAgg ¶
type MeanAgg struct {
// contains filtered or unexported fields
}
func (*MeanAgg) NewBoolAgg ¶
func (*MeanAgg) NewFloatAgg ¶
func (a *MeanAgg) NewFloatAgg() execute.DoFloatAgg
func (*MeanAgg) NewStringAgg ¶
func (a *MeanAgg) NewStringAgg() execute.DoStringAgg
func (*MeanAgg) NewUIntAgg ¶
func (*MeanAgg) ValueFloat ¶
type MeanOpSpec ¶
type MeanOpSpec struct {
execute.AggregateConfig
}
func (*MeanOpSpec) Kind ¶
func (s *MeanOpSpec) Kind() flux.OperationKind
type MeanProcedureSpec ¶
type MeanProcedureSpec struct {
execute.AggregateConfig
}
func (*MeanProcedureSpec) Copy ¶
func (s *MeanProcedureSpec) Copy() plan.ProcedureSpec
func (*MeanProcedureSpec) Kind ¶
func (s *MeanProcedureSpec) Kind() plan.ProcedureKind
func (*MeanProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *MeanProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type MergeGroupRule ¶
type MergeGroupRule struct{}
`MergeGroupRule` merges two group operations and keeps only the last one
func (MergeGroupRule) Name ¶
func (r MergeGroupRule) Name() string
func (MergeGroupRule) Pattern ¶
func (r MergeGroupRule) Pattern() plan.Pattern
Pattern returns the pattern that matches `group |> group`.
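For instance, in a hypothetical query `data |> group(columns: ["a"]) |> group(columns: ["b"])`, only the second call determines the output group key, so the planner can rewrite the pair into the single call `group(columns: ["b"])`.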
type MergeJoinCache ¶
type MergeJoinCache struct {
// contains filtered or unexported fields
}
MergeJoinCache implements execute.DataCache. This is where all the tables to be joined are stored.
buffers: Buffers to hold the tables for each incoming stream.
postJoinKeys: The post-join group keys for all joined tables. These group keys are constructed and stored as soon as a table is consumed by the join operator, but prior to actually joining the data.
reverseLookup: Each output group key that is stored is mapped to its corresponding pre-join group keys. These pre-join group keys are then used to retrieve their corresponding tables from the buffers.
tables: All output tables are materialized and stored in this map before being sent to downstream operators.
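A rough, purely illustrative sketch of that layout follows; the real fields are unexported and their exact names and types may differ:

type mergeJoinCacheSketch struct {
    buffers       map[execute.DatasetID][]flux.Table // tables buffered per incoming stream
    postJoinKeys  []flux.GroupKey                    // post-join group keys, recorded before joining
    reverseLookup map[flux.GroupKey][]flux.GroupKey  // output group key -> its pre-join group keys
    tables        map[flux.GroupKey]flux.Table       // materialized output tables awaiting downstream operators
}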
func NewMergeJoinCache ¶
func NewMergeJoinCache(alloc *memory.Allocator, datasetIDs []execute.DatasetID, tableNames map[execute.DatasetID]string, key []string) *MergeJoinCache
NewMergeJoinCache constructs a new instance of a MergeJoinCache
func (*MergeJoinCache) DiscardTable ¶
func (c *MergeJoinCache) DiscardTable(key flux.GroupKey)
DiscardTable removes a table from the output buffer
func (*MergeJoinCache) ExpireTable ¶
func (c *MergeJoinCache) ExpireTable(key flux.GroupKey)
ExpireTable removes a key from the set of postJoinKeys. ExpireTable will be called after the table associated with the key has already been materialized. As a result, it cannot be materialized again. Each buffer is cleared of any stale data that arises as a result of this process.
func (*MergeJoinCache) ForEach ¶
func (c *MergeJoinCache) ForEach(f func(flux.GroupKey))
ForEach iterates over each table in the output stream
func (*MergeJoinCache) ForEachWithContext ¶
func (c *MergeJoinCache) ForEachWithContext(f func(flux.GroupKey, execute.Trigger, execute.TableContext))
ForEachWithContext iterates over each table in the output stream
func (*MergeJoinCache) SetTriggerSpec ¶
func (c *MergeJoinCache) SetTriggerSpec(spec plan.TriggerSpec)
SetTriggerSpec sets the trigger rule for this cache
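A construction sketch based only on the signatures above. The dataset IDs, table names, and join key are placeholders, and plan.DefaultTriggerSpec is assumed to be an acceptable trigger spec.

package transformations

import (
    "github.com/influxdata/flux/execute"
    "github.com/influxdata/flux/memory"
    "github.com/influxdata/flux/plan"
)

// newTwoStreamJoinCache is a hypothetical helper that wires up a cache for a
// join over two input streams keyed on the "host" column.
func newTwoStreamJoinCache(left, right execute.DatasetID) *MergeJoinCache {
    alloc := &memory.Allocator{}
    tableNames := map[execute.DatasetID]string{
        left:  "left",
        right: "right",
    }
    cache := NewMergeJoinCache(alloc, []execute.DatasetID{left, right}, tableNames, []string{"host"})
    // The trigger spec controls when buffered tables may be materialized and flushed;
    // plan.DefaultTriggerSpec is assumed here, but any plan.TriggerSpec could be used.
    cache.SetTriggerSpec(plan.DefaultTriggerSpec)
    return cache
}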
type MergeJoinProcedureSpec ¶
type MergeJoinProcedureSpec struct {
plan.DefaultCost
TableNames []string `json:"table_names"`
On []string `json:"keys"`
}
func (*MergeJoinProcedureSpec) Copy ¶
func (s *MergeJoinProcedureSpec) Copy() plan.ProcedureSpec
func (*MergeJoinProcedureSpec) Kind ¶
func (s *MergeJoinProcedureSpec) Kind() plan.ProcedureKind
type MinFloatSelector ¶
type MinFloatSelector struct {
MinSelector
// contains filtered or unexported fields
}
type MinIntSelector ¶
type MinIntSelector struct {
MinSelector
// contains filtered or unexported fields
}
type MinOpSpec ¶
type MinOpSpec struct {
execute.SelectorConfig
}
func (*MinOpSpec) Kind ¶
func (s *MinOpSpec) Kind() flux.OperationKind
type MinProcedureSpec ¶
type MinProcedureSpec struct {
execute.SelectorConfig
}
func (*MinProcedureSpec) Copy ¶
func (s *MinProcedureSpec) Copy() plan.ProcedureSpec
func (*MinProcedureSpec) Kind ¶
func (s *MinProcedureSpec) Kind() plan.ProcedureKind
func (*MinProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *MinProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type MinSelector ¶
type MinSelector struct {
// contains filtered or unexported fields
}
func (*MinSelector) NewBoolSelector ¶
func (s *MinSelector) NewBoolSelector() execute.DoBoolRowSelector
func (*MinSelector) NewFloatSelector ¶
func (s *MinSelector) NewFloatSelector() execute.DoFloatRowSelector
func (*MinSelector) NewIntSelector ¶
func (s *MinSelector) NewIntSelector() execute.DoIntRowSelector
func (*MinSelector) NewStringSelector ¶
func (s *MinSelector) NewStringSelector() execute.DoStringRowSelector
func (*MinSelector) NewTimeSelector ¶ added in v0.38.0
func (s *MinSelector) NewTimeSelector() execute.DoTimeRowSelector
func (*MinSelector) NewUIntSelector ¶
func (s *MinSelector) NewUIntSelector() execute.DoUIntRowSelector
func (*MinSelector) Rows ¶
func (s *MinSelector) Rows() []execute.Row
type MinTimeSelector ¶ added in v0.38.0
type MinTimeSelector struct {
MinIntSelector
}
type MinUIntSelector ¶
type MinUIntSelector struct {
MinSelector
// contains filtered or unexported fields
}
type ModeOpSpec ¶ added in v0.36.0
type ModeOpSpec struct {
Column string `json:"column"`
}
func (*ModeOpSpec) Kind ¶ added in v0.36.0
func (s *ModeOpSpec) Kind() flux.OperationKind
type ModeProcedureSpec ¶ added in v0.36.0
type ModeProcedureSpec struct {
plan.DefaultCost
Column string
}
func (*ModeProcedureSpec) Copy ¶ added in v0.36.0
func (s *ModeProcedureSpec) Copy() plan.ProcedureSpec
func (*ModeProcedureSpec) Kind ¶ added in v0.36.0
func (s *ModeProcedureSpec) Kind() plan.ProcedureKind
func (*ModeProcedureSpec) TriggerSpec ¶ added in v0.36.0
func (s *ModeProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type MovingAverageOpSpec ¶ added in v0.36.0
type MovingAverageOpSpec struct {
N int64 `json:"n"`
}
func (*MovingAverageOpSpec) Kind ¶ added in v0.36.0
func (s *MovingAverageOpSpec) Kind() flux.OperationKind
type MovingAverageProcedureSpec ¶ added in v0.36.0
type MovingAverageProcedureSpec struct {
plan.DefaultCost
N int64 `json:"n"`
}
func (*MovingAverageProcedureSpec) Copy ¶ added in v0.36.0
func (s *MovingAverageProcedureSpec) Copy() plan.ProcedureSpec
func (*MovingAverageProcedureSpec) Kind ¶ added in v0.36.0
func (s *MovingAverageProcedureSpec) Kind() plan.ProcedureKind
func (*MovingAverageProcedureSpec) TriggerSpec ¶ added in v0.36.0
func (s *MovingAverageProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type MutationRegistrar ¶
type MutationRegistrar struct {
Kind flux.OperationKind
Args map[string]semantic.PolyType
Create flux.CreateOperationSpec
New flux.NewOperationSpec
}
A MutationRegistrar contains information needed to register a type of Operation Spec that will be converted into a SchemaMutator and embedded in a SchemaMutationProcedureSpec. Operations with a corresponding MutationRegistrar should not have their own ProcedureSpec.
func (MutationRegistrar) Register ¶
func (m MutationRegistrar) Register()
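A hypothetical registration sketch: the operation kind and the createMyOpSpec/newMyOpSpec constructors are placeholders invented for illustration, and the argument signature is omitted.

package transformations

import "github.com/influxdata/flux"

// Hypothetical OpSpec constructors for some schema-mutation operation.
var (
    createMyOpSpec flux.CreateOperationSpec
    newMyOpSpec    flux.NewOperationSpec
)

func init() {
    m := MutationRegistrar{
        Kind:   "myMutation", // placeholder operation kind
        Args:   nil,          // argument types omitted in this sketch
        Create: createMyOpSpec,
        New:    newMyOpSpec,
    }
    // Register converts the operation into a SchemaMutator and embeds it in a
    // SchemaMutationProcedureSpec rather than giving it its own ProcedureSpec.
    m.Register()
}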
type PivotOpSpec ¶
type PivotOpSpec struct {
RowKey []string `json:"rowKey"`
ColumnKey []string `json:"columnKey"`
ValueColumn string `json:"valueColumn"`
}
func (*PivotOpSpec) Kind ¶
func (s *PivotOpSpec) Kind() flux.OperationKind
type PivotProcedureSpec ¶
type PivotProcedureSpec struct {
plan.DefaultCost
RowKey []string
ColumnKey []string
ValueColumn string
}
func (*PivotProcedureSpec) Copy ¶
func (s *PivotProcedureSpec) Copy() plan.ProcedureSpec
func (*PivotProcedureSpec) Kind ¶
func (s *PivotProcedureSpec) Kind() plan.ProcedureKind
type QuantileAgg ¶ added in v0.24.0
type QuantileAgg struct {
Quantile, Compression float64
// contains filtered or unexported fields
}
func (*QuantileAgg) Copy ¶ added in v0.24.0
func (a *QuantileAgg) Copy() *QuantileAgg
func (*QuantileAgg) DoFloat ¶ added in v0.24.0
func (a *QuantileAgg) DoFloat(vs *array.Float64)
func (*QuantileAgg) IsNull ¶ added in v0.24.0
func (a *QuantileAgg) IsNull() bool
func (*QuantileAgg) NewBoolAgg ¶ added in v0.24.0
func (a *QuantileAgg) NewBoolAgg() execute.DoBoolAgg
func (*QuantileAgg) NewFloatAgg ¶ added in v0.24.0
func (a *QuantileAgg) NewFloatAgg() execute.DoFloatAgg
func (*QuantileAgg) NewIntAgg ¶ added in v0.24.0
func (a *QuantileAgg) NewIntAgg() execute.DoIntAgg
func (*QuantileAgg) NewStringAgg ¶ added in v0.24.0
func (a *QuantileAgg) NewStringAgg() execute.DoStringAgg
func (*QuantileAgg) NewUIntAgg ¶ added in v0.24.0
func (a *QuantileAgg) NewUIntAgg() execute.DoUIntAgg
func (*QuantileAgg) Type ¶ added in v0.24.0
func (a *QuantileAgg) Type() flux.ColType
func (*QuantileAgg) ValueFloat ¶ added in v0.24.0
func (a *QuantileAgg) ValueFloat() float64
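A rough usage sketch of the methods above. It assumes the *array.Float64 input can be built with the Apache Arrow Go builder API (the import path is an assumption about what flux vendors) and that Copy initializes the aggregate's internal t-digest; the quantile, compression, and values are arbitrary.

package transformations

import (
    "math"

    "github.com/apache/arrow/go/arrow/array"
    "github.com/apache/arrow/go/arrow/memory"
)

// estimateMedian is a hypothetical helper: it feeds one float column into a
// QuantileAgg and reads back the estimated 0.5 quantile.
func estimateMedian(values []float64) float64 {
    b := array.NewFloat64Builder(memory.NewGoAllocator())
    defer b.Release()
    b.AppendValues(values, nil)
    vs := b.NewFloat64Array()
    defer vs.Release()

    agg := (&QuantileAgg{Quantile: 0.5, Compression: 1000}).Copy() // Copy is assumed to set up internal state
    agg.DoFloat(vs) // feed the column into the estimator
    if agg.IsNull() {
        return math.NaN() // nothing was aggregated
    }
    return agg.ValueFloat()
}

The same Do*/Value* shape applies to the other column aggregates listed here (for example SpreadFloatAgg or SumFloatAgg): each exposes a Type, a Do* method for its column type, and a matching Value* accessor.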
type QuantileOpSpec ¶ added in v0.24.0
type QuantileOpSpec struct {
Quantile float64 `json:"quantile"`
Compression float64 `json:"compression"`
Method string `json:"method"`
// quantile is either an aggregate, or a selector based on the options
execute.AggregateConfig
execute.SelectorConfig
}
func (*QuantileOpSpec) Kind ¶ added in v0.24.0
func (s *QuantileOpSpec) Kind() flux.OperationKind
type RangeOpSpec ¶
type RangeOpSpec struct {
Start flux.Time `json:"start"`
Stop flux.Time `json:"stop"`
TimeColumn string `json:"timeColumn"`
StartColumn string `json:"startColumn"`
StopColumn string `json:"stopColumn"`
}
func (*RangeOpSpec) Kind ¶
func (s *RangeOpSpec) Kind() flux.OperationKind
type RangeProcedureSpec ¶
type RangeProcedureSpec struct {
plan.DefaultCost
Bounds flux.Bounds
TimeColumn string
StartColumn string
StopColumn string
}
func (*RangeProcedureSpec) Copy ¶
func (s *RangeProcedureSpec) Copy() plan.ProcedureSpec
func (*RangeProcedureSpec) Kind ¶
func (s *RangeProcedureSpec) Kind() plan.ProcedureKind
func (*RangeProcedureSpec) TimeBounds ¶
func (s *RangeProcedureSpec) TimeBounds(predecessorBounds *plan.Bounds) *plan.Bounds
TimeBounds implements plan.BoundsAwareProcedureSpec
func (*RangeProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *RangeProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type ReduceOpSpec ¶ added in v0.23.0
type ReduceOpSpec struct {
Fn interpreter.ResolvedFunction `json:"fn"`
ReducerType semantic.Type `json:"reducer_type"`
Identity map[string]string `json:"identity"`
}
func (*ReduceOpSpec) Kind ¶ added in v0.23.0
func (s *ReduceOpSpec) Kind() flux.OperationKind
type ReduceProcedureSpec ¶ added in v0.23.0
type ReduceProcedureSpec struct {
plan.DefaultCost
Fn interpreter.ResolvedFunction
ReducerType semantic.Type
Identity map[string]string
}
func (*ReduceProcedureSpec) Copy ¶ added in v0.23.0
func (s *ReduceProcedureSpec) Copy() plan.ProcedureSpec
func (*ReduceProcedureSpec) Kind ¶ added in v0.23.0
func (s *ReduceProcedureSpec) Kind() plan.ProcedureKind
type RelativeStrengthIndexOpSpec ¶ added in v0.38.0
func (*RelativeStrengthIndexOpSpec) Kind ¶ added in v0.38.0
func (s *RelativeStrengthIndexOpSpec) Kind() flux.OperationKind
type RelativeStrengthIndexProcedureSpec ¶ added in v0.38.0
type RelativeStrengthIndexProcedureSpec struct {
plan.DefaultCost
N int64 `json:"n"`
Columns []string `json:"columns"`
}
func (*RelativeStrengthIndexProcedureSpec) Copy ¶ added in v0.38.0
func (s *RelativeStrengthIndexProcedureSpec) Copy() plan.ProcedureSpec
func (*RelativeStrengthIndexProcedureSpec) Kind ¶ added in v0.38.0
func (s *RelativeStrengthIndexProcedureSpec) Kind() plan.ProcedureKind
func (*RelativeStrengthIndexProcedureSpec) TriggerSpec ¶ added in v0.38.0
func (s *RelativeStrengthIndexProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type RemoveTrivialFilterRule ¶ added in v0.15.0
type RemoveTrivialFilterRule struct{}
RemoveTrivialFilterRule removes Filter nodes whose predicate always evaluates to true.
func (RemoveTrivialFilterRule) Name ¶ added in v0.15.0
func (RemoveTrivialFilterRule) Name() string
func (RemoveTrivialFilterRule) Pattern ¶ added in v0.15.0
func (RemoveTrivialFilterRule) Pattern() plan.Pattern
type RenameMutator ¶
type RenameMutator struct {
Columns map[string]string
Fn compiler.Func
Input values.Object
ParamName string
}
func NewRenameMutator ¶
func NewRenameMutator(qs flux.OperationSpec) (*RenameMutator, error)
func (*RenameMutator) Mutate ¶
func (m *RenameMutator) Mutate(ctx context.Context, bctx *BuilderContext) error
type RenameOpSpec ¶
type RenameOpSpec struct {
Columns map[string]string `json:"columns"`
Fn interpreter.ResolvedFunction `json:"fn"`
}
func (*RenameOpSpec) Copy ¶
func (s *RenameOpSpec) Copy() SchemaMutation
func (*RenameOpSpec) Kind ¶
func (s *RenameOpSpec) Kind() flux.OperationKind
func (*RenameOpSpec) Mutator ¶
func (s *RenameOpSpec) Mutator() (SchemaMutator, error)
type SampleOpSpec ¶
type SampleOpSpec struct {
N int64 `json:"n"`
Pos int64 `json:"pos"`
execute.SelectorConfig
}
func (*SampleOpSpec) Kind ¶
func (s *SampleOpSpec) Kind() flux.OperationKind
type SampleProcedureSpec ¶
type SampleProcedureSpec struct {
N int64
Pos int64
execute.SelectorConfig
}
func (*SampleProcedureSpec) Copy ¶
func (s *SampleProcedureSpec) Copy() plan.ProcedureSpec
func (*SampleProcedureSpec) Kind ¶
func (s *SampleProcedureSpec) Kind() plan.ProcedureKind
func (*SampleProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *SampleProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type SampleSelector ¶
func (*SampleSelector) DoTime ¶ added in v0.38.0
func (s *SampleSelector) DoTime(vs *array.Int64) []int
func (*SampleSelector) NewBoolSelector ¶
func (s *SampleSelector) NewBoolSelector() execute.DoBoolIndexSelector
func (*SampleSelector) NewFloatSelector ¶
func (s *SampleSelector) NewFloatSelector() execute.DoFloatIndexSelector
func (*SampleSelector) NewIntSelector ¶
func (s *SampleSelector) NewIntSelector() execute.DoIntIndexSelector
func (*SampleSelector) NewStringSelector ¶
func (s *SampleSelector) NewStringSelector() execute.DoStringIndexSelector
func (*SampleSelector) NewTimeSelector ¶ added in v0.38.0
func (s *SampleSelector) NewTimeSelector() execute.DoTimeIndexSelector
func (*SampleSelector) NewUIntSelector ¶
func (s *SampleSelector) NewUIntSelector() execute.DoUIntIndexSelector
type SchemaMutation ¶
type SchemaMutation interface {
Mutator() (SchemaMutator, error)
Copy() SchemaMutation
}
type SchemaMutationProcedureSpec ¶
type SchemaMutationProcedureSpec struct {
plan.DefaultCost
Mutations []SchemaMutation
}
func (*SchemaMutationProcedureSpec) Copy ¶
func (s *SchemaMutationProcedureSpec) Copy() plan.ProcedureSpec
func (*SchemaMutationProcedureSpec) Kind ¶
func (s *SchemaMutationProcedureSpec) Kind() plan.ProcedureKind
type SchemaMutator ¶
type SchemaMutator interface {
Mutate(ctx context.Context, bctx *BuilderContext) error
}
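A skeletal sketch of how SchemaMutation and SchemaMutator fit together. dropColumnMutation and dropColumnMutator are invented for illustration; a real mutator would rewrite the columns exposed by the BuilderContext, which this sketch leaves untouched.

package transformations

import "context"

// dropColumnMutation is a hypothetical SchemaMutation: it carries the column
// name and hands back a SchemaMutator when asked for one.
type dropColumnMutation struct {
    Column string
}

func (m *dropColumnMutation) Mutator() (SchemaMutator, error) {
    return &dropColumnMutator{column: m.Column}, nil
}

func (m *dropColumnMutation) Copy() SchemaMutation {
    nm := *m
    return &nm
}

// dropColumnMutator satisfies SchemaMutator. A real implementation would
// inspect and modify bctx; this sketch deliberately does nothing.
type dropColumnMutator struct {
    column string
}

func (m *dropColumnMutator) Mutate(ctx context.Context, bctx *BuilderContext) error {
    // Rewrite the builder's columns here; left as a no-op because
    // BuilderContext's methods are outside the scope of this sketch.
    return nil
}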
type SetProcedureSpec ¶
type SetProcedureSpec struct {
plan.DefaultCost
Key, Value string
}
func (*SetProcedureSpec) Copy ¶
func (s *SetProcedureSpec) Copy() plan.ProcedureSpec
func (*SetProcedureSpec) Kind ¶
func (s *SetProcedureSpec) Kind() plan.ProcedureKind
type ShiftOpSpec ¶
func (*ShiftOpSpec) Kind ¶
func (s *ShiftOpSpec) Kind() flux.OperationKind
type ShiftProcedureSpec ¶
type ShiftProcedureSpec struct {
plan.DefaultCost
Shift flux.Duration
Columns []string
Now time.Time
}
func (*ShiftProcedureSpec) Copy ¶
func (s *ShiftProcedureSpec) Copy() plan.ProcedureSpec
func (*ShiftProcedureSpec) Kind ¶
func (s *ShiftProcedureSpec) Kind() plan.ProcedureKind
func (*ShiftProcedureSpec) TimeBounds ¶
func (s *ShiftProcedureSpec) TimeBounds(predecessorBounds *plan.Bounds) *plan.Bounds
TimeBounds implements plan.BoundsAwareProcedureSpec
func (*ShiftProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *ShiftProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type SkewAgg ¶
type SkewAgg struct {
// contains filtered or unexported fields
}
func (*SkewAgg) NewBoolAgg ¶
func (*SkewAgg) NewFloatAgg ¶
func (a *SkewAgg) NewFloatAgg() execute.DoFloatAgg
func (*SkewAgg) NewStringAgg ¶
func (a *SkewAgg) NewStringAgg() execute.DoStringAgg
func (*SkewAgg) NewUIntAgg ¶
func (*SkewAgg) ValueFloat ¶
type SkewOpSpec ¶
type SkewOpSpec struct {
execute.AggregateConfig
}
func (*SkewOpSpec) Kind ¶
func (s *SkewOpSpec) Kind() flux.OperationKind
type SkewProcedureSpec ¶
type SkewProcedureSpec struct {
execute.AggregateConfig
}
func (*SkewProcedureSpec) Copy ¶
func (s *SkewProcedureSpec) Copy() plan.ProcedureSpec
func (*SkewProcedureSpec) Kind ¶
func (s *SkewProcedureSpec) Kind() plan.ProcedureKind
func (*SkewProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *SkewProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type SortOpSpec ¶
func (*SortOpSpec) Kind ¶
func (s *SortOpSpec) Kind() flux.OperationKind
type SortProcedureSpec ¶
type SortProcedureSpec struct {
plan.DefaultCost
Columns []string
Desc bool
}
func (*SortProcedureSpec) Copy ¶
func (s *SortProcedureSpec) Copy() plan.ProcedureSpec
func (*SortProcedureSpec) Kind ¶
func (s *SortProcedureSpec) Kind() plan.ProcedureKind
func (*SortProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *SortProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type SpreadAgg ¶
type SpreadAgg struct {
// contains filtered or unexported fields
}
SpreadAgg finds the difference between the max and min values in a table
func (*SpreadAgg) NewBoolAgg ¶
func (*SpreadAgg) NewFloatAgg ¶
func (a *SpreadAgg) NewFloatAgg() execute.DoFloatAgg
func (*SpreadAgg) NewStringAgg ¶
func (a *SpreadAgg) NewStringAgg() execute.DoStringAgg
func (*SpreadAgg) NewUIntAgg ¶
type SpreadFloatAgg ¶
type SpreadFloatAgg struct {
SpreadAgg
// contains filtered or unexported fields
}
func (*SpreadFloatAgg) DoFloat ¶
func (a *SpreadFloatAgg) DoFloat(vs *array.Float64)
DoFloat searches for the min and max values of the array and caches them in the aggregate
func (*SpreadFloatAgg) Type ¶
func (a *SpreadFloatAgg) Type() flux.ColType
func (*SpreadFloatAgg) ValueFloat ¶
func (a *SpreadFloatAgg) ValueFloat() float64
ValueFloat returns the difference between the max and min
type SpreadIntAgg ¶
type SpreadIntAgg struct {
SpreadAgg
// contains filtered or unexported fields
}
func (*SpreadIntAgg) DoInt ¶
func (a *SpreadIntAgg) DoInt(vs *array.Int64)
DoInt searches for the min and max values of the array and caches them in the aggregate
func (*SpreadIntAgg) Type ¶
func (a *SpreadIntAgg) Type() flux.ColType
func (*SpreadIntAgg) ValueInt ¶
func (a *SpreadIntAgg) ValueInt() int64
ValueInt returns the difference between the max and min
type SpreadOpSpec ¶
type SpreadOpSpec struct {
execute.AggregateConfig
}
SpreadOpSpec defines the required arguments for Flux. Currently, spread takes no arguments.
func (*SpreadOpSpec) Kind ¶
func (s *SpreadOpSpec) Kind() flux.OperationKind
Kind is used to look up createSpreadOpSpec, which produces a SpreadOpSpec
type SpreadProcedureSpec ¶
type SpreadProcedureSpec struct {
execute.AggregateConfig
}
SpreadProcedureSpec is created when mapping from SpreadOpSpec.Kind to a CreateProcedureSpec.
func (*SpreadProcedureSpec) Copy ¶
func (s *SpreadProcedureSpec) Copy() plan.ProcedureSpec
func (*SpreadProcedureSpec) Kind ¶
func (s *SpreadProcedureSpec) Kind() plan.ProcedureKind
Kind is used to look up the CreateTransformation that produces a SpreadAgg
func (*SpreadProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *SpreadProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type SpreadUIntAgg ¶
type SpreadUIntAgg struct {
SpreadAgg
// contains filtered or unexported fields
}
func (*SpreadUIntAgg) DoUInt ¶
func (a *SpreadUIntAgg) DoUInt(vs *array.Uint64)
DoUInt searches for the min and max values of the array and caches them in the aggregate
func (*SpreadUIntAgg) Type ¶
func (a *SpreadUIntAgg) Type() flux.ColType
func (*SpreadUIntAgg) ValueUInt ¶
func (a *SpreadUIntAgg) ValueUInt() uint64
ValueUInt returns the difference between the max and min
type StateTrackingOpSpec ¶
type StateTrackingOpSpec struct {
Fn interpreter.ResolvedFunction `json:"fn"`
CountColumn string `json:"countColumn"`
DurationColumn string `json:"durationColumn"`
DurationUnit flux.Duration `json:"durationUnit"`
TimeColumn string `json:"timeColumn"`
}
func (*StateTrackingOpSpec) Kind ¶
func (s *StateTrackingOpSpec) Kind() flux.OperationKind
type StateTrackingProcedureSpec ¶
type StateTrackingProcedureSpec struct {
plan.DefaultCost
Fn interpreter.ResolvedFunction
CountColumn, DurationColumn string
DurationUnit flux.Duration
TimeCol string
}
func (*StateTrackingProcedureSpec) Copy ¶
func (s *StateTrackingProcedureSpec) Copy() plan.ProcedureSpec
func (*StateTrackingProcedureSpec) Kind ¶
func (s *StateTrackingProcedureSpec) Kind() plan.ProcedureKind
func (*StateTrackingProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *StateTrackingProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type StddevAgg ¶
type StddevAgg struct {
Mode string
// contains filtered or unexported fields
}
func (*StddevAgg) NewBoolAgg ¶
func (*StddevAgg) NewFloatAgg ¶
func (a *StddevAgg) NewFloatAgg() execute.DoFloatAgg
func (*StddevAgg) NewStringAgg ¶
func (a *StddevAgg) NewStringAgg() execute.DoStringAgg
func (*StddevAgg) NewUIntAgg ¶
func (*StddevAgg) ValueFloat ¶
type StddevOpSpec ¶
type StddevOpSpec struct {
Mode string `json:"mode"`
execute.AggregateConfig
}
func (*StddevOpSpec) Kind ¶
func (s *StddevOpSpec) Kind() flux.OperationKind
type StddevProcedureSpec ¶
type StddevProcedureSpec struct {
Mode string `json:"mode"`
execute.AggregateConfig
}
func (*StddevProcedureSpec) Copy ¶
func (s *StddevProcedureSpec) Copy() plan.ProcedureSpec
func (*StddevProcedureSpec) Kind ¶
func (s *StddevProcedureSpec) Kind() plan.ProcedureKind
func (*StddevProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *StddevProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type SumAgg ¶
type SumAgg struct{}
func (*SumAgg) NewBoolAgg ¶
func (*SumAgg) NewFloatAgg ¶
func (a *SumAgg) NewFloatAgg() execute.DoFloatAgg
func (*SumAgg) NewStringAgg ¶
func (a *SumAgg) NewStringAgg() execute.DoStringAgg
func (*SumAgg) NewUIntAgg ¶
type SumFloatAgg ¶
type SumFloatAgg struct {
// contains filtered or unexported fields
}
func (*SumFloatAgg) DoFloat ¶
func (a *SumFloatAgg) DoFloat(vs *array.Float64)
func (*SumFloatAgg) IsNull ¶
func (a *SumFloatAgg) IsNull() bool
func (*SumFloatAgg) Type ¶
func (a *SumFloatAgg) Type() flux.ColType
func (*SumFloatAgg) ValueFloat ¶
func (a *SumFloatAgg) ValueFloat() float64
type SumOpSpec ¶
type SumOpSpec struct {
execute.AggregateConfig
}
func (*SumOpSpec) Kind ¶
func (s *SumOpSpec) Kind() flux.OperationKind
type SumProcedureSpec ¶
type SumProcedureSpec struct {
execute.AggregateConfig
}
func (*SumProcedureSpec) AggregateMethod ¶
func (s *SumProcedureSpec) AggregateMethod() string
func (*SumProcedureSpec) Copy ¶
func (s *SumProcedureSpec) Copy() plan.ProcedureSpec
func (*SumProcedureSpec) Kind ¶
func (s *SumProcedureSpec) Kind() plan.ProcedureKind
func (*SumProcedureSpec) ReAggregateSpec ¶
func (s *SumProcedureSpec) ReAggregateSpec() plan.ProcedureSpec
func (*SumProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *SumProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type SumUIntAgg ¶
type SumUIntAgg struct {
// contains filtered or unexported fields
}
func (*SumUIntAgg) DoUInt ¶
func (a *SumUIntAgg) DoUInt(vs *array.Uint64)
func (*SumUIntAgg) IsNull ¶
func (a *SumUIntAgg) IsNull() bool
func (*SumUIntAgg) Type ¶
func (a *SumUIntAgg) Type() flux.ColType
func (*SumUIntAgg) ValueUInt ¶
func (a *SumUIntAgg) ValueUInt() uint64
type TDigestQuantileProcedureSpec ¶ added in v0.24.0
type TDigestQuantileProcedureSpec struct {
Quantile float64 `json:"quantile"`
Compression float64 `json:"compression"`
execute.AggregateConfig
}
func (*TDigestQuantileProcedureSpec) Copy ¶ added in v0.24.0
func (s *TDigestQuantileProcedureSpec) Copy() plan.ProcedureSpec
func (*TDigestQuantileProcedureSpec) Kind ¶ added in v0.24.0
func (s *TDigestQuantileProcedureSpec) Kind() plan.ProcedureKind
func (*TDigestQuantileProcedureSpec) TriggerSpec ¶ added in v0.24.0
func (s *TDigestQuantileProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type TailOpSpec ¶ added in v0.39.0
TailOpSpec limits the number of rows returned per table to the last n rows.
func (*TailOpSpec) Kind ¶ added in v0.39.0
func (s *TailOpSpec) Kind() flux.OperationKind
type TailProcedureSpec ¶ added in v0.39.0
type TailProcedureSpec struct {
plan.DefaultCost
N int64 `json:"n"`
Offset int64 `json:"offset"`
}
func (*TailProcedureSpec) Copy ¶ added in v0.39.0
func (s *TailProcedureSpec) Copy() plan.ProcedureSpec
func (*TailProcedureSpec) Kind ¶ added in v0.39.0
func (s *TailProcedureSpec) Kind() plan.ProcedureKind
func (*TailProcedureSpec) TriggerSpec ¶ added in v0.39.0
func (s *TailProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type TripleExponentialDerivativeOpSpec ¶ added in v0.40.0
type TripleExponentialDerivativeOpSpec struct {
N int64 `json:"n"`
}
func (*TripleExponentialDerivativeOpSpec) Kind ¶ added in v0.40.0
func (s *TripleExponentialDerivativeOpSpec) Kind() flux.OperationKind
type TripleExponentialDerivativeProcedureSpec ¶ added in v0.40.0
type TripleExponentialDerivativeProcedureSpec struct {
plan.DefaultCost
N int64 `json:"n"`
}
func (*TripleExponentialDerivativeProcedureSpec) Copy ¶ added in v0.40.0
func (s *TripleExponentialDerivativeProcedureSpec) Copy() plan.ProcedureSpec
func (*TripleExponentialDerivativeProcedureSpec) Kind ¶ added in v0.40.0
func (s *TripleExponentialDerivativeProcedureSpec) Kind() plan.ProcedureKind
func (*TripleExponentialDerivativeProcedureSpec) TriggerSpec ¶ added in v0.40.0
func (s *TripleExponentialDerivativeProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type UnionOpSpec ¶
type UnionOpSpec struct{}
func (*UnionOpSpec) Kind ¶
func (s *UnionOpSpec) Kind() flux.OperationKind
type UnionProcedureSpec ¶
type UnionProcedureSpec struct {
plan.DefaultCost
}
func (*UnionProcedureSpec) Copy ¶
func (s *UnionProcedureSpec) Copy() plan.ProcedureSpec
func (*UnionProcedureSpec) Kind ¶
func (s *UnionProcedureSpec) Kind() plan.ProcedureKind
type UniqueOpSpec ¶
type UniqueOpSpec struct {
Column string `json:"column"`
}
func (*UniqueOpSpec) Kind ¶
func (s *UniqueOpSpec) Kind() flux.OperationKind
type UniqueProcedureSpec ¶
type UniqueProcedureSpec struct {
plan.DefaultCost
Column string
}
func (*UniqueProcedureSpec) Copy ¶
func (s *UniqueProcedureSpec) Copy() plan.ProcedureSpec
func (*UniqueProcedureSpec) Kind ¶
func (s *UniqueProcedureSpec) Kind() plan.ProcedureKind
func (*UniqueProcedureSpec) TriggerSpec ¶ added in v0.22.0
func (s *UniqueProcedureSpec) TriggerSpec() plan.TriggerSpec
TriggerSpec implements plan.TriggerAwareProcedureSpec
type WindowOpSpec ¶
type WindowOpSpec struct {
Every flux.Duration `json:"every"`
Period flux.Duration `json:"period"`
Offset flux.Duration `json:"offset"`
TimeColumn string `json:"timeColumn"`
StopColumn string `json:"stopColumn"`
StartColumn string `json:"startColumn"`
CreateEmpty bool `json:"createEmpty"`
}
func (*WindowOpSpec) Kind ¶
func (s *WindowOpSpec) Kind() flux.OperationKind
type WindowProcedureSpec ¶
type WindowProcedureSpec struct {
plan.DefaultCost
Window plan.WindowSpec
TimeColumn, StartColumn, StopColumn string
CreateEmpty bool
}
func (*WindowProcedureSpec) Copy ¶
func (s *WindowProcedureSpec) Copy() plan.ProcedureSpec
func (*WindowProcedureSpec) Kind ¶
func (s *WindowProcedureSpec) Kind() plan.ProcedureKind
type WindowTriggerPhysicalRule ¶ added in v0.22.0
type WindowTriggerPhysicalRule struct{}
WindowTriggerPhysicalRule rewrites a physical window operation to use a narrow trigger if certain conditions are met.
func (WindowTriggerPhysicalRule) Name ¶ added in v0.22.0
func (WindowTriggerPhysicalRule) Name() string
func (WindowTriggerPhysicalRule) Pattern ¶ added in v0.22.0
func (WindowTriggerPhysicalRule) Pattern() plan.Pattern
Pattern matches the physical operator pattern consisting of a window operator with a single predecessor of any kind.
type YieldOpSpec ¶
type YieldOpSpec struct {
Name string `json:"name"`
}
func (*YieldOpSpec) Kind ¶
func (s *YieldOpSpec) Kind() flux.OperationKind
type YieldProcedureSpec ¶
type YieldProcedureSpec struct {
plan.DefaultCost
Name string `json:"name"`
}
func (*YieldProcedureSpec) Copy ¶
func (s *YieldProcedureSpec) Copy() plan.ProcedureSpec
func (*YieldProcedureSpec) Kind ¶
func (s *YieldProcedureSpec) Kind() plan.ProcedureKind
func (*YieldProcedureSpec) YieldName ¶
func (s *YieldProcedureSpec) YieldName() string
Source Files ¶
- booleans.go
- chande_momentum_oscillator.go
- columns.go
- contains.go
- count.go
- covariance.go
- cumulative_sum.go
- derivative.go
- difference.go
- distinct.go
- elapsed.go
- exponential_moving_average.go
- fill.go
- filter.go
- first.go
- flux_gen.go
- flux_test_gen.go
- group.go
- histogram.go
- histogram_quantile.go
- holt_winters.go
- hour_selection.go
- integral.go
- join.go
- kaufmansAMA.go
- key_values.go
- keys.go
- last.go
- length.go
- limit.go
- map.go
- max.go
- mean.go
- min.go
- mode.go
- moving_average.go
- pivot.go
- quantile.go
- range.go
- reduce.go
- relative_strength_index.go
- sample.go
- schema_functions.go
- schema_mutators.go
- set.go
- shift.go
- skew.go
- sleep.go
- sort.go
- spread.go
- state_tracking.go
- stddev.go
- sum.go
- table_fns.go
- tail.go
- transformations.go
- triple_exponential_derivative.go
- typeconv.go
- union.go
- unique.go
- window.go
- yield.go