Documentation
Overview
Copyright (c) 2020 Metaprov.com
+kubebuilder:object:generate=true
+groupName=data.modela.ai
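
The markers above enable deepcopy object generation and place the package's types in the data.modela.ai API group. A minimal sketch of registering these types with a runtime scheme and resolving group-qualified kinds and resources, using the AddKnownTypes, Kind, and Resource functions listed in the index below; the import path and the data alias are illustrative assumptions, not confirmed by this page:

	package main

	import (
		"fmt"

		k8sruntime "k8s.io/apimachinery/pkg/runtime"

		// Illustrative import path; substitute the actual module path
		// that hosts this package.
		data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1"
	)

	func main() {
		// Register the data.modela.ai types into a fresh scheme, as a
		// controller manager or typed client would do at startup.
		scheme := k8sruntime.NewScheme()
		if err := data.AddKnownTypes(scheme); err != nil {
			panic(err)
		}

		// Kind and Resource qualify a bare name with the data.modela.ai group.
		fmt.Println(data.Kind("Dataset"))
		fmt.Println(data.Resource("datasets"))
	}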
Index
- Constants
- Variables
- func AddKnownTypes(scheme *k8sruntime.Scheme) error
- func Kind(kind string) schema.GroupKind
- func Resource(resource string) schema.GroupResource
- type ApprovalType
- type BarChartSpec
- func (in *BarChartSpec) DeepCopy() *BarChartSpec
- func (in *BarChartSpec) DeepCopyInto(out *BarChartSpec)
- func (*BarChartSpec) Descriptor() ([]byte, []int)
- func (m *BarChartSpec) Marshal() (dAtA []byte, err error)
- func (m *BarChartSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *BarChartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*BarChartSpec) ProtoMessage()
- func (m *BarChartSpec) Reset()
- func (m *BarChartSpec) Size() (n int)
- func (this *BarChartSpec) String() string
- func (m *BarChartSpec) Unmarshal(dAtA []byte) error
- func (m *BarChartSpec) XXX_DiscardUnknown()
- func (m *BarChartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *BarChartSpec) XXX_Merge(src proto.Message)
- func (m *BarChartSpec) XXX_Size() int
- func (m *BarChartSpec) XXX_Unmarshal(b []byte) error
- type Column
- func (in *Column) DeepCopy() *Column
- func (in *Column) DeepCopyInto(out *Column)
- func (*Column) Descriptor() ([]byte, []int)
- func (m *Column) Marshal() (dAtA []byte, err error)
- func (m *Column) MarshalTo(dAtA []byte) (int, error)
- func (m *Column) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*Column) ProtoMessage()
- func (m *Column) Reset()
- func (m *Column) Size() (n int)
- func (this *Column) String() string
- func (m *Column) Unmarshal(dAtA []byte) error
- func (column *Column) Validate() (bool, []metav1.StatusCause)
- func (column *Column) ValidateColumn() (bool, []metav1.StatusCause)
- func (m *Column) XXX_DiscardUnknown()
- func (m *Column) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Column) XXX_Merge(src proto.Message)
- func (m *Column) XXX_Size() int
- func (m *Column) XXX_Unmarshal(b []byte) error
- type ColumnHistogram
- func (in *ColumnHistogram) DeepCopy() *ColumnHistogram
- func (in *ColumnHistogram) DeepCopyInto(out *ColumnHistogram)
- func (*ColumnHistogram) Descriptor() ([]byte, []int)
- func (m *ColumnHistogram) Marshal() (dAtA []byte, err error)
- func (m *ColumnHistogram) MarshalTo(dAtA []byte) (int, error)
- func (m *ColumnHistogram) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ColumnHistogram) ProtoMessage()
- func (m *ColumnHistogram) Reset()
- func (m *ColumnHistogram) Size() (n int)
- func (this *ColumnHistogram) String() string
- func (m *ColumnHistogram) Unmarshal(dAtA []byte) error
- func (m *ColumnHistogram) XXX_DiscardUnknown()
- func (m *ColumnHistogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ColumnHistogram) XXX_Merge(src proto.Message)
- func (m *ColumnHistogram) XXX_Size() int
- func (m *ColumnHistogram) XXX_Unmarshal(b []byte) error
- type ColumnSpec
- func (in *ColumnSpec) DeepCopy() *ColumnSpec
- func (in *ColumnSpec) DeepCopyInto(out *ColumnSpec)
- func (*ColumnSpec) Descriptor() ([]byte, []int)
- func (m *ColumnSpec) Marshal() (dAtA []byte, err error)
- func (m *ColumnSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *ColumnSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ColumnSpec) ProtoMessage()
- func (m *ColumnSpec) Reset()
- func (m *ColumnSpec) Size() (n int)
- func (this *ColumnSpec) String() string
- func (m *ColumnSpec) Unmarshal(dAtA []byte) error
- func (m *ColumnSpec) XXX_DiscardUnknown()
- func (m *ColumnSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ColumnSpec) XXX_Merge(src proto.Message)
- func (m *ColumnSpec) XXX_Size() int
- func (m *ColumnSpec) XXX_Unmarshal(b []byte) error
- type ColumnStatistics
- func (col ColumnStatistics) BigBoolTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
- func (col ColumnStatistics) BigCatTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
- func (col ColumnStatistics) BigNumericTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
- func (in *ColumnStatistics) DeepCopy() *ColumnStatistics
- func (in *ColumnStatistics) DeepCopyInto(out *ColumnStatistics)
- func (*ColumnStatistics) Descriptor() ([]byte, []int)
- func (col ColumnStatistics) GenDriftTestCase(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
- func (m *ColumnStatistics) Marshal() (dAtA []byte, err error)
- func (m *ColumnStatistics) MarshalTo(dAtA []byte) (int, error)
- func (m *ColumnStatistics) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ColumnStatistics) ProtoMessage()
- func (m *ColumnStatistics) Reset()
- func (m *ColumnStatistics) Size() (n int)
- func (col ColumnStatistics) SmallBoolTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
- func (col ColumnStatistics) SmallCatTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
- func (col ColumnStatistics) SmallNumericTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
- func (this *ColumnStatistics) String() string
- func (m *ColumnStatistics) Unmarshal(dAtA []byte) error
- func (m *ColumnStatistics) XXX_DiscardUnknown()
- func (m *ColumnStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ColumnStatistics) XXX_Merge(src proto.Message)
- func (m *ColumnStatistics) XXX_Size() int
- func (m *ColumnStatistics) XXX_Unmarshal(b []byte) error
- type ComponentSpec
- func (in *ComponentSpec) DeepCopy() *ComponentSpec
- func (in *ComponentSpec) DeepCopyInto(out *ComponentSpec)
- func (*ComponentSpec) Descriptor() ([]byte, []int)
- func (m *ComponentSpec) Marshal() (dAtA []byte, err error)
- func (m *ComponentSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *ComponentSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ComponentSpec) ProtoMessage()
- func (m *ComponentSpec) Reset()
- func (m *ComponentSpec) Size() (n int)
- func (this *ComponentSpec) String() string
- func (m *ComponentSpec) Unmarshal(dAtA []byte) error
- func (m *ComponentSpec) XXX_DiscardUnknown()
- func (m *ComponentSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ComponentSpec) XXX_Merge(src proto.Message)
- func (m *ComponentSpec) XXX_Size() int
- func (m *ComponentSpec) XXX_Unmarshal(b []byte) error
- type ComponentView
- func (in *ComponentView) DeepCopy() *ComponentView
- func (in *ComponentView) DeepCopyInto(out *ComponentView)
- func (*ComponentView) Descriptor() ([]byte, []int)
- func (m *ComponentView) Marshal() (dAtA []byte, err error)
- func (m *ComponentView) MarshalTo(dAtA []byte) (int, error)
- func (m *ComponentView) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ComponentView) ProtoMessage()
- func (m *ComponentView) Reset()
- func (m *ComponentView) Size() (n int)
- func (this *ComponentView) String() string
- func (m *ComponentView) Unmarshal(dAtA []byte) error
- func (m *ComponentView) XXX_DiscardUnknown()
- func (m *ComponentView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ComponentView) XXX_Merge(src proto.Message)
- func (m *ComponentView) XXX_Size() int
- func (m *ComponentView) XXX_Unmarshal(b []byte) error
- type Correlation
- func (in *Correlation) DeepCopy() *Correlation
- func (in *Correlation) DeepCopyInto(out *Correlation)
- func (*Correlation) Descriptor() ([]byte, []int)
- func (m *Correlation) Marshal() (dAtA []byte, err error)
- func (m *Correlation) MarshalTo(dAtA []byte) (int, error)
- func (m *Correlation) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*Correlation) ProtoMessage()
- func (m *Correlation) Reset()
- func (m *Correlation) Size() (n int)
- func (this *Correlation) String() string
- func (m *Correlation) Unmarshal(dAtA []byte) error
- func (m *Correlation) XXX_DiscardUnknown()
- func (m *Correlation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Correlation) XXX_Merge(src proto.Message)
- func (m *Correlation) XXX_Size() int
- func (m *Correlation) XXX_Unmarshal(b []byte) error
- type CorrelationSpec
- func (in *CorrelationSpec) DeepCopy() *CorrelationSpec
- func (in *CorrelationSpec) DeepCopyInto(out *CorrelationSpec)
- func (*CorrelationSpec) Descriptor() ([]byte, []int)
- func (m *CorrelationSpec) Marshal() (dAtA []byte, err error)
- func (m *CorrelationSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *CorrelationSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*CorrelationSpec) ProtoMessage()
- func (m *CorrelationSpec) Reset()
- func (m *CorrelationSpec) Size() (n int)
- func (this *CorrelationSpec) String() string
- func (m *CorrelationSpec) Unmarshal(dAtA []byte) error
- func (m *CorrelationSpec) XXX_DiscardUnknown()
- func (m *CorrelationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *CorrelationSpec) XXX_Merge(src proto.Message)
- func (m *CorrelationSpec) XXX_Size() int
- func (m *CorrelationSpec) XXX_Unmarshal(b []byte) error
- type CsvFileSpec
- func (in *CsvFileSpec) DeepCopy() *CsvFileSpec
- func (in *CsvFileSpec) DeepCopyInto(out *CsvFileSpec)
- func (*CsvFileSpec) Descriptor() ([]byte, []int)
- func (m *CsvFileSpec) Marshal() (dAtA []byte, err error)
- func (m *CsvFileSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *CsvFileSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*CsvFileSpec) ProtoMessage()
- func (m *CsvFileSpec) Reset()
- func (m *CsvFileSpec) Size() (n int)
- func (this *CsvFileSpec) String() string
- func (m *CsvFileSpec) Unmarshal(dAtA []byte) error
- func (m *CsvFileSpec) XXX_DiscardUnknown()
- func (m *CsvFileSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *CsvFileSpec) XXX_Merge(src proto.Message)
- func (m *CsvFileSpec) XXX_Size() int
- func (m *CsvFileSpec) XXX_Unmarshal(b []byte) error
- type DataInputSpec
- func (in *DataInputSpec) DeepCopy() *DataInputSpec
- func (in *DataInputSpec) DeepCopyInto(out *DataInputSpec)
- func (*DataInputSpec) Descriptor() ([]byte, []int)
- func (m *DataInputSpec) Marshal() (dAtA []byte, err error)
- func (m *DataInputSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *DataInputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataInputSpec) ProtoMessage()
- func (m *DataInputSpec) Reset()
- func (m *DataInputSpec) Size() (n int)
- func (this *DataInputSpec) String() string
- func (m *DataInputSpec) Unmarshal(dAtA []byte) error
- func (m *DataInputSpec) XXX_DiscardUnknown()
- func (m *DataInputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataInputSpec) XXX_Merge(src proto.Message)
- func (m *DataInputSpec) XXX_Size() int
- func (m *DataInputSpec) XXX_Unmarshal(b []byte) error
- type DataLocation
- func (in *DataLocation) DeepCopy() *DataLocation
- func (in *DataLocation) DeepCopyInto(out *DataLocation)
- func (*DataLocation) Descriptor() ([]byte, []int)
- func (m *DataLocation) Marshal() (dAtA []byte, err error)
- func (m *DataLocation) MarshalTo(dAtA []byte) (int, error)
- func (m *DataLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataLocation) ProtoMessage()
- func (m *DataLocation) Reset()
- func (m *DataLocation) Size() (n int)
- func (this *DataLocation) String() string
- func (m *DataLocation) Unmarshal(dAtA []byte) error
- func (loc DataLocation) Validate(field string) ([]metav1.StatusCause, bool)
- func (m *DataLocation) XXX_DiscardUnknown()
- func (m *DataLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataLocation) XXX_Merge(src proto.Message)
- func (m *DataLocation) XXX_Size() int
- func (m *DataLocation) XXX_Unmarshal(b []byte) error
- type DataLocationType
- type DataOutputSpec
- func (in *DataOutputSpec) DeepCopy() *DataOutputSpec
- func (in *DataOutputSpec) DeepCopyInto(out *DataOutputSpec)
- func (*DataOutputSpec) Descriptor() ([]byte, []int)
- func (m *DataOutputSpec) Marshal() (dAtA []byte, err error)
- func (m *DataOutputSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *DataOutputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataOutputSpec) ProtoMessage()
- func (m *DataOutputSpec) Reset()
- func (m *DataOutputSpec) Size() (n int)
- func (this *DataOutputSpec) String() string
- func (m *DataOutputSpec) Unmarshal(dAtA []byte) error
- func (m *DataOutputSpec) XXX_DiscardUnknown()
- func (m *DataOutputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataOutputSpec) XXX_Merge(src proto.Message)
- func (m *DataOutputSpec) XXX_Size() int
- func (m *DataOutputSpec) XXX_Unmarshal(b []byte) error
- type DataPipeline
- func (wr *DataPipeline) AddFinalizer()
- func (wr *DataPipeline) CreateOrUpdateCond(cond metav1.Condition)
- func (in *DataPipeline) DeepCopy() *DataPipeline
- func (in *DataPipeline) DeepCopyInto(out *DataPipeline)
- func (in *DataPipeline) DeepCopyObject() runtime.Object
- func (wr *DataPipeline) Default()
- func (*DataPipeline) Descriptor() ([]byte, []int)
- func (wr DataPipeline) GetCond(t string) metav1.Condition
- func (wr DataPipeline) GetCondIdx(t string) int
- func (wr DataPipeline) HasFinalizer() bool
- func (w DataPipeline) IsReady() bool
- func (w DataPipeline) IsSaved() bool
- func (wr DataPipeline) ManifestURI() string
- func (in *DataPipeline) MarkFailed(err error)
- func (in *DataPipeline) MarkReady()
- func (in *DataPipeline) MarkSaved()
- func (m *DataPipeline) Marshal() (dAtA []byte, err error)
- func (m *DataPipeline) MarshalTo(dAtA []byte) (int, error)
- func (m *DataPipeline) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataPipeline) ProtoMessage()
- func (wr *DataPipeline) RemoveFinalizer()
- func (m *DataPipeline) Reset()
- func (wr DataPipeline) RootURI() string
- func (wr *DataPipeline) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *DataPipeline) Size() (n int)
- func (this *DataPipeline) String() string
- func (m *DataPipeline) Unmarshal(dAtA []byte) error
- func (in *DataPipeline) UpdateRunStatus(run DataPipelineRun)
- func (wr DataPipeline) ValidateCreate() error
- func (wr DataPipeline) ValidateDelete() error
- func (wr DataPipeline) ValidateUpdate(old runtime.Object) error
- func (m *DataPipeline) XXX_DiscardUnknown()
- func (m *DataPipeline) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataPipeline) XXX_Merge(src proto.Message)
- func (m *DataPipeline) XXX_Size() int
- func (m *DataPipeline) XXX_Unmarshal(b []byte) error
- type DataPipelineConditionType
- type DataPipelineList
- func (in *DataPipelineList) DeepCopy() *DataPipelineList
- func (in *DataPipelineList) DeepCopyInto(out *DataPipelineList)
- func (in *DataPipelineList) DeepCopyObject() runtime.Object
- func (*DataPipelineList) Descriptor() ([]byte, []int)
- func (m *DataPipelineList) Marshal() (dAtA []byte, err error)
- func (m *DataPipelineList) MarshalTo(dAtA []byte) (int, error)
- func (m *DataPipelineList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataPipelineList) ProtoMessage()
- func (m *DataPipelineList) Reset()
- func (m *DataPipelineList) Size() (n int)
- func (this *DataPipelineList) String() string
- func (m *DataPipelineList) Unmarshal(dAtA []byte) error
- func (m *DataPipelineList) XXX_DiscardUnknown()
- func (m *DataPipelineList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataPipelineList) XXX_Merge(src proto.Message)
- func (m *DataPipelineList) XXX_Size() int
- func (m *DataPipelineList) XXX_Unmarshal(b []byte) error
- type DataPipelineRun
- func (in DataPipelineRun) Aborted() bool
- func (run *DataPipelineRun) AddFinalizer()
- func (run DataPipelineRun) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert
- func (run *DataPipelineRun) CreateOrUpdateCond(cond metav1.Condition)
- func (in *DataPipelineRun) DeepCopy() *DataPipelineRun
- func (in *DataPipelineRun) DeepCopyInto(out *DataPipelineRun)
- func (in *DataPipelineRun) DeepCopyObject() runtime.Object
- func (run *DataPipelineRun) Default()
- func (*DataPipelineRun) Descriptor() ([]byte, []int)
- func (run DataPipelineRun) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert
- func (run DataPipelineRun) GetCond(t string) metav1.Condition
- func (run DataPipelineRun) GetCondIdx(t string) int
- func (run DataPipelineRun) HasFinalizer() bool
- func (in DataPipelineRun) IsCompleted() bool
- func (in DataPipelineRun) IsFailed() bool
- func (w DataPipelineRun) IsReady() bool
- func (in DataPipelineRun) IsRunning() bool
- func (w DataPipelineRun) IsSaved() bool
- func (run DataPipelineRun) ManifestURI() string
- func (in *DataPipelineRun) MarkAborted(err error)
- func (in *DataPipelineRun) MarkComplete()
- func (in *DataPipelineRun) MarkFailed(err error)
- func (r *DataPipelineRun) MarkRunning()
- func (in *DataPipelineRun) MarkSaved()
- func (m *DataPipelineRun) Marshal() (dAtA []byte, err error)
- func (m *DataPipelineRun) MarshalTo(dAtA []byte) (int, error)
- func (m *DataPipelineRun) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (in DataPipelineRun) Paused() bool
- func (*DataPipelineRun) ProtoMessage()
- func (run *DataPipelineRun) RemoveFinalizer()
- func (m *DataPipelineRun) Reset()
- func (run DataPipelineRun) RootURI() string
- func (run DataPipelineRun) RunStatus() *catalog.LastRunStatus
- func (wr *DataPipelineRun) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *DataPipelineRun) Size() (n int)
- func (run DataPipelineRun) StatusString() string
- func (this *DataPipelineRun) String() string
- func (m *DataPipelineRun) Unmarshal(dAtA []byte) error
- func (run DataPipelineRun) ValidateCreate() error
- func (run DataPipelineRun) ValidateDelete() error
- func (run DataPipelineRun) ValidateUpdate(old runtime.Object) error
- func (m *DataPipelineRun) XXX_DiscardUnknown()
- func (m *DataPipelineRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataPipelineRun) XXX_Merge(src proto.Message)
- func (m *DataPipelineRun) XXX_Size() int
- func (m *DataPipelineRun) XXX_Unmarshal(b []byte) error
- type DataPipelineRunCondition
- func (in *DataPipelineRunCondition) DeepCopy() *DataPipelineRunCondition
- func (in *DataPipelineRunCondition) DeepCopyInto(out *DataPipelineRunCondition)
- func (*DataPipelineRunCondition) Descriptor() ([]byte, []int)
- func (m *DataPipelineRunCondition) Marshal() (dAtA []byte, err error)
- func (m *DataPipelineRunCondition) MarshalTo(dAtA []byte) (int, error)
- func (m *DataPipelineRunCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataPipelineRunCondition) ProtoMessage()
- func (m *DataPipelineRunCondition) Reset()
- func (m *DataPipelineRunCondition) Size() (n int)
- func (this *DataPipelineRunCondition) String() string
- func (m *DataPipelineRunCondition) Unmarshal(dAtA []byte) error
- func (m *DataPipelineRunCondition) XXX_DiscardUnknown()
- func (m *DataPipelineRunCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataPipelineRunCondition) XXX_Merge(src proto.Message)
- func (m *DataPipelineRunCondition) XXX_Size() int
- func (m *DataPipelineRunCondition) XXX_Unmarshal(b []byte) error
- type DataPipelineRunConditionType
- type DataPipelineRunList
- func (in *DataPipelineRunList) DeepCopy() *DataPipelineRunList
- func (in *DataPipelineRunList) DeepCopyInto(out *DataPipelineRunList)
- func (in *DataPipelineRunList) DeepCopyObject() runtime.Object
- func (*DataPipelineRunList) Descriptor() ([]byte, []int)
- func (m *DataPipelineRunList) Marshal() (dAtA []byte, err error)
- func (m *DataPipelineRunList) MarshalTo(dAtA []byte) (int, error)
- func (m *DataPipelineRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataPipelineRunList) ProtoMessage()
- func (m *DataPipelineRunList) Reset()
- func (m *DataPipelineRunList) Size() (n int)
- func (this *DataPipelineRunList) String() string
- func (m *DataPipelineRunList) Unmarshal(dAtA []byte) error
- func (m *DataPipelineRunList) XXX_DiscardUnknown()
- func (m *DataPipelineRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataPipelineRunList) XXX_Merge(src proto.Message)
- func (m *DataPipelineRunList) XXX_Size() int
- func (m *DataPipelineRunList) XXX_Unmarshal(b []byte) error
- type DataPipelineRunPhase
- type DataPipelineRunSpec
- func (in *DataPipelineRunSpec) DeepCopy() *DataPipelineRunSpec
- func (in *DataPipelineRunSpec) DeepCopyInto(out *DataPipelineRunSpec)
- func (*DataPipelineRunSpec) Descriptor() ([]byte, []int)
- func (m *DataPipelineRunSpec) Marshal() (dAtA []byte, err error)
- func (m *DataPipelineRunSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *DataPipelineRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataPipelineRunSpec) ProtoMessage()
- func (m *DataPipelineRunSpec) Reset()
- func (m *DataPipelineRunSpec) Size() (n int)
- func (this *DataPipelineRunSpec) String() string
- func (m *DataPipelineRunSpec) Unmarshal(dAtA []byte) error
- func (m *DataPipelineRunSpec) XXX_DiscardUnknown()
- func (m *DataPipelineRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataPipelineRunSpec) XXX_Merge(src proto.Message)
- func (m *DataPipelineRunSpec) XXX_Size() int
- func (m *DataPipelineRunSpec) XXX_Unmarshal(b []byte) error
- type DataPipelineRunStatus
- func (in *DataPipelineRunStatus) DeepCopy() *DataPipelineRunStatus
- func (in *DataPipelineRunStatus) DeepCopyInto(out *DataPipelineRunStatus)
- func (*DataPipelineRunStatus) Descriptor() ([]byte, []int)
- func (m *DataPipelineRunStatus) Marshal() (dAtA []byte, err error)
- func (m *DataPipelineRunStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *DataPipelineRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataPipelineRunStatus) ProtoMessage()
- func (m *DataPipelineRunStatus) Reset()
- func (m *DataPipelineRunStatus) Size() (n int)
- func (this *DataPipelineRunStatus) String() string
- func (m *DataPipelineRunStatus) Unmarshal(dAtA []byte) error
- func (m *DataPipelineRunStatus) XXX_DiscardUnknown()
- func (m *DataPipelineRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataPipelineRunStatus) XXX_Merge(src proto.Message)
- func (m *DataPipelineRunStatus) XXX_Size() int
- func (m *DataPipelineRunStatus) XXX_Unmarshal(b []byte) error
- type DataPipelineSpec
- func (in *DataPipelineSpec) DeepCopy() *DataPipelineSpec
- func (in *DataPipelineSpec) DeepCopyInto(out *DataPipelineSpec)
- func (*DataPipelineSpec) Descriptor() ([]byte, []int)
- func (m *DataPipelineSpec) Marshal() (dAtA []byte, err error)
- func (m *DataPipelineSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *DataPipelineSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataPipelineSpec) ProtoMessage()
- func (m *DataPipelineSpec) Reset()
- func (m *DataPipelineSpec) Size() (n int)
- func (this *DataPipelineSpec) String() string
- func (m *DataPipelineSpec) Unmarshal(dAtA []byte) error
- func (m *DataPipelineSpec) XXX_DiscardUnknown()
- func (m *DataPipelineSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataPipelineSpec) XXX_Merge(src proto.Message)
- func (m *DataPipelineSpec) XXX_Size() int
- func (m *DataPipelineSpec) XXX_Unmarshal(b []byte) error
- type DataPipelineStatus
- func (in *DataPipelineStatus) DeepCopy() *DataPipelineStatus
- func (in *DataPipelineStatus) DeepCopyInto(out *DataPipelineStatus)
- func (*DataPipelineStatus) Descriptor() ([]byte, []int)
- func (m *DataPipelineStatus) Marshal() (dAtA []byte, err error)
- func (m *DataPipelineStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *DataPipelineStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataPipelineStatus) ProtoMessage()
- func (m *DataPipelineStatus) Reset()
- func (m *DataPipelineStatus) Size() (n int)
- func (this *DataPipelineStatus) String() string
- func (m *DataPipelineStatus) Unmarshal(dAtA []byte) error
- func (m *DataPipelineStatus) XXX_DiscardUnknown()
- func (m *DataPipelineStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataPipelineStatus) XXX_Merge(src proto.Message)
- func (m *DataPipelineStatus) XXX_Size() int
- func (m *DataPipelineStatus) XXX_Unmarshal(b []byte) error
- type DataProduct
- func (product *DataProduct) AddFinalizer()
- func (product DataProduct) CreateNamespace() *v1.Namespace
- func (product *DataProduct) CreateOrUpdateCond(cond metav1.Condition)
- func (product DataProduct) DataEngineer() *rbacv1.Role
- func (product DataProduct) DataLabler() *rbacv1.Role
- func (product DataProduct) DataScientist() *rbacv1.Role
- func (in *DataProduct) DeepCopy() *DataProduct
- func (in *DataProduct) DeepCopyInto(out *DataProduct)
- func (in *DataProduct) DeepCopyObject() runtime.Object
- func (product *DataProduct) Default()
- func (*DataProduct) Descriptor() ([]byte, []int)
- func (product DataProduct) GetCond(t string) metav1.Condition
- func (product DataProduct) GetCondIdx(t string) int
- func (product *DataProduct) GetRolesForAccount(account *infra.Account) []string
- func (product DataProduct) HasFinalizer() bool
- func (product DataProduct) IsClassification() bool
- func (product DataProduct) IsReady() bool
- func (product *DataProduct) IsSaved() bool
- func (product *DataProduct) MarkFailed(err error)
- func (product *DataProduct) MarkReady()
- func (product *DataProduct) MarkSaved()
- func (m *DataProduct) Marshal() (dAtA []byte, err error)
- func (m *DataProduct) MarshalTo(dAtA []byte) (int, error)
- func (m *DataProduct) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (product DataProduct) PredictionConsumer() *rbacv1.Role
- func (product DataProduct) PrefixArchiveURI(uri string) string
- func (product DataProduct) PrefixDepotURI(uri string) string
- func (product DataProduct) PrefixLiveURI(uri string) string
- func (product DataProduct) ProductAdmin() *rbacv1.Role
- func (*DataProduct) ProtoMessage()
- func (product *DataProduct) RemoveFinalizer()
- func (product DataProduct) ReportConsumer() *rbacv1.Role
- func (m *DataProduct) Reset()
- func (product *DataProduct) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *DataProduct) Size() (n int)
- func (this *DataProduct) String() string
- func (m *DataProduct) Unmarshal(dAtA []byte) error
- func (product *DataProduct) UpdateBaselineVersion(versions DataProductVersionList)
- func (product DataProduct) ValidateCreate() error
- func (product DataProduct) ValidateDelete() error
- func (product DataProduct) ValidateUpdate(old runtime.Object) error
- func (m *DataProduct) XXX_DiscardUnknown()
- func (m *DataProduct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataProduct) XXX_Merge(src proto.Message)
- func (m *DataProduct) XXX_Size() int
- func (m *DataProduct) XXX_Unmarshal(b []byte) error
- func (product DataProduct) YamlURI() string
- type DataProductConditionType
- type DataProductList
- func (in *DataProductList) DeepCopy() *DataProductList
- func (in *DataProductList) DeepCopyInto(out *DataProductList)
- func (in *DataProductList) DeepCopyObject() runtime.Object
- func (*DataProductList) Descriptor() ([]byte, []int)
- func (m *DataProductList) Marshal() (dAtA []byte, err error)
- func (m *DataProductList) MarshalTo(dAtA []byte) (int, error)
- func (m *DataProductList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataProductList) ProtoMessage()
- func (m *DataProductList) Reset()
- func (m *DataProductList) Size() (n int)
- func (this *DataProductList) String() string
- func (m *DataProductList) Unmarshal(dAtA []byte) error
- func (m *DataProductList) XXX_DiscardUnknown()
- func (m *DataProductList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataProductList) XXX_Merge(src proto.Message)
- func (m *DataProductList) XXX_Size() int
- func (m *DataProductList) XXX_Unmarshal(b []byte) error
- type DataProductSpec
- func (in *DataProductSpec) DeepCopy() *DataProductSpec
- func (in *DataProductSpec) DeepCopyInto(out *DataProductSpec)
- func (*DataProductSpec) Descriptor() ([]byte, []int)
- func (m *DataProductSpec) Marshal() (dAtA []byte, err error)
- func (m *DataProductSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *DataProductSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataProductSpec) ProtoMessage()
- func (m *DataProductSpec) Reset()
- func (m *DataProductSpec) Size() (n int)
- func (this *DataProductSpec) String() string
- func (m *DataProductSpec) Unmarshal(dAtA []byte) error
- func (m *DataProductSpec) XXX_DiscardUnknown()
- func (m *DataProductSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataProductSpec) XXX_Merge(src proto.Message)
- func (m *DataProductSpec) XXX_Size() int
- func (m *DataProductSpec) XXX_Unmarshal(b []byte) error
- type DataProductStatus
- func (in *DataProductStatus) DeepCopy() *DataProductStatus
- func (in *DataProductStatus) DeepCopyInto(out *DataProductStatus)
- func (*DataProductStatus) Descriptor() ([]byte, []int)
- func (m *DataProductStatus) Marshal() (dAtA []byte, err error)
- func (m *DataProductStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *DataProductStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataProductStatus) ProtoMessage()
- func (m *DataProductStatus) Reset()
- func (m *DataProductStatus) Size() (n int)
- func (this *DataProductStatus) String() string
- func (m *DataProductStatus) Unmarshal(dAtA []byte) error
- func (m *DataProductStatus) XXX_DiscardUnknown()
- func (m *DataProductStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataProductStatus) XXX_Merge(src proto.Message)
- func (m *DataProductStatus) XXX_Size() int
- func (m *DataProductStatus) XXX_Unmarshal(b []byte) error
- type DataProductVersion
- func (version *DataProductVersion) AddFinalizer()
- func (version DataProductVersion) Archived() bool
- func (version *DataProductVersion) CreateOrUpdateCond(cond metav1.Condition)
- func (in *DataProductVersion) DeepCopy() *DataProductVersion
- func (in *DataProductVersion) DeepCopyInto(out *DataProductVersion)
- func (in *DataProductVersion) DeepCopyObject() runtime.Object
- func (dp *DataProductVersion) Default()
- func (*DataProductVersion) Descriptor() ([]byte, []int)
- func (version DataProductVersion) GetCond(t string) metav1.Condition
- func (version DataProductVersion) GetCondIdx(t string) int
- func (version DataProductVersion) HasFinalizer() bool
- func (version DataProductVersion) IsReady() bool
- func (version *DataProductVersion) MarkArchived()
- func (version *DataProductVersion) MarkFailed(err error)
- func (version *DataProductVersion) MarkReady()
- func (m *DataProductVersion) Marshal() (dAtA []byte, err error)
- func (m *DataProductVersion) MarshalTo(dAtA []byte) (int, error)
- func (m *DataProductVersion) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (version DataProductVersion) MessageURI() string
- func (*DataProductVersion) ProtoMessage()
- func (version *DataProductVersion) RemoveFinalizer()
- func (m *DataProductVersion) Reset()
- func (version DataProductVersion) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *DataProductVersion) Size() (n int)
- func (this *DataProductVersion) String() string
- func (m *DataProductVersion) Unmarshal(dAtA []byte) error
- func (version DataProductVersion) ValidateCreate() error
- func (version DataProductVersion) ValidateDelete() error
- func (version DataProductVersion) ValidateUpdate(old runtime.Object) error
- func (m *DataProductVersion) XXX_DiscardUnknown()
- func (m *DataProductVersion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataProductVersion) XXX_Merge(src proto.Message)
- func (m *DataProductVersion) XXX_Size() int
- func (m *DataProductVersion) XXX_Unmarshal(b []byte) error
- func (version DataProductVersion) YamlURI() string
- type DataProductVersionConditionType
- type DataProductVersionList
- func (in *DataProductVersionList) DeepCopy() *DataProductVersionList
- func (in *DataProductVersionList) DeepCopyInto(out *DataProductVersionList)
- func (in *DataProductVersionList) DeepCopyObject() runtime.Object
- func (*DataProductVersionList) Descriptor() ([]byte, []int)
- func (m *DataProductVersionList) Marshal() (dAtA []byte, err error)
- func (m *DataProductVersionList) MarshalTo(dAtA []byte) (int, error)
- func (m *DataProductVersionList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataProductVersionList) ProtoMessage()
- func (m *DataProductVersionList) Reset()
- func (m *DataProductVersionList) Size() (n int)
- func (this *DataProductVersionList) String() string
- func (m *DataProductVersionList) Unmarshal(dAtA []byte) error
- func (m *DataProductVersionList) XXX_DiscardUnknown()
- func (m *DataProductVersionList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataProductVersionList) XXX_Merge(src proto.Message)
- func (m *DataProductVersionList) XXX_Size() int
- func (m *DataProductVersionList) XXX_Unmarshal(b []byte) error
- type DataProductVersionSpec
- func (in *DataProductVersionSpec) DeepCopy() *DataProductVersionSpec
- func (in *DataProductVersionSpec) DeepCopyInto(out *DataProductVersionSpec)
- func (*DataProductVersionSpec) Descriptor() ([]byte, []int)
- func (m *DataProductVersionSpec) Marshal() (dAtA []byte, err error)
- func (m *DataProductVersionSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *DataProductVersionSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataProductVersionSpec) ProtoMessage()
- func (m *DataProductVersionSpec) Reset()
- func (m *DataProductVersionSpec) Size() (n int)
- func (this *DataProductVersionSpec) String() string
- func (m *DataProductVersionSpec) Unmarshal(dAtA []byte) error
- func (m *DataProductVersionSpec) XXX_DiscardUnknown()
- func (m *DataProductVersionSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataProductVersionSpec) XXX_Merge(src proto.Message)
- func (m *DataProductVersionSpec) XXX_Size() int
- func (m *DataProductVersionSpec) XXX_Unmarshal(b []byte) error
- type DataProductVersionStatus
- func (in *DataProductVersionStatus) DeepCopy() *DataProductVersionStatus
- func (in *DataProductVersionStatus) DeepCopyInto(out *DataProductVersionStatus)
- func (*DataProductVersionStatus) Descriptor() ([]byte, []int)
- func (m *DataProductVersionStatus) Marshal() (dAtA []byte, err error)
- func (m *DataProductVersionStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *DataProductVersionStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataProductVersionStatus) ProtoMessage()
- func (m *DataProductVersionStatus) Reset()
- func (m *DataProductVersionStatus) Size() (n int)
- func (this *DataProductVersionStatus) String() string
- func (m *DataProductVersionStatus) Unmarshal(dAtA []byte) error
- func (m *DataProductVersionStatus) XXX_DiscardUnknown()
- func (m *DataProductVersionStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataProductVersionStatus) XXX_Merge(src proto.Message)
- func (m *DataProductVersionStatus) XXX_Size() int
- func (m *DataProductVersionStatus) XXX_Unmarshal(b []byte) error
- type DataSource
- func (datasource DataSource) ActiveColumns() (string, error)
- func (datasource *DataSource) AddColumn(name string, dtype catalog.DataType, dformat catalog.DataDomain, Ignore bool, ...)
- func (datasource *DataSource) AddFinalizer()
- func (datasource DataSource) CountActiveAttributes() int
- func (datasource DataSource) CountTargetAttributes() int
- func (datasource *DataSource) CreateOrUpdateCond(cond metav1.Condition)
- func (in *DataSource) DeepCopy() *DataSource
- func (in *DataSource) DeepCopyInto(out *DataSource)
- func (in *DataSource) DeepCopyObject() runtime.Object
- func (datasource *DataSource) Default()
- func (*DataSource) Descriptor() ([]byte, []int)
- func (datasource DataSource) GetCond(t string) metav1.Condition
- func (datasource DataSource) GetCondIdx(t string) int
- func (datasource DataSource) HasFinalizer() bool
- func (datasource *DataSource) HaveValidationRules() bool
- func (datasource DataSource) InferTask() catalog.MLTask
- func (datasource DataSource) IsReady() bool
- func (datasource DataSource) Key() string
- func (datasource DataSource) ManifestURI() string
- func (datasource *DataSource) MarkFieldAsTarget(target string)
- func (datasource *DataSource) MarkLastFieldAsTarget()
- func (datasource *DataSource) MarkReady()
- func (datasource *DataSource) MarkSaved()
- func (m *DataSource) Marshal() (dAtA []byte, err error)
- func (m *DataSource) MarshalTo(dAtA []byte) (int, error)
- func (m *DataSource) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (datasource *DataSource) Poplulate()
- func (*DataSource) ProtoMessage()
- func (datasource *DataSource) RemoveFinalizer()
- func (m *DataSource) Reset()
- func (datasource DataSource) RootURI() string
- func (datasource DataSource) Saved() bool
- func (datasource *DataSource) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *DataSource) Size() (n int)
- func (this *DataSource) String() string
- func (m *DataSource) Unmarshal(dAtA []byte) error
- func (datasource DataSource) Validate() (bool, []metav1.StatusCause)
- func (datasource DataSource) ValidateCreate() error
- func (datasource DataSource) ValidateDelete() error
- func (datasource DataSource) ValidateUpdate(old runtime.Object) error
- func (m *DataSource) XXX_DiscardUnknown()
- func (m *DataSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataSource) XXX_Merge(src proto.Message)
- func (m *DataSource) XXX_Size() int
- func (m *DataSource) XXX_Unmarshal(b []byte) error
- type DataSourceConditionType
- type DataSourceList
- func (in *DataSourceList) DeepCopy() *DataSourceList
- func (in *DataSourceList) DeepCopyInto(out *DataSourceList)
- func (in *DataSourceList) DeepCopyObject() runtime.Object
- func (*DataSourceList) Descriptor() ([]byte, []int)
- func (m *DataSourceList) Marshal() (dAtA []byte, err error)
- func (m *DataSourceList) MarshalTo(dAtA []byte) (int, error)
- func (m *DataSourceList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataSourceList) ProtoMessage()
- func (m *DataSourceList) Reset()
- func (m *DataSourceList) Size() (n int)
- func (this *DataSourceList) String() string
- func (m *DataSourceList) Unmarshal(dAtA []byte) error
- func (m *DataSourceList) XXX_DiscardUnknown()
- func (m *DataSourceList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataSourceList) XXX_Merge(src proto.Message)
- func (m *DataSourceList) XXX_Size() int
- func (m *DataSourceList) XXX_Unmarshal(b []byte) error
- type DataSourceSpec
- func (in *DataSourceSpec) DeepCopy() *DataSourceSpec
- func (in *DataSourceSpec) DeepCopyInto(out *DataSourceSpec)
- func (*DataSourceSpec) Descriptor() ([]byte, []int)
- func (m *DataSourceSpec) Marshal() (dAtA []byte, err error)
- func (m *DataSourceSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *DataSourceSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataSourceSpec) ProtoMessage()
- func (m *DataSourceSpec) Reset()
- func (m *DataSourceSpec) Size() (n int)
- func (this *DataSourceSpec) String() string
- func (m *DataSourceSpec) Unmarshal(dAtA []byte) error
- func (m *DataSourceSpec) XXX_DiscardUnknown()
- func (m *DataSourceSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataSourceSpec) XXX_Merge(src proto.Message)
- func (m *DataSourceSpec) XXX_Size() int
- func (m *DataSourceSpec) XXX_Unmarshal(b []byte) error
- type DataSourceStatus
- func (in *DataSourceStatus) DeepCopy() *DataSourceStatus
- func (in *DataSourceStatus) DeepCopyInto(out *DataSourceStatus)
- func (*DataSourceStatus) Descriptor() ([]byte, []int)
- func (m *DataSourceStatus) Marshal() (dAtA []byte, err error)
- func (m *DataSourceStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *DataSourceStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DataSourceStatus) ProtoMessage()
- func (m *DataSourceStatus) Reset()
- func (m *DataSourceStatus) Size() (n int)
- func (this *DataSourceStatus) String() string
- func (m *DataSourceStatus) Unmarshal(dAtA []byte) error
- func (m *DataSourceStatus) XXX_DiscardUnknown()
- func (m *DataSourceStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DataSourceStatus) XXX_Merge(src proto.Message)
- func (m *DataSourceStatus) XXX_Size() int
- func (m *DataSourceStatus) XXX_Unmarshal(b []byte) error
- type DatabaseServerType
- type Dataset
- func (dataset *Dataset) AddFinalizer()
- func (dataset Dataset) Archived() bool
- func (dataset Dataset) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert
- func (dataset Dataset) ConstuctFeatureHistogram() (*FeatureHistogram, error)
- func (dataset *Dataset) CreateOrUpdateCond(cond metav1.Condition)
- func (in *Dataset) DeepCopy() *Dataset
- func (in *Dataset) DeepCopyInto(out *Dataset)
- func (in *Dataset) DeepCopyObject() runtime.Object
- func (dataset *Dataset) Default()
- func (dataset Dataset) Deleted() bool
- func (*Dataset) Descriptor() ([]byte, []int)
- func (dataset Dataset) DriftColumnNames() []string
- func (dataset Dataset) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert
- func (dataset Dataset) Generated() bool
- func (dataset Dataset) GetColumn(name string) (*ColumnStatistics, error)
- func (dataset Dataset) GetCond(t string) metav1.Condition
- func (dataset Dataset) GetCondIdx(t string) int
- func (dataset Dataset) GroupDataFile() string
- func (dataset Dataset) GroupDataFolder() string
- func (dataset Dataset) GroupFolder() string
- func (dataset *Dataset) GroupForecastFile() string
- func (dataset Dataset) GroupProfileFolder() string
- func (dataset Dataset) GroupReportFile() string
- func (dataset Dataset) Grouped() bool
- func (dataset Dataset) GroupsFolder() string
- func (dataset Dataset) HasFinalizer() bool
- func (dataset Dataset) IndexFileKey() string
- func (dataset Dataset) Ingested() bool
- func (dataset Dataset) IsFailed() bool
- func (dataset Dataset) IsFeatureGroup() bool
- func (dataset Dataset) IsGroup() bool
- func (dataset Dataset) IsInCond(ct string) bool
- func (dataset Dataset) IsReady() bool
- func (dataset Dataset) ManifestURI() string
- func (dataset *Dataset) MarkArchived()
- func (dataset *Dataset) MarkGenerated()
- func (dataset *Dataset) MarkGeneratedFailed(msg string)
- func (dataset *Dataset) MarkGenerting()
- func (dataset *Dataset) MarkGroupFailed(msg string)
- func (dataset *Dataset) MarkGroupSuccess()
- func (dataset *Dataset) MarkGrouping()
- func (dataset *Dataset) MarkIngestFailed(msg string)
- func (dataset *Dataset) MarkIngested()
- func (dataset *Dataset) MarkIngesting()
- func (dataset *Dataset) MarkProfiled(uri string)
- func (dataset *Dataset) MarkProfiledFailed(msg string)
- func (dataset *Dataset) MarkProfiling()
- func (dataset *Dataset) MarkReady()
- func (dataset *Dataset) MarkReportFailed(msg string)
- func (dataset *Dataset) MarkReported()
- func (dataset *Dataset) MarkReporting()
- func (dataset *Dataset) MarkSaved()
- func (dataset *Dataset) MarkSkewColumns()
- func (dataset *Dataset) MarkSnapshotFailed(msg string)
- func (dataset *Dataset) MarkSnapshotSuccess()
- func (dataset *Dataset) MarkTakingSnapshot()
- func (dataset *Dataset) MarkUnitTestFailed(msg string)
- func (dataset *Dataset) MarkUnitTested()
- func (dataset *Dataset) MarkUnitTesting()
- func (m *Dataset) Marshal() (dAtA []byte, err error)
- func (m *Dataset) MarshalTo(dAtA []byte) (int, error)
- func (m *Dataset) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (r *Dataset) OpName() string
- func (dataset *Dataset) Populate(name string)
- func (dataset Dataset) PrintConditions()
- func (dataset Dataset) ProfileURI() string
- func (dataset Dataset) Profiled() bool
- func (*Dataset) ProtoMessage()
- func (dataset *Dataset) RemoveFinalizer()
- func (dataset Dataset) ReportName() string
- func (dataset Dataset) ReportURI() string
- func (dataset Dataset) Reported() bool
- func (m *Dataset) Reset()
- func (dataset Dataset) RootURI() string
- func (dataset Dataset) Saved() bool
- func (r *Dataset) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *Dataset) Size() (n int)
- func (dataset Dataset) Snapshotted() bool
- func (dataset Dataset) StatusString() string
- func (this *Dataset) String() string
- func (dataset Dataset) TaskIndexFileKey(task string) string
- func (dataset Dataset) UnitTested() bool
- func (m *Dataset) Unmarshal(dAtA []byte) error
- func (dataset *Dataset) UpdatePhaseFromConditions()
- func (dataset Dataset) ValidateCreate() error
- func (dataset Dataset) ValidateDelete() error
- func (dataset Dataset) ValidateUpdate(old runtime.Object) error
- func (dataset Dataset) WorkerIndexFileKey(workerIndex int, task string) string
- func (m *Dataset) XXX_DiscardUnknown()
- func (m *Dataset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Dataset) XXX_Merge(src proto.Message)
- func (m *Dataset) XXX_Size() int
- func (m *Dataset) XXX_Unmarshal(b []byte) error
- type DatasetConditionType
- type DatasetGroupByStatus
- func (in *DatasetGroupByStatus) DeepCopy() *DatasetGroupByStatus
- func (in *DatasetGroupByStatus) DeepCopyInto(out *DatasetGroupByStatus)
- func (*DatasetGroupByStatus) Descriptor() ([]byte, []int)
- func (m *DatasetGroupByStatus) Marshal() (dAtA []byte, err error)
- func (m *DatasetGroupByStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *DatasetGroupByStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DatasetGroupByStatus) ProtoMessage()
- func (m *DatasetGroupByStatus) Reset()
- func (m *DatasetGroupByStatus) Size() (n int)
- func (this *DatasetGroupByStatus) String() string
- func (m *DatasetGroupByStatus) Unmarshal(dAtA []byte) error
- func (m *DatasetGroupByStatus) XXX_DiscardUnknown()
- func (m *DatasetGroupByStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DatasetGroupByStatus) XXX_Merge(src proto.Message)
- func (m *DatasetGroupByStatus) XXX_Size() int
- func (m *DatasetGroupByStatus) XXX_Unmarshal(b []byte) error
- type DatasetList
- func (in *DatasetList) DeepCopy() *DatasetList
- func (in *DatasetList) DeepCopyInto(out *DatasetList)
- func (in *DatasetList) DeepCopyObject() runtime.Object
- func (*DatasetList) Descriptor() ([]byte, []int)
- func (m *DatasetList) Marshal() (dAtA []byte, err error)
- func (m *DatasetList) MarshalTo(dAtA []byte) (int, error)
- func (m *DatasetList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DatasetList) ProtoMessage()
- func (m *DatasetList) Reset()
- func (m *DatasetList) Size() (n int)
- func (this *DatasetList) String() string
- func (m *DatasetList) Unmarshal(dAtA []byte) error
- func (m *DatasetList) XXX_DiscardUnknown()
- func (m *DatasetList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DatasetList) XXX_Merge(src proto.Message)
- func (m *DatasetList) XXX_Size() int
- func (m *DatasetList) XXX_Unmarshal(b []byte) error
- type DatasetPhase
- type DatasetRole
- type DatasetSpec
- func (in *DatasetSpec) DeepCopy() *DatasetSpec
- func (in *DatasetSpec) DeepCopyInto(out *DatasetSpec)
- func (*DatasetSpec) Descriptor() ([]byte, []int)
- func (m *DatasetSpec) Marshal() (dAtA []byte, err error)
- func (m *DatasetSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *DatasetSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DatasetSpec) ProtoMessage()
- func (m *DatasetSpec) Reset()
- func (m *DatasetSpec) Size() (n int)
- func (this *DatasetSpec) String() string
- func (m *DatasetSpec) Unmarshal(dAtA []byte) error
- func (m *DatasetSpec) XXX_DiscardUnknown()
- func (m *DatasetSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DatasetSpec) XXX_Merge(src proto.Message)
- func (m *DatasetSpec) XXX_Size() int
- func (m *DatasetSpec) XXX_Unmarshal(b []byte) error
- type DatasetStatistics
- func (in *DatasetStatistics) DeepCopy() *DatasetStatistics
- func (in *DatasetStatistics) DeepCopyInto(out *DatasetStatistics)
- func (*DatasetStatistics) Descriptor() ([]byte, []int)
- func (m *DatasetStatistics) Marshal() (dAtA []byte, err error)
- func (m *DatasetStatistics) MarshalTo(dAtA []byte) (int, error)
- func (m *DatasetStatistics) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DatasetStatistics) ProtoMessage()
- func (m *DatasetStatistics) Reset()
- func (m *DatasetStatistics) Size() (n int)
- func (this *DatasetStatistics) String() string
- func (m *DatasetStatistics) Unmarshal(dAtA []byte) error
- func (m *DatasetStatistics) XXX_DiscardUnknown()
- func (m *DatasetStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DatasetStatistics) XXX_Merge(src proto.Message)
- func (m *DatasetStatistics) XXX_Size() int
- func (m *DatasetStatistics) XXX_Unmarshal(b []byte) error
- type DatasetStatus
- func (in *DatasetStatus) DeepCopy() *DatasetStatus
- func (in *DatasetStatus) DeepCopyInto(out *DatasetStatus)
- func (*DatasetStatus) Descriptor() ([]byte, []int)
- func (m *DatasetStatus) Marshal() (dAtA []byte, err error)
- func (m *DatasetStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *DatasetStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DatasetStatus) ProtoMessage()
- func (m *DatasetStatus) Reset()
- func (m *DatasetStatus) Size() (n int)
- func (this *DatasetStatus) String() string
- func (m *DatasetStatus) Unmarshal(dAtA []byte) error
- func (m *DatasetStatus) XXX_DiscardUnknown()
- func (m *DatasetStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DatasetStatus) XXX_Merge(src proto.Message)
- func (m *DatasetStatus) XXX_Size() int
- func (m *DatasetStatus) XXX_Unmarshal(b []byte) error
- type DatasetTemplate
- func (in *DatasetTemplate) DeepCopy() *DatasetTemplate
- func (in *DatasetTemplate) DeepCopyInto(out *DatasetTemplate)
- func (*DatasetTemplate) Descriptor() ([]byte, []int)
- func (m *DatasetTemplate) Marshal() (dAtA []byte, err error)
- func (m *DatasetTemplate) MarshalTo(dAtA []byte) (int, error)
- func (m *DatasetTemplate) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DatasetTemplate) ProtoMessage()
- func (m *DatasetTemplate) Reset()
- func (m *DatasetTemplate) Size() (n int)
- func (this *DatasetTemplate) String() string
- func (m *DatasetTemplate) Unmarshal(dAtA []byte) error
- func (m *DatasetTemplate) XXX_DiscardUnknown()
- func (m *DatasetTemplate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DatasetTemplate) XXX_Merge(src proto.Message)
- func (m *DatasetTemplate) XXX_Size() int
- func (m *DatasetTemplate) XXX_Unmarshal(b []byte) error
- type Delimiter
- type DriftThreshold
- func (in *DriftThreshold) DeepCopy() *DriftThreshold
- func (in *DriftThreshold) DeepCopyInto(out *DriftThreshold)
- func (*DriftThreshold) Descriptor() ([]byte, []int)
- func (m *DriftThreshold) Marshal() (dAtA []byte, err error)
- func (m *DriftThreshold) MarshalTo(dAtA []byte) (int, error)
- func (m *DriftThreshold) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*DriftThreshold) ProtoMessage()
- func (m *DriftThreshold) Reset()
- func (m *DriftThreshold) Size() (n int)
- func (this *DriftThreshold) String() string
- func (m *DriftThreshold) Unmarshal(dAtA []byte) error
- func (m *DriftThreshold) XXX_DiscardUnknown()
- func (m *DriftThreshold) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *DriftThreshold) XXX_Merge(src proto.Message)
- func (m *DriftThreshold) XXX_Size() int
- func (m *DriftThreshold) XXX_Unmarshal(b []byte) error
- type Entity
- func (entity *Entity) AddFinalizer()
- func (entity Entity) Archived() bool
- func (entity *Entity) CreateOrUpdateCond(cond metav1.Condition)
- func (in *Entity) DeepCopy() *Entity
- func (in *Entity) DeepCopyInto(out *Entity)
- func (in *Entity) DeepCopyObject() runtime.Object
- func (entity *Entity) Default()
- func (*Entity) Descriptor() ([]byte, []int)
- func (entity *Entity) GetCond(t string) metav1.Condition
- func (entity *Entity) GetCondIdx(t string) int
- func (entity Entity) HasFinalizer() bool
- func (entity *Entity) IsGitObj() bool
- func (entity Entity) IsReady() bool
- func (entity Entity) Key() string
- func (entity *Entity) LabelWithCommit(commit string, uname string, branch string)
- func (entity *Entity) MarkArchived()
- func (entity *Entity) MarkReady()
- func (m *Entity) Marshal() (dAtA []byte, err error)
- func (m *Entity) MarshalTo(dAtA []byte) (int, error)
- func (m *Entity) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*Entity) ProtoMessage()
- func (entity *Entity) RemoveFinalizer()
- func (entity Entity) RepEntry() (string, error)
- func (entity Entity) RepPath(root string) (string, error)
- func (m *Entity) Reset()
- func (entity *Entity) SetChanged()
- func (entity *Entity) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *Entity) Size() (n int)
- func (this *Entity) String() string
- func (m *Entity) Unmarshal(dAtA []byte) error
- func (entity Entity) ValidateCreate() error
- func (entity Entity) ValidateDelete() error
- func (entity Entity) ValidateUpdate(old runtime.Object) error
- func (m *Entity) XXX_DiscardUnknown()
- func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Entity) XXX_Merge(src proto.Message)
- func (m *Entity) XXX_Size() int
- func (m *Entity) XXX_Unmarshal(b []byte) error
- type EntityCondition
- func (in *EntityCondition) DeepCopy() *EntityCondition
- func (in *EntityCondition) DeepCopyInto(out *EntityCondition)
- func (*EntityCondition) Descriptor() ([]byte, []int)
- func (m *EntityCondition) Marshal() (dAtA []byte, err error)
- func (m *EntityCondition) MarshalTo(dAtA []byte) (int, error)
- func (m *EntityCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*EntityCondition) ProtoMessage()
- func (m *EntityCondition) Reset()
- func (m *EntityCondition) Size() (n int)
- func (this *EntityCondition) String() string
- func (m *EntityCondition) Unmarshal(dAtA []byte) error
- func (m *EntityCondition) XXX_DiscardUnknown()
- func (m *EntityCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *EntityCondition) XXX_Merge(src proto.Message)
- func (m *EntityCondition) XXX_Size() int
- func (m *EntityCondition) XXX_Unmarshal(b []byte) error
- type EntityConditionType
- type EntityList
- func (in *EntityList) DeepCopy() *EntityList
- func (in *EntityList) DeepCopyInto(out *EntityList)
- func (in *EntityList) DeepCopyObject() runtime.Object
- func (*EntityList) Descriptor() ([]byte, []int)
- func (m *EntityList) Marshal() (dAtA []byte, err error)
- func (m *EntityList) MarshalTo(dAtA []byte) (int, error)
- func (m *EntityList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*EntityList) ProtoMessage()
- func (m *EntityList) Reset()
- func (m *EntityList) Size() (n int)
- func (this *EntityList) String() string
- func (m *EntityList) Unmarshal(dAtA []byte) error
- func (m *EntityList) XXX_DiscardUnknown()
- func (m *EntityList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *EntityList) XXX_Merge(src proto.Message)
- func (m *EntityList) XXX_Size() int
- func (m *EntityList) XXX_Unmarshal(b []byte) error
- type EntitySpec
- func (in *EntitySpec) DeepCopy() *EntitySpec
- func (in *EntitySpec) DeepCopyInto(out *EntitySpec)
- func (*EntitySpec) Descriptor() ([]byte, []int)
- func (m *EntitySpec) Marshal() (dAtA []byte, err error)
- func (m *EntitySpec) MarshalTo(dAtA []byte) (int, error)
- func (m *EntitySpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*EntitySpec) ProtoMessage()
- func (m *EntitySpec) Reset()
- func (m *EntitySpec) Size() (n int)
- func (this *EntitySpec) String() string
- func (m *EntitySpec) Unmarshal(dAtA []byte) error
- func (m *EntitySpec) XXX_DiscardUnknown()
- func (m *EntitySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *EntitySpec) XXX_Merge(src proto.Message)
- func (m *EntitySpec) XXX_Size() int
- func (m *EntitySpec) XXX_Unmarshal(b []byte) error
- type EntityStatus
- func (in *EntityStatus) DeepCopy() *EntityStatus
- func (in *EntityStatus) DeepCopyInto(out *EntityStatus)
- func (*EntityStatus) Descriptor() ([]byte, []int)
- func (m *EntityStatus) Marshal() (dAtA []byte, err error)
- func (m *EntityStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *EntityStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*EntityStatus) ProtoMessage()
- func (m *EntityStatus) Reset()
- func (m *EntityStatus) Size() (n int)
- func (this *EntityStatus) String() string
- func (m *EntityStatus) Unmarshal(dAtA []byte) error
- func (m *EntityStatus) XXX_DiscardUnknown()
- func (m *EntityStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *EntityStatus) XXX_Merge(src proto.Message)
- func (m *EntityStatus) XXX_Size() int
- func (m *EntityStatus) XXX_Unmarshal(b []byte) error
- type EscapeChar
- type ExcelNotebookSpec
- func (in *ExcelNotebookSpec) DeepCopy() *ExcelNotebookSpec
- func (in *ExcelNotebookSpec) DeepCopyInto(out *ExcelNotebookSpec)
- func (*ExcelNotebookSpec) Descriptor() ([]byte, []int)
- func (m *ExcelNotebookSpec) Marshal() (dAtA []byte, err error)
- func (m *ExcelNotebookSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *ExcelNotebookSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ExcelNotebookSpec) ProtoMessage()
- func (m *ExcelNotebookSpec) Reset()
- func (m *ExcelNotebookSpec) Size() (n int)
- func (this *ExcelNotebookSpec) String() string
- func (m *ExcelNotebookSpec) Unmarshal(dAtA []byte) error
- func (m *ExcelNotebookSpec) XXX_DiscardUnknown()
- func (m *ExcelNotebookSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ExcelNotebookSpec) XXX_Merge(src proto.Message)
- func (m *ExcelNotebookSpec) XXX_Size() int
- func (m *ExcelNotebookSpec) XXX_Unmarshal(b []byte) error
- type ExcelSheetArea
- func (in *ExcelSheetArea) DeepCopy() *ExcelSheetArea
- func (in *ExcelSheetArea) DeepCopyInto(out *ExcelSheetArea)
- func (*ExcelSheetArea) Descriptor() ([]byte, []int)
- func (m *ExcelSheetArea) Marshal() (dAtA []byte, err error)
- func (m *ExcelSheetArea) MarshalTo(dAtA []byte) (int, error)
- func (m *ExcelSheetArea) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ExcelSheetArea) ProtoMessage()
- func (m *ExcelSheetArea) Reset()
- func (m *ExcelSheetArea) Size() (n int)
- func (this *ExcelSheetArea) String() string
- func (m *ExcelSheetArea) Unmarshal(dAtA []byte) error
- func (m *ExcelSheetArea) XXX_DiscardUnknown()
- func (m *ExcelSheetArea) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ExcelSheetArea) XXX_Merge(src proto.Message)
- func (m *ExcelSheetArea) XXX_Size() int
- func (m *ExcelSheetArea) XXX_Unmarshal(b []byte) error
- type FeatureGroup
- func (fg *FeatureGroup) AddConfiditions()
- func (fg *FeatureGroup) AddFinalizer()
- func (fg FeatureGroup) Archived() bool
- func (fg *FeatureGroup) CreateOrUpdateCond(cond metav1.Condition)
- func (in *FeatureGroup) DeepCopy() *FeatureGroup
- func (in *FeatureGroup) DeepCopyInto(out *FeatureGroup)
- func (in *FeatureGroup) DeepCopyObject() runtime.Object
- func (fg *FeatureGroup) Default()
- func (*FeatureGroup) Descriptor() ([]byte, []int)
- func (fh FeatureGroup) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert
- func (fg FeatureGroup) GetCond(t string) metav1.Condition
- func (fg FeatureGroup) GetCondIdx(t string) int
- func (fg *FeatureGroup) HasFinalizer() bool
- func (fg FeatureGroup) IsDeleted() bool
- func (fg *FeatureGroup) IsGitObj() bool
- func (fg *FeatureGroup) IsIngesting() bool
- func (fg FeatureGroup) IsReady() bool
- func (fg *FeatureGroup) IsSynced() bool
- func (fg *FeatureGroup) IsSynching() bool
- func (fg FeatureGroup) Key() string
- func (fg *FeatureGroup) LabelWithCommit(commit string, uname string, branch string)
- func (fg *FeatureGroup) MarkArchived()
- func (fg *FeatureGroup) MarkIngestFailed(msg string)
- func (fg *FeatureGroup) MarkIngested()
- func (fg *FeatureGroup) MarkIngesting()
- func (fg *FeatureGroup) MarkReady()
- func (fg *FeatureGroup) MarkSyncFailed(msg string)
- func (fg *FeatureGroup) MarkSynced()
- func (fg *FeatureGroup) MarkSyncing()
- func (m *FeatureGroup) Marshal() (dAtA []byte, err error)
- func (m *FeatureGroup) MarshalTo(dAtA []byte) (int, error)
- func (m *FeatureGroup) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (fg *FeatureGroup) PrefixLiveURI(path string) string
- func (*FeatureGroup) ProtoMessage()
- func (fg *FeatureGroup) RemoveFinalizer()
- func (fg *FeatureGroup) RepEntry() (string, error)
- func (fg *FeatureGroup) RepPath(root string) (string, error)
- func (m *FeatureGroup) Reset()
- func (fg *FeatureGroup) SetChanged()
- func (fg *FeatureGroup) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *FeatureGroup) Size() (n int)
- func (this *FeatureGroup) String() string
- func (fg FeatureGroup) TenantName() string
- func (m *FeatureGroup) Unmarshal(dAtA []byte) error
- func (fg FeatureGroup) ValidateCreate() error
- func (fg FeatureGroup) ValidateDelete() error
- func (fg FeatureGroup) ValidateUpdate(old runtime.Object) error
- func (m *FeatureGroup) XXX_DiscardUnknown()
- func (m *FeatureGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *FeatureGroup) XXX_Merge(src proto.Message)
- func (m *FeatureGroup) XXX_Size() int
- func (m *FeatureGroup) XXX_Unmarshal(b []byte) error
- type FeatureGroupConditionType
- type FeatureGroupList
- func (in *FeatureGroupList) DeepCopy() *FeatureGroupList
- func (in *FeatureGroupList) DeepCopyInto(out *FeatureGroupList)
- func (in *FeatureGroupList) DeepCopyObject() runtime.Object
- func (*FeatureGroupList) Descriptor() ([]byte, []int)
- func (m *FeatureGroupList) Marshal() (dAtA []byte, err error)
- func (m *FeatureGroupList) MarshalTo(dAtA []byte) (int, error)
- func (m *FeatureGroupList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*FeatureGroupList) ProtoMessage()
- func (m *FeatureGroupList) Reset()
- func (m *FeatureGroupList) Size() (n int)
- func (this *FeatureGroupList) String() string
- func (m *FeatureGroupList) Unmarshal(dAtA []byte) error
- func (m *FeatureGroupList) XXX_DiscardUnknown()
- func (m *FeatureGroupList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *FeatureGroupList) XXX_Merge(src proto.Message)
- func (m *FeatureGroupList) XXX_Size() int
- func (m *FeatureGroupList) XXX_Unmarshal(b []byte) error
- type FeatureGroupPhase
- type FeatureGroupSpec
- func (in *FeatureGroupSpec) DeepCopy() *FeatureGroupSpec
- func (in *FeatureGroupSpec) DeepCopyInto(out *FeatureGroupSpec)
- func (*FeatureGroupSpec) Descriptor() ([]byte, []int)
- func (m *FeatureGroupSpec) Marshal() (dAtA []byte, err error)
- func (m *FeatureGroupSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *FeatureGroupSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*FeatureGroupSpec) ProtoMessage()
- func (m *FeatureGroupSpec) Reset()
- func (m *FeatureGroupSpec) Size() (n int)
- func (this *FeatureGroupSpec) String() string
- func (m *FeatureGroupSpec) Unmarshal(dAtA []byte) error
- func (m *FeatureGroupSpec) XXX_DiscardUnknown()
- func (m *FeatureGroupSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *FeatureGroupSpec) XXX_Merge(src proto.Message)
- func (m *FeatureGroupSpec) XXX_Size() int
- func (m *FeatureGroupSpec) XXX_Unmarshal(b []byte) error
- type FeatureGroupStatus
- func (in *FeatureGroupStatus) DeepCopy() *FeatureGroupStatus
- func (in *FeatureGroupStatus) DeepCopyInto(out *FeatureGroupStatus)
- func (*FeatureGroupStatus) Descriptor() ([]byte, []int)
- func (m *FeatureGroupStatus) Marshal() (dAtA []byte, err error)
- func (m *FeatureGroupStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *FeatureGroupStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*FeatureGroupStatus) ProtoMessage()
- func (m *FeatureGroupStatus) Reset()
- func (m *FeatureGroupStatus) Size() (n int)
- func (this *FeatureGroupStatus) String() string
- func (m *FeatureGroupStatus) Unmarshal(dAtA []byte) error
- func (m *FeatureGroupStatus) XXX_DiscardUnknown()
- func (m *FeatureGroupStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *FeatureGroupStatus) XXX_Merge(src proto.Message)
- func (m *FeatureGroupStatus) XXX_Size() int
- func (m *FeatureGroupStatus) XXX_Unmarshal(b []byte) error
- type FeatureHistogram
- func (fh *FeatureHistogram) AddConditions()
- func (fh *FeatureHistogram) AddFinalizer()
- func (fh FeatureHistogram) Archived() bool
- func (fh *FeatureHistogram) CreateOrUpdateCond(cond metav1.Condition)
- func (in *FeatureHistogram) DeepCopy() *FeatureHistogram
- func (in *FeatureHistogram) DeepCopyInto(out *FeatureHistogram)
- func (in *FeatureHistogram) DeepCopyObject() runtime.Object
- func (fh *FeatureHistogram) Default()
- func (fh *FeatureHistogram) DefaultDriftThreshold(metric catalog.Metric) float64
- func (*FeatureHistogram) Descriptor() ([]byte, []int)
- func (fh FeatureHistogram) DriftAlert(tenantRef *v1.ObjectReference, notifierName *string, columns []string) *infra.Alert
- func (fh *FeatureHistogram) Drifted() bool
- func (fh FeatureHistogram) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert
- func (fh *FeatureHistogram) Expired() bool
- func (fh *FeatureHistogram) GetCond(t string) metav1.Condition
- func (fh FeatureHistogram) GetCondIdx(t string) int
- func (fh *FeatureHistogram) HasFinalizer() bool
- func (fh FeatureHistogram) IsArchived() bool
- func (fh *FeatureHistogram) IsGitObj() bool
- func (fh FeatureHistogram) IsReady() bool
- func (fh FeatureHistogram) Key() string
- func (fh *FeatureHistogram) LabelWithCommit(commit string, uname string, branch string)
- func (fh FeatureHistogram) Live() bool
- func (fh *FeatureHistogram) MarkArchived()
- func (fh *FeatureHistogram) MarkDrift()
- func (fh *FeatureHistogram) MarkExpired()
- func (fh *FeatureHistogram) MarkFailed(msg string)
- func (fh *FeatureHistogram) MarkGenTest()
- func (fh *FeatureHistogram) MarkLive()
- func (fh *FeatureHistogram) MarkReady()
- func (fh *FeatureHistogram) MarkReadyToTest()
- func (fh *FeatureHistogram) MarkUnitTestFailed(msg string, stop bool)
- func (fh *FeatureHistogram) MarkUnitTested()
- func (fh *FeatureHistogram) MarkUnitTesting()
- func (m *FeatureHistogram) Marshal() (dAtA []byte, err error)
- func (m *FeatureHistogram) MarshalTo(dAtA []byte) (int, error)
- func (m *FeatureHistogram) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*FeatureHistogram) ProtoMessage()
- func (fh *FeatureHistogram) RemoveFinalizer()
- func (fh *FeatureHistogram) RepEntry() (string, error)
- func (fh *FeatureHistogram) RepPath(root string) (string, error)
- func (m *FeatureHistogram) Reset()
- func (fh *FeatureHistogram) SetChanged()
- func (fh *FeatureHistogram) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (fh *FeatureHistogram) ShouldDetectDriftForColumn(column string) bool
- func (fh *FeatureHistogram) ShouldExpire(maxPredictions int32) bool
- func (fh *FeatureHistogram) ShouldGenerateUnitTest() bool
- func (fh *FeatureHistogram) ShouldUnitTest() bool
- func (m *FeatureHistogram) Size() (n int)
- func (this *FeatureHistogram) String() string
- func (fh FeatureHistogram) UnitTested() bool
- func (m *FeatureHistogram) Unmarshal(dAtA []byte) error
- func (fh FeatureHistogram) ValidateCreate() error
- func (fh FeatureHistogram) ValidateDelete() error
- func (fh FeatureHistogram) ValidateUpdate(old runtime.Object) error
- func (m *FeatureHistogram) XXX_DiscardUnknown()
- func (m *FeatureHistogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *FeatureHistogram) XXX_Merge(src proto.Message)
- func (m *FeatureHistogram) XXX_Size() int
- func (m *FeatureHistogram) XXX_Unmarshal(b []byte) error
- type FeatureHistogramConditionType
- type FeatureHistogramList
- func (in *FeatureHistogramList) DeepCopy() *FeatureHistogramList
- func (in *FeatureHistogramList) DeepCopyInto(out *FeatureHistogramList)
- func (in *FeatureHistogramList) DeepCopyObject() runtime.Object
- func (*FeatureHistogramList) Descriptor() ([]byte, []int)
- func (m *FeatureHistogramList) Marshal() (dAtA []byte, err error)
- func (m *FeatureHistogramList) MarshalTo(dAtA []byte) (int, error)
- func (m *FeatureHistogramList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*FeatureHistogramList) ProtoMessage()
- func (m *FeatureHistogramList) Reset()
- func (m *FeatureHistogramList) Size() (n int)
- func (this *FeatureHistogramList) String() string
- func (m *FeatureHistogramList) Unmarshal(dAtA []byte) error
- func (m *FeatureHistogramList) XXX_DiscardUnknown()
- func (m *FeatureHistogramList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *FeatureHistogramList) XXX_Merge(src proto.Message)
- func (m *FeatureHistogramList) XXX_Size() int
- func (m *FeatureHistogramList) XXX_Unmarshal(b []byte) error
- type FeatureHistogramPhase
- type FeatureHistogramSpec
- func (in *FeatureHistogramSpec) DeepCopy() *FeatureHistogramSpec
- func (in *FeatureHistogramSpec) DeepCopyInto(out *FeatureHistogramSpec)
- func (*FeatureHistogramSpec) Descriptor() ([]byte, []int)
- func (m *FeatureHistogramSpec) Marshal() (dAtA []byte, err error)
- func (m *FeatureHistogramSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *FeatureHistogramSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*FeatureHistogramSpec) ProtoMessage()
- func (m *FeatureHistogramSpec) Reset()
- func (m *FeatureHistogramSpec) Size() (n int)
- func (this *FeatureHistogramSpec) String() string
- func (m *FeatureHistogramSpec) Unmarshal(dAtA []byte) error
- func (m *FeatureHistogramSpec) XXX_DiscardUnknown()
- func (m *FeatureHistogramSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *FeatureHistogramSpec) XXX_Merge(src proto.Message)
- func (m *FeatureHistogramSpec) XXX_Size() int
- func (m *FeatureHistogramSpec) XXX_Unmarshal(b []byte) error
- type FeatureHistogramStatus
- func (in *FeatureHistogramStatus) DeepCopy() *FeatureHistogramStatus
- func (in *FeatureHistogramStatus) DeepCopyInto(out *FeatureHistogramStatus)
- func (*FeatureHistogramStatus) Descriptor() ([]byte, []int)
- func (m *FeatureHistogramStatus) Marshal() (dAtA []byte, err error)
- func (m *FeatureHistogramStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *FeatureHistogramStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*FeatureHistogramStatus) ProtoMessage()
- func (m *FeatureHistogramStatus) Reset()
- func (m *FeatureHistogramStatus) Size() (n int)
- func (this *FeatureHistogramStatus) String() string
- func (m *FeatureHistogramStatus) Unmarshal(dAtA []byte) error
- func (m *FeatureHistogramStatus) XXX_DiscardUnknown()
- func (m *FeatureHistogramStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *FeatureHistogramStatus) XXX_Merge(src proto.Message)
- func (m *FeatureHistogramStatus) XXX_Size() int
- func (m *FeatureHistogramStatus) XXX_Unmarshal(b []byte) error
- type FlatFileFormatSpec
- func (in *FlatFileFormatSpec) DeepCopy() *FlatFileFormatSpec
- func (in *FlatFileFormatSpec) DeepCopyInto(out *FlatFileFormatSpec)
- func (*FlatFileFormatSpec) Descriptor() ([]byte, []int)
- func (m *FlatFileFormatSpec) Marshal() (dAtA []byte, err error)
- func (m *FlatFileFormatSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *FlatFileFormatSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*FlatFileFormatSpec) ProtoMessage()
- func (m *FlatFileFormatSpec) Reset()
- func (m *FlatFileFormatSpec) Size() (n int)
- func (this *FlatFileFormatSpec) String() string
- func (m *FlatFileFormatSpec) Unmarshal(dAtA []byte) error
- func (m *FlatFileFormatSpec) XXX_DiscardUnknown()
- func (m *FlatFileFormatSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *FlatFileFormatSpec) XXX_Merge(src proto.Message)
- func (m *FlatFileFormatSpec) XXX_Size() int
- func (m *FlatFileFormatSpec) XXX_Unmarshal(b []byte) error
- type FlatFileType
- type GaugeSpec
- func (in *GaugeSpec) DeepCopy() *GaugeSpec
- func (in *GaugeSpec) DeepCopyInto(out *GaugeSpec)
- func (*GaugeSpec) Descriptor() ([]byte, []int)
- func (m *GaugeSpec) Marshal() (dAtA []byte, err error)
- func (m *GaugeSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *GaugeSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*GaugeSpec) ProtoMessage()
- func (m *GaugeSpec) Reset()
- func (m *GaugeSpec) Size() (n int)
- func (this *GaugeSpec) String() string
- func (m *GaugeSpec) Unmarshal(dAtA []byte) error
- func (m *GaugeSpec) XXX_DiscardUnknown()
- func (m *GaugeSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *GaugeSpec) XXX_Merge(src proto.Message)
- func (m *GaugeSpec) XXX_Size() int
- func (m *GaugeSpec) XXX_Unmarshal(b []byte) error
- type GitLocation
- func (in *GitLocation) DeepCopy() *GitLocation
- func (in *GitLocation) DeepCopyInto(out *GitLocation)
- func (*GitLocation) Descriptor() ([]byte, []int)
- func (m *GitLocation) Marshal() (dAtA []byte, err error)
- func (m *GitLocation) MarshalTo(dAtA []byte) (int, error)
- func (m *GitLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*GitLocation) ProtoMessage()
- func (m *GitLocation) Reset()
- func (m *GitLocation) Size() (n int)
- func (this *GitLocation) String() string
- func (m *GitLocation) Unmarshal(dAtA []byte) error
- func (m *GitLocation) XXX_DiscardUnknown()
- func (m *GitLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *GitLocation) XXX_Merge(src proto.Message)
- func (m *GitLocation) XXX_Size() int
- func (m *GitLocation) XXX_Unmarshal(b []byte) error
- type GovernanceReviewStatus
- func (in *GovernanceReviewStatus) DeepCopy() *GovernanceReviewStatus
- func (in *GovernanceReviewStatus) DeepCopyInto(out *GovernanceReviewStatus)
- func (*GovernanceReviewStatus) Descriptor() ([]byte, []int)
- func (m *GovernanceReviewStatus) Marshal() (dAtA []byte, err error)
- func (m *GovernanceReviewStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *GovernanceReviewStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*GovernanceReviewStatus) ProtoMessage()
- func (m *GovernanceReviewStatus) Reset()
- func (m *GovernanceReviewStatus) Size() (n int)
- func (this *GovernanceReviewStatus) String() string
- func (m *GovernanceReviewStatus) Unmarshal(dAtA []byte) error
- func (m *GovernanceReviewStatus) XXX_DiscardUnknown()
- func (m *GovernanceReviewStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *GovernanceReviewStatus) XXX_Merge(src proto.Message)
- func (m *GovernanceReviewStatus) XXX_Size() int
- func (m *GovernanceReviewStatus) XXX_Unmarshal(b []byte) error
- type GovernanceSpec
- func (in *GovernanceSpec) DeepCopy() *GovernanceSpec
- func (in *GovernanceSpec) DeepCopyInto(out *GovernanceSpec)
- func (*GovernanceSpec) Descriptor() ([]byte, []int)
- func (m *GovernanceSpec) Marshal() (dAtA []byte, err error)
- func (m *GovernanceSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *GovernanceSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*GovernanceSpec) ProtoMessage()
- func (m *GovernanceSpec) Reset()
- func (m *GovernanceSpec) Size() (n int)
- func (this *GovernanceSpec) String() string
- func (m *GovernanceSpec) Unmarshal(dAtA []byte) error
- func (m *GovernanceSpec) XXX_DiscardUnknown()
- func (m *GovernanceSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *GovernanceSpec) XXX_Merge(src proto.Message)
- func (m *GovernanceSpec) XXX_Size() int
- func (m *GovernanceSpec) XXX_Unmarshal(b []byte) error
- type GovernanceStatus
- func (in *GovernanceStatus) DeepCopy() *GovernanceStatus
- func (in *GovernanceStatus) DeepCopyInto(out *GovernanceStatus)
- func (*GovernanceStatus) Descriptor() ([]byte, []int)
- func (m *GovernanceStatus) Marshal() (dAtA []byte, err error)
- func (m *GovernanceStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *GovernanceStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*GovernanceStatus) ProtoMessage()
- func (m *GovernanceStatus) Reset()
- func (m *GovernanceStatus) Size() (n int)
- func (this *GovernanceStatus) String() string
- func (m *GovernanceStatus) Unmarshal(dAtA []byte) error
- func (m *GovernanceStatus) XXX_DiscardUnknown()
- func (m *GovernanceStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *GovernanceStatus) XXX_Merge(src proto.Message)
- func (m *GovernanceStatus) XXX_Size() int
- func (m *GovernanceStatus) XXX_Unmarshal(b []byte) error
- type GroupBySpec
- func (in *GroupBySpec) DeepCopy() *GroupBySpec
- func (in *GroupBySpec) DeepCopyInto(out *GroupBySpec)
- func (*GroupBySpec) Descriptor() ([]byte, []int)
- func (m *GroupBySpec) Marshal() (dAtA []byte, err error)
- func (m *GroupBySpec) MarshalTo(dAtA []byte) (int, error)
- func (m *GroupBySpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*GroupBySpec) ProtoMessage()
- func (m *GroupBySpec) Reset()
- func (m *GroupBySpec) Size() (n int)
- func (this *GroupBySpec) String() string
- func (m *GroupBySpec) Unmarshal(dAtA []byte) error
- func (gb GroupBySpec) Validate(field string) ([]metav1.StatusCause, bool)
- func (m *GroupBySpec) XXX_DiscardUnknown()
- func (m *GroupBySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *GroupBySpec) XXX_Merge(src proto.Message)
- func (m *GroupBySpec) XXX_Size() int
- func (m *GroupBySpec) XXX_Unmarshal(b []byte) error
- type GroupDatasetLocationsSpec
- func (in *GroupDatasetLocationsSpec) DeepCopy() *GroupDatasetLocationsSpec
- func (in *GroupDatasetLocationsSpec) DeepCopyInto(out *GroupDatasetLocationsSpec)
- func (*GroupDatasetLocationsSpec) Descriptor() ([]byte, []int)
- func (m *GroupDatasetLocationsSpec) Marshal() (dAtA []byte, err error)
- func (m *GroupDatasetLocationsSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *GroupDatasetLocationsSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*GroupDatasetLocationsSpec) ProtoMessage()
- func (m *GroupDatasetLocationsSpec) Reset()
- func (m *GroupDatasetLocationsSpec) Size() (n int)
- func (this *GroupDatasetLocationsSpec) String() string
- func (m *GroupDatasetLocationsSpec) Unmarshal(dAtA []byte) error
- func (m *GroupDatasetLocationsSpec) XXX_DiscardUnknown()
- func (m *GroupDatasetLocationsSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *GroupDatasetLocationsSpec) XXX_Merge(src proto.Message)
- func (m *GroupDatasetLocationsSpec) XXX_Size() int
- func (m *GroupDatasetLocationsSpec) XXX_Unmarshal(b []byte) error
- type HistogramSpec
- func (in *HistogramSpec) DeepCopy() *HistogramSpec
- func (in *HistogramSpec) DeepCopyInto(out *HistogramSpec)
- func (*HistogramSpec) Descriptor() ([]byte, []int)
- func (m *HistogramSpec) Marshal() (dAtA []byte, err error)
- func (m *HistogramSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *HistogramSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*HistogramSpec) ProtoMessage()
- func (m *HistogramSpec) Reset()
- func (m *HistogramSpec) Size() (n int)
- func (this *HistogramSpec) String() string
- func (m *HistogramSpec) Unmarshal(dAtA []byte) error
- func (m *HistogramSpec) XXX_DiscardUnknown()
- func (m *HistogramSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *HistogramSpec) XXX_Merge(src proto.Message)
- func (m *HistogramSpec) XXX_Size() int
- func (m *HistogramSpec) XXX_Unmarshal(b []byte) error
- type ImageLocation
- func (in *ImageLocation) DeepCopy() *ImageLocation
- func (in *ImageLocation) DeepCopyInto(out *ImageLocation)
- func (*ImageLocation) Descriptor() ([]byte, []int)
- func (m *ImageLocation) Marshal() (dAtA []byte, err error)
- func (m *ImageLocation) MarshalTo(dAtA []byte) (int, error)
- func (m *ImageLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ImageLocation) ProtoMessage()
- func (m *ImageLocation) Reset()
- func (m *ImageLocation) Size() (n int)
- func (this *ImageLocation) String() string
- func (m *ImageLocation) Unmarshal(dAtA []byte) error
- func (m *ImageLocation) XXX_DiscardUnknown()
- func (m *ImageLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ImageLocation) XXX_Merge(src proto.Message)
- func (m *ImageLocation) XXX_Size() int
- func (m *ImageLocation) XXX_Unmarshal(b []byte) error
- type KPI
- func (in *KPI) DeepCopy() *KPI
- func (in *KPI) DeepCopyInto(out *KPI)
- func (*KPI) Descriptor() ([]byte, []int)
- func (m *KPI) Marshal() (dAtA []byte, err error)
- func (m *KPI) MarshalTo(dAtA []byte) (int, error)
- func (m *KPI) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*KPI) ProtoMessage()
- func (m *KPI) Reset()
- func (m *KPI) Size() (n int)
- func (this *KPI) String() string
- func (m *KPI) Unmarshal(dAtA []byte) error
- func (m *KPI) XXX_DiscardUnknown()
- func (m *KPI) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *KPI) XXX_Merge(src proto.Message)
- func (m *KPI) XXX_Size() int
- func (m *KPI) XXX_Unmarshal(b []byte) error
- type LabelingRule
- func (in *LabelingRule) DeepCopy() *LabelingRule
- func (in *LabelingRule) DeepCopyInto(out *LabelingRule)
- func (*LabelingRule) Descriptor() ([]byte, []int)
- func (m *LabelingRule) Marshal() (dAtA []byte, err error)
- func (m *LabelingRule) MarshalTo(dAtA []byte) (int, error)
- func (m *LabelingRule) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*LabelingRule) ProtoMessage()
- func (m *LabelingRule) Reset()
- func (m *LabelingRule) Size() (n int)
- func (this *LabelingRule) String() string
- func (m *LabelingRule) Unmarshal(dAtA []byte) error
- func (m *LabelingRule) XXX_DiscardUnknown()
- func (m *LabelingRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *LabelingRule) XXX_Merge(src proto.Message)
- func (m *LabelingRule) XXX_Size() int
- func (m *LabelingRule) XXX_Unmarshal(b []byte) error
- type LabelingSpec
- func (in *LabelingSpec) DeepCopy() *LabelingSpec
- func (in *LabelingSpec) DeepCopyInto(out *LabelingSpec)
- func (*LabelingSpec) Descriptor() ([]byte, []int)
- func (m *LabelingSpec) Marshal() (dAtA []byte, err error)
- func (m *LabelingSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *LabelingSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*LabelingSpec) ProtoMessage()
- func (m *LabelingSpec) Reset()
- func (m *LabelingSpec) Size() (n int)
- func (this *LabelingSpec) String() string
- func (m *LabelingSpec) Unmarshal(dAtA []byte) error
- func (m *LabelingSpec) XXX_DiscardUnknown()
- func (m *LabelingSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *LabelingSpec) XXX_Merge(src proto.Message)
- func (m *LabelingSpec) XXX_Size() int
- func (m *LabelingSpec) XXX_Unmarshal(b []byte) error
- type LineChartSpec
- func (in *LineChartSpec) DeepCopy() *LineChartSpec
- func (in *LineChartSpec) DeepCopyInto(out *LineChartSpec)
- func (*LineChartSpec) Descriptor() ([]byte, []int)
- func (m *LineChartSpec) Marshal() (dAtA []byte, err error)
- func (m *LineChartSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *LineChartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*LineChartSpec) ProtoMessage()
- func (m *LineChartSpec) Reset()
- func (m *LineChartSpec) Size() (n int)
- func (this *LineChartSpec) String() string
- func (m *LineChartSpec) Unmarshal(dAtA []byte) error
- func (m *LineChartSpec) XXX_DiscardUnknown()
- func (m *LineChartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *LineChartSpec) XXX_Merge(src proto.Message)
- func (m *LineChartSpec) XXX_Size() int
- func (m *LineChartSpec) XXX_Unmarshal(b []byte) error
- type MaterializationSpec
- func (in *MaterializationSpec) DeepCopy() *MaterializationSpec
- func (in *MaterializationSpec) DeepCopyInto(out *MaterializationSpec)
- func (*MaterializationSpec) Descriptor() ([]byte, []int)
- func (m *MaterializationSpec) Marshal() (dAtA []byte, err error)
- func (m *MaterializationSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *MaterializationSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*MaterializationSpec) ProtoMessage()
- func (m *MaterializationSpec) Reset()
- func (m *MaterializationSpec) Size() (n int)
- func (this *MaterializationSpec) String() string
- func (m *MaterializationSpec) Unmarshal(dAtA []byte) error
- func (m *MaterializationSpec) XXX_DiscardUnknown()
- func (m *MaterializationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *MaterializationSpec) XXX_Merge(src proto.Message)
- func (m *MaterializationSpec) XXX_Size() int
- func (m *MaterializationSpec) XXX_Unmarshal(b []byte) error
- type MetricSpec
- func (in *MetricSpec) DeepCopy() *MetricSpec
- func (in *MetricSpec) DeepCopyInto(out *MetricSpec)
- func (*MetricSpec) Descriptor() ([]byte, []int)
- func (m *MetricSpec) Marshal() (dAtA []byte, err error)
- func (m *MetricSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *MetricSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*MetricSpec) ProtoMessage()
- func (m *MetricSpec) Reset()
- func (m *MetricSpec) Size() (n int)
- func (this *MetricSpec) String() string
- func (m *MetricSpec) Unmarshal(dAtA []byte) error
- func (m *MetricSpec) XXX_DiscardUnknown()
- func (m *MetricSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *MetricSpec) XXX_Merge(src proto.Message)
- func (m *MetricSpec) XXX_Size() int
- func (m *MetricSpec) XXX_Unmarshal(b []byte) error
- type OutlierStat
- func (in *OutlierStat) DeepCopy() *OutlierStat
- func (in *OutlierStat) DeepCopyInto(out *OutlierStat)
- func (*OutlierStat) Descriptor() ([]byte, []int)
- func (m *OutlierStat) Marshal() (dAtA []byte, err error)
- func (m *OutlierStat) MarshalTo(dAtA []byte) (int, error)
- func (m *OutlierStat) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*OutlierStat) ProtoMessage()
- func (m *OutlierStat) Reset()
- func (m *OutlierStat) Size() (n int)
- func (this *OutlierStat) String() string
- func (m *OutlierStat) Unmarshal(dAtA []byte) error
- func (m *OutlierStat) XXX_DiscardUnknown()
- func (m *OutlierStat) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *OutlierStat) XXX_Merge(src proto.Message)
- func (m *OutlierStat) XXX_Size() int
- func (m *OutlierStat) XXX_Unmarshal(b []byte) error
- type PageSpec
- func (in *PageSpec) DeepCopy() *PageSpec
- func (in *PageSpec) DeepCopyInto(out *PageSpec)
- func (*PageSpec) Descriptor() ([]byte, []int)
- func (m *PageSpec) Marshal() (dAtA []byte, err error)
- func (m *PageSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *PageSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*PageSpec) ProtoMessage()
- func (m *PageSpec) Reset()
- func (m *PageSpec) Size() (n int)
- func (this *PageSpec) String() string
- func (m *PageSpec) Unmarshal(dAtA []byte) error
- func (m *PageSpec) XXX_DiscardUnknown()
- func (m *PageSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *PageSpec) XXX_Merge(src proto.Message)
- func (m *PageSpec) XXX_Size() int
- func (m *PageSpec) XXX_Unmarshal(b []byte) error
- type ParquetFileSpec
- func (in *ParquetFileSpec) DeepCopy() *ParquetFileSpec
- func (in *ParquetFileSpec) DeepCopyInto(out *ParquetFileSpec)
- func (*ParquetFileSpec) Descriptor() ([]byte, []int)
- func (m *ParquetFileSpec) Marshal() (dAtA []byte, err error)
- func (m *ParquetFileSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *ParquetFileSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ParquetFileSpec) ProtoMessage()
- func (m *ParquetFileSpec) Reset()
- func (m *ParquetFileSpec) Size() (n int)
- func (this *ParquetFileSpec) String() string
- func (m *ParquetFileSpec) Unmarshal(dAtA []byte) error
- func (m *ParquetFileSpec) XXX_DiscardUnknown()
- func (m *ParquetFileSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ParquetFileSpec) XXX_Merge(src proto.Message)
- func (m *ParquetFileSpec) XXX_Size() int
- func (m *ParquetFileSpec) XXX_Unmarshal(b []byte) error
- type QuoteChar
- type Recipe
- func (recipe *Recipe) AddFinalizer()
- func (recipe *Recipe) CreateOrUpdateCond(cond metav1.Condition)
- func (in *Recipe) DeepCopy() *Recipe
- func (in *Recipe) DeepCopyInto(out *Recipe)
- func (in *Recipe) DeepCopyObject() runtime.Object
- func (recipe *Recipe) Default()
- func (recipe Recipe) Deleted() bool
- func (*Recipe) Descriptor() ([]byte, []int)
- func (recipe Recipe) GetCond(t string) metav1.Condition
- func (recipe *Recipe) GetCondIdx(t string) int
- func (recipe Recipe) HasFinalizer() bool
- func (recipe Recipe) IsInCond(ct string) bool
- func (recipe Recipe) IsReady() bool
- func (recipe Recipe) IsSaved() bool
- func (recipe *Recipe) MarkReady()
- func (recipe *Recipe) MarkSaved()
- func (m *Recipe) Marshal() (dAtA []byte, err error)
- func (m *Recipe) MarshalTo(dAtA []byte) (int, error)
- func (m *Recipe) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (recipe *Recipe) Populate(name string)
- func (recipe Recipe) PrintConditions()
- func (*Recipe) ProtoMessage()
- func (recipe *Recipe) RemoveFinalizer()
- func (m *Recipe) Reset()
- func (recipe *Recipe) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *Recipe) Size() (n int)
- func (this *Recipe) String() string
- func (m *Recipe) Unmarshal(dAtA []byte) error
- func (recipe *Recipe) UpdateRunStatus(run RecipeRun)
- func (recipe Recipe) ValidateCreate() error
- func (recipe Recipe) ValidateDelete() error
- func (recipe Recipe) ValidateUpdate(old runtime.Object) error
- func (m *Recipe) XXX_DiscardUnknown()
- func (m *Recipe) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Recipe) XXX_Merge(src proto.Message)
- func (m *Recipe) XXX_Size() int
- func (m *Recipe) XXX_Unmarshal(b []byte) error
- type RecipeConditionType
- type RecipeInputSpec
- func (in *RecipeInputSpec) DeepCopy() *RecipeInputSpec
- func (in *RecipeInputSpec) DeepCopyInto(out *RecipeInputSpec)
- func (*RecipeInputSpec) Descriptor() ([]byte, []int)
- func (m *RecipeInputSpec) Marshal() (dAtA []byte, err error)
- func (m *RecipeInputSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeInputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeInputSpec) ProtoMessage()
- func (m *RecipeInputSpec) Reset()
- func (m *RecipeInputSpec) Size() (n int)
- func (this *RecipeInputSpec) String() string
- func (m *RecipeInputSpec) Unmarshal(dAtA []byte) error
- func (m *RecipeInputSpec) XXX_DiscardUnknown()
- func (m *RecipeInputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeInputSpec) XXX_Merge(src proto.Message)
- func (m *RecipeInputSpec) XXX_Size() int
- func (m *RecipeInputSpec) XXX_Unmarshal(b []byte) error
- type RecipeList
- func (in *RecipeList) DeepCopy() *RecipeList
- func (in *RecipeList) DeepCopyInto(out *RecipeList)
- func (in *RecipeList) DeepCopyObject() runtime.Object
- func (*RecipeList) Descriptor() ([]byte, []int)
- func (m *RecipeList) Marshal() (dAtA []byte, err error)
- func (m *RecipeList) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeList) ProtoMessage()
- func (m *RecipeList) Reset()
- func (m *RecipeList) Size() (n int)
- func (this *RecipeList) String() string
- func (m *RecipeList) Unmarshal(dAtA []byte) error
- func (m *RecipeList) XXX_DiscardUnknown()
- func (m *RecipeList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeList) XXX_Merge(src proto.Message)
- func (m *RecipeList) XXX_Size() int
- func (m *RecipeList) XXX_Unmarshal(b []byte) error
- type RecipeOutputSpec
- func (in *RecipeOutputSpec) DeepCopy() *RecipeOutputSpec
- func (in *RecipeOutputSpec) DeepCopyInto(out *RecipeOutputSpec)
- func (*RecipeOutputSpec) Descriptor() ([]byte, []int)
- func (m *RecipeOutputSpec) Marshal() (dAtA []byte, err error)
- func (m *RecipeOutputSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeOutputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeOutputSpec) ProtoMessage()
- func (m *RecipeOutputSpec) Reset()
- func (m *RecipeOutputSpec) Size() (n int)
- func (this *RecipeOutputSpec) String() string
- func (m *RecipeOutputSpec) Unmarshal(dAtA []byte) error
- func (m *RecipeOutputSpec) XXX_DiscardUnknown()
- func (m *RecipeOutputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeOutputSpec) XXX_Merge(src proto.Message)
- func (m *RecipeOutputSpec) XXX_Size() int
- func (m *RecipeOutputSpec) XXX_Unmarshal(b []byte) error
- type RecipePartSpec
- func (in *RecipePartSpec) DeepCopy() *RecipePartSpec
- func (in *RecipePartSpec) DeepCopyInto(out *RecipePartSpec)
- func (*RecipePartSpec) Descriptor() ([]byte, []int)
- func (m *RecipePartSpec) Marshal() (dAtA []byte, err error)
- func (m *RecipePartSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipePartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipePartSpec) ProtoMessage()
- func (m *RecipePartSpec) Reset()
- func (m *RecipePartSpec) Size() (n int)
- func (this *RecipePartSpec) String() string
- func (m *RecipePartSpec) Unmarshal(dAtA []byte) error
- func (m *RecipePartSpec) XXX_DiscardUnknown()
- func (m *RecipePartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipePartSpec) XXX_Merge(src proto.Message)
- func (m *RecipePartSpec) XXX_Size() int
- func (m *RecipePartSpec) XXX_Unmarshal(b []byte) error
- type RecipeRun
- func (reciperun *RecipeRun) AddFinalizer()
- func (reciperun RecipeRun) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert
- func (reciperun *RecipeRun) CreateOrUpdateCond(cond metav1.Condition)
- func (in *RecipeRun) DeepCopy() *RecipeRun
- func (in *RecipeRun) DeepCopyInto(out *RecipeRun)
- func (in *RecipeRun) DeepCopyObject() runtime.Object
- func (reciperun *RecipeRun) Default()
- func (reciperun *RecipeRun) Deleted() bool
- func (*RecipeRun) Descriptor() ([]byte, []int)
- func (reciperun RecipeRun) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert
- func (reciperun RecipeRun) GetCond(t string) metav1.Condition
- func (reciperun RecipeRun) GetCondIdx(t string) int
- func (reciperun RecipeRun) HasFinalizer() bool
- func (reciperun *RecipeRun) IsFailed() bool
- func (reciperun RecipeRun) IsInCond(ct string) bool
- func (reciperun RecipeRun) IsReady() bool
- func (reciperun *RecipeRun) IsRunning() bool
- func (reciperun *RecipeRun) IsSaved() bool
- func (reciperun RecipeRun) ManifestURI() string
- func (reciperun *RecipeRun) MarkCompleted()
- func (reciperun *RecipeRun) MarkFailed(error string)
- func (reciperun *RecipeRun) MarkRunning()
- func (reciperun *RecipeRun) MarkSaved()
- func (m *RecipeRun) Marshal() (dAtA []byte, err error)
- func (m *RecipeRun) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeRun) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (reciperun RecipeRun) PrintConditions()
- func (*RecipeRun) ProtoMessage()
- func (reciperun *RecipeRun) RemoveFinalizer()
- func (reciperun RecipeRun) ReportName() string
- func (m *RecipeRun) Reset()
- func (reciperun RecipeRun) RootURI() string
- func (reciperun RecipeRun) RunStatus() *catalog.LastRunStatus
- func (reciperun *RecipeRun) SetupWebhookWithManager(mgr ctrl.Manager) error
- func (m *RecipeRun) Size() (n int)
- func (reciperun RecipeRun) StatusString() string
- func (this *RecipeRun) String() string
- func (m *RecipeRun) Unmarshal(dAtA []byte) error
- func (reciperun *RecipeRun) ValidateCreate() error
- func (reciperun *RecipeRun) ValidateDelete() error
- func (reciperun *RecipeRun) ValidateUpdate(old runtime.Object) error
- func (m *RecipeRun) XXX_DiscardUnknown()
- func (m *RecipeRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeRun) XXX_Merge(src proto.Message)
- func (m *RecipeRun) XXX_Size() int
- func (m *RecipeRun) XXX_Unmarshal(b []byte) error
- type RecipeRunList
- func (in *RecipeRunList) DeepCopy() *RecipeRunList
- func (in *RecipeRunList) DeepCopyInto(out *RecipeRunList)
- func (in *RecipeRunList) DeepCopyObject() runtime.Object
- func (*RecipeRunList) Descriptor() ([]byte, []int)
- func (m *RecipeRunList) Marshal() (dAtA []byte, err error)
- func (m *RecipeRunList) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeRunList) ProtoMessage()
- func (m *RecipeRunList) Reset()
- func (m *RecipeRunList) Size() (n int)
- func (this *RecipeRunList) String() string
- func (m *RecipeRunList) Unmarshal(dAtA []byte) error
- func (m *RecipeRunList) XXX_DiscardUnknown()
- func (m *RecipeRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeRunList) XXX_Merge(src proto.Message)
- func (m *RecipeRunList) XXX_Size() int
- func (m *RecipeRunList) XXX_Unmarshal(b []byte) error
- type RecipeRunPhase
- type RecipeRunSpec
- func (in *RecipeRunSpec) DeepCopy() *RecipeRunSpec
- func (in *RecipeRunSpec) DeepCopyInto(out *RecipeRunSpec)
- func (*RecipeRunSpec) Descriptor() ([]byte, []int)
- func (m *RecipeRunSpec) Marshal() (dAtA []byte, err error)
- func (m *RecipeRunSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeRunSpec) ProtoMessage()
- func (m *RecipeRunSpec) Reset()
- func (m *RecipeRunSpec) Size() (n int)
- func (this *RecipeRunSpec) String() string
- func (m *RecipeRunSpec) Unmarshal(dAtA []byte) error
- func (m *RecipeRunSpec) XXX_DiscardUnknown()
- func (m *RecipeRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeRunSpec) XXX_Merge(src proto.Message)
- func (m *RecipeRunSpec) XXX_Size() int
- func (m *RecipeRunSpec) XXX_Unmarshal(b []byte) error
- type RecipeRunStatus
- func (in *RecipeRunStatus) DeepCopy() *RecipeRunStatus
- func (in *RecipeRunStatus) DeepCopyInto(out *RecipeRunStatus)
- func (*RecipeRunStatus) Descriptor() ([]byte, []int)
- func (m *RecipeRunStatus) Marshal() (dAtA []byte, err error)
- func (m *RecipeRunStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeRunStatus) ProtoMessage()
- func (m *RecipeRunStatus) Reset()
- func (m *RecipeRunStatus) Size() (n int)
- func (this *RecipeRunStatus) String() string
- func (m *RecipeRunStatus) Unmarshal(dAtA []byte) error
- func (m *RecipeRunStatus) XXX_DiscardUnknown()
- func (m *RecipeRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeRunStatus) XXX_Merge(src proto.Message)
- func (m *RecipeRunStatus) XXX_Size() int
- func (m *RecipeRunStatus) XXX_Unmarshal(b []byte) error
- type RecipeSpec
- func (in *RecipeSpec) DeepCopy() *RecipeSpec
- func (in *RecipeSpec) DeepCopyInto(out *RecipeSpec)
- func (*RecipeSpec) Descriptor() ([]byte, []int)
- func (m *RecipeSpec) Marshal() (dAtA []byte, err error)
- func (m *RecipeSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeSpec) ProtoMessage()
- func (m *RecipeSpec) Reset()
- func (m *RecipeSpec) Size() (n int)
- func (this *RecipeSpec) String() string
- func (m *RecipeSpec) Unmarshal(dAtA []byte) error
- func (m *RecipeSpec) XXX_DiscardUnknown()
- func (m *RecipeSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeSpec) XXX_Merge(src proto.Message)
- func (m *RecipeSpec) XXX_Size() int
- func (m *RecipeSpec) XXX_Unmarshal(b []byte) error
- type RecipeStatus
- func (in *RecipeStatus) DeepCopy() *RecipeStatus
- func (in *RecipeStatus) DeepCopyInto(out *RecipeStatus)
- func (*RecipeStatus) Descriptor() ([]byte, []int)
- func (m *RecipeStatus) Marshal() (dAtA []byte, err error)
- func (m *RecipeStatus) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeStatus) ProtoMessage()
- func (m *RecipeStatus) Reset()
- func (m *RecipeStatus) Size() (n int)
- func (this *RecipeStatus) String() string
- func (m *RecipeStatus) Unmarshal(dAtA []byte) error
- func (m *RecipeStatus) XXX_DiscardUnknown()
- func (m *RecipeStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeStatus) XXX_Merge(src proto.Message)
- func (m *RecipeStatus) XXX_Size() int
- func (m *RecipeStatus) XXX_Unmarshal(b []byte) error
- type RecipeStep
- func (in *RecipeStep) DeepCopy() *RecipeStep
- func (in *RecipeStep) DeepCopyInto(out *RecipeStep)
- func (*RecipeStep) Descriptor() ([]byte, []int)
- func (m *RecipeStep) Marshal() (dAtA []byte, err error)
- func (m *RecipeStep) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeStep) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeStep) ProtoMessage()
- func (m *RecipeStep) Reset()
- func (m *RecipeStep) Size() (n int)
- func (this *RecipeStep) String() string
- func (m *RecipeStep) Unmarshal(dAtA []byte) error
- func (m *RecipeStep) XXX_DiscardUnknown()
- func (m *RecipeStep) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeStep) XXX_Merge(src proto.Message)
- func (m *RecipeStep) XXX_Size() int
- func (m *RecipeStep) XXX_Unmarshal(b []byte) error
- type RecipeStepOperation
- type RecipeStepParam
- func (in *RecipeStepParam) DeepCopy() *RecipeStepParam
- func (in *RecipeStepParam) DeepCopyInto(out *RecipeStepParam)
- func (*RecipeStepParam) Descriptor() ([]byte, []int)
- func (m *RecipeStepParam) Marshal() (dAtA []byte, err error)
- func (m *RecipeStepParam) MarshalTo(dAtA []byte) (int, error)
- func (m *RecipeStepParam) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecipeStepParam) ProtoMessage()
- func (m *RecipeStepParam) Reset()
- func (m *RecipeStepParam) Size() (n int)
- func (this *RecipeStepParam) String() string
- func (m *RecipeStepParam) Unmarshal(dAtA []byte) error
- func (m *RecipeStepParam) XXX_DiscardUnknown()
- func (m *RecipeStepParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecipeStepParam) XXX_Merge(src proto.Message)
- func (m *RecipeStepParam) XXX_Size() int
- func (m *RecipeStepParam) XXX_Unmarshal(b []byte) error
- type RecommendationSchema
- func (in *RecommendationSchema) DeepCopy() *RecommendationSchema
- func (in *RecommendationSchema) DeepCopyInto(out *RecommendationSchema)
- func (*RecommendationSchema) Descriptor() ([]byte, []int)
- func (m *RecommendationSchema) Marshal() (dAtA []byte, err error)
- func (m *RecommendationSchema) MarshalTo(dAtA []byte) (int, error)
- func (m *RecommendationSchema) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RecommendationSchema) ProtoMessage()
- func (m *RecommendationSchema) Reset()
- func (m *RecommendationSchema) Size() (n int)
- func (this *RecommendationSchema) String() string
- func (m *RecommendationSchema) Unmarshal(dAtA []byte) error
- func (m *RecommendationSchema) XXX_DiscardUnknown()
- func (m *RecommendationSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RecommendationSchema) XXX_Merge(src proto.Message)
- func (m *RecommendationSchema) XXX_Size() int
- func (m *RecommendationSchema) XXX_Unmarshal(b []byte) error
- type RelationshipSpec
- func (in *RelationshipSpec) DeepCopy() *RelationshipSpec
- func (in *RelationshipSpec) DeepCopyInto(out *RelationshipSpec)
- func (*RelationshipSpec) Descriptor() ([]byte, []int)
- func (m *RelationshipSpec) Marshal() (dAtA []byte, err error)
- func (m *RelationshipSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *RelationshipSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RelationshipSpec) ProtoMessage()
- func (m *RelationshipSpec) Reset()
- func (m *RelationshipSpec) Size() (n int)
- func (this *RelationshipSpec) String() string
- func (m *RelationshipSpec) Unmarshal(dAtA []byte) error
- func (m *RelationshipSpec) XXX_DiscardUnknown()
- func (m *RelationshipSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RelationshipSpec) XXX_Merge(src proto.Message)
- func (m *RelationshipSpec) XXX_Size() int
- func (m *RelationshipSpec) XXX_Unmarshal(b []byte) error
- type RowSpec
- func (in *RowSpec) DeepCopy() *RowSpec
- func (in *RowSpec) DeepCopyInto(out *RowSpec)
- func (*RowSpec) Descriptor() ([]byte, []int)
- func (m *RowSpec) Marshal() (dAtA []byte, err error)
- func (m *RowSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *RowSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*RowSpec) ProtoMessage()
- func (m *RowSpec) Reset()
- func (m *RowSpec) Size() (n int)
- func (this *RowSpec) String() string
- func (m *RowSpec) Unmarshal(dAtA []byte) error
- func (m *RowSpec) XXX_DiscardUnknown()
- func (m *RowSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *RowSpec) XXX_Merge(src proto.Message)
- func (m *RowSpec) XXX_Size() int
- func (m *RowSpec) XXX_Unmarshal(b []byte) error
- type SampleSpec
- func (in *SampleSpec) DeepCopy() *SampleSpec
- func (in *SampleSpec) DeepCopyInto(out *SampleSpec)
- func (*SampleSpec) Descriptor() ([]byte, []int)
- func (m *SampleSpec) Marshal() (dAtA []byte, err error)
- func (m *SampleSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *SampleSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*SampleSpec) ProtoMessage()
- func (m *SampleSpec) Reset()
- func (m *SampleSpec) Size() (n int)
- func (this *SampleSpec) String() string
- func (m *SampleSpec) Unmarshal(dAtA []byte) error
- func (m *SampleSpec) XXX_DiscardUnknown()
- func (m *SampleSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *SampleSpec) XXX_Merge(src proto.Message)
- func (m *SampleSpec) XXX_Size() int
- func (m *SampleSpec) XXX_Unmarshal(b []byte) error
- type ScatterPlotSpec
- func (in *ScatterPlotSpec) DeepCopy() *ScatterPlotSpec
- func (in *ScatterPlotSpec) DeepCopyInto(out *ScatterPlotSpec)
- func (*ScatterPlotSpec) Descriptor() ([]byte, []int)
- func (m *ScatterPlotSpec) Marshal() (dAtA []byte, err error)
- func (m *ScatterPlotSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *ScatterPlotSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*ScatterPlotSpec) ProtoMessage()
- func (m *ScatterPlotSpec) Reset()
- func (m *ScatterPlotSpec) Size() (n int)
- func (this *ScatterPlotSpec) String() string
- func (m *ScatterPlotSpec) Unmarshal(dAtA []byte) error
- func (m *ScatterPlotSpec) XXX_DiscardUnknown()
- func (m *ScatterPlotSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *ScatterPlotSpec) XXX_Merge(src proto.Message)
- func (m *ScatterPlotSpec) XXX_Size() int
- func (m *ScatterPlotSpec) XXX_Unmarshal(b []byte) error
- type Schema
- func (in *Schema) DeepCopy() *Schema
- func (in *Schema) DeepCopyInto(out *Schema)
- func (*Schema) Descriptor() ([]byte, []int)
- func (m *Schema) Marshal() (dAtA []byte, err error)
- func (m *Schema) MarshalTo(dAtA []byte) (int, error)
- func (m *Schema) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*Schema) ProtoMessage()
- func (m *Schema) Reset()
- func (m *Schema) Size() (n int)
- func (this *Schema) String() string
- func (m *Schema) Unmarshal(dAtA []byte) error
- func (m *Schema) XXX_DiscardUnknown()
- func (m *Schema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *Schema) XXX_Merge(src proto.Message)
- func (m *Schema) XXX_Size() int
- func (m *Schema) XXX_Unmarshal(b []byte) error
- type SyntheticSpec
- func (in *SyntheticSpec) DeepCopy() *SyntheticSpec
- func (in *SyntheticSpec) DeepCopyInto(out *SyntheticSpec)
- func (*SyntheticSpec) Descriptor() ([]byte, []int)
- func (m *SyntheticSpec) Marshal() (dAtA []byte, err error)
- func (m *SyntheticSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *SyntheticSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*SyntheticSpec) ProtoMessage()
- func (m *SyntheticSpec) Reset()
- func (m *SyntheticSpec) Size() (n int)
- func (this *SyntheticSpec) String() string
- func (m *SyntheticSpec) Unmarshal(dAtA []byte) error
- func (m *SyntheticSpec) XXX_DiscardUnknown()
- func (m *SyntheticSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *SyntheticSpec) XXX_Merge(src proto.Message)
- func (m *SyntheticSpec) XXX_Size() int
- func (m *SyntheticSpec) XXX_Unmarshal(b []byte) error
- type TableSpec
- func (in *TableSpec) DeepCopy() *TableSpec
- func (in *TableSpec) DeepCopyInto(out *TableSpec)
- func (*TableSpec) Descriptor() ([]byte, []int)
- func (m *TableSpec) Marshal() (dAtA []byte, err error)
- func (m *TableSpec) MarshalTo(dAtA []byte) (int, error)
- func (m *TableSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*TableSpec) ProtoMessage()
- func (m *TableSpec) Reset()
- func (m *TableSpec) Size() (n int)
- func (this *TableSpec) String() string
- func (m *TableSpec) Unmarshal(dAtA []byte) error
- func (m *TableSpec) XXX_DiscardUnknown()
- func (m *TableSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *TableSpec) XXX_Merge(src proto.Message)
- func (m *TableSpec) XXX_Size() int
- func (m *TableSpec) XXX_Unmarshal(b []byte) error
- type TimeSeriesSchema
- func (in *TimeSeriesSchema) DeepCopy() *TimeSeriesSchema
- func (in *TimeSeriesSchema) DeepCopyInto(out *TimeSeriesSchema)
- func (*TimeSeriesSchema) Descriptor() ([]byte, []int)
- func (m *TimeSeriesSchema) Marshal() (dAtA []byte, err error)
- func (m *TimeSeriesSchema) MarshalTo(dAtA []byte) (int, error)
- func (m *TimeSeriesSchema) MarshalToSizedBuffer(dAtA []byte) (int, error)
- func (*TimeSeriesSchema) ProtoMessage()
- func (m *TimeSeriesSchema) Reset()
- func (m *TimeSeriesSchema) Size() (n int)
- func (this *TimeSeriesSchema) String() string
- func (m *TimeSeriesSchema) Unmarshal(dAtA []byte) error
- func (m *TimeSeriesSchema) XXX_DiscardUnknown()
- func (m *TimeSeriesSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
- func (m *TimeSeriesSchema) XXX_Merge(src proto.Message)
- func (m *TimeSeriesSchema) XXX_Size() int
- func (m *TimeSeriesSchema) XXX_Unmarshal(b []byte) error
Constants ¶
const (
    DataPipelineReady = "Ready"
    DataPipelineSaved = "Saved"
)
DataPipeline Condition
const (
    DataPipelineRunCompleted = "Completed"
    DataPipelineRunSaved     = "Saved"
)
DataPipelineRun Condition
const (
    DataProductReady = "Ready"
    DataProductSaved = "Saved"
)
ProductRef Condition
const (
    DataProductVersionReady = "Ready"
    DataProductVersionSaved = "Saved"
)
DataProductVersion Condition
const (
    DatasetReported    = "Reported"
    DatasetUnitTested  = "UnitTested"
    DatasetSnapshotted = "Snapshotted"
    DatasetProfiled    = "Profiled"
    DatasetIngested    = "Ingested"
    DatasetGrouped     = "Grouped"
    DatasetGenerated   = "Generated"
    DatasetSaved       = "Saved"
    DatasetArchived    = "Archived"
    DatasetReady       = "Ready"
)
DatasetName Condition
const (
    MultiDatasetSameNumberOfRows catalog.AssertionType = "multi-dataset-same-number-of-rows"
    MultiDatasetOuterJoinEmpty catalog.AssertionType = "multi-dataset-outer-join-empty"
    MultiDatasetOuterJoinNotEmpty catalog.AssertionType = "multi-dataset-outer-join-not-empty"
    MultiDatasetInnerJoinEmpty catalog.AssertionType = "multi-dataset-inner-join-empty"
    MultiDatasetInnerJoinNotEmpty catalog.AssertionType = "multi-dataset-inner-join-not-empty"
    MultiDatasetLeftJoinEmpty catalog.AssertionType = "multi-dataset-left-join-empty"
    MultiDatasetLeftJoinNotEmpty catalog.AssertionType = "multi-dataset-left-join-not-empty"
    MultiDatasetRightJoinEmpty catalog.AssertionType = "multi-dataset-right-join-empty"
    MultiDatasetRightJoinNotEmpty catalog.AssertionType = "multi-dataset-right-join-not-empty"
    DatasetColumnsCountEqual catalog.AssertionType = "dataset-columns-count-equal"
    DatasetColumnsNameInSet catalog.AssertionType = "dataset-columns-in-set"
    DatasetColumnsInOrderedList catalog.AssertionType = "dataset-columns-in-ordered-list"
    DatasetRowCountBetween catalog.AssertionType = "dataset-row-count-between"
    DatasetNotEmpty catalog.AssertionType = "dataset-dataset-not-empty"
    DatasetEmpty catalog.AssertionType = "dataset-empty"
    MultiColumnCorr catalog.AssertionType = "multi-column-corr"
    MultiColumnUnique catalog.AssertionType = "multi-column-unique"
    MultiColumnLessThan catalog.AssertionType = "multi-column-less-than"
    MultiColumnLessThanOrEqualTo catalog.AssertionType = "multi-column-less-than-or-equal-to"
    MultiColumnUniqueValueRatio catalog.AssertionType = "multi-column-unique-value-ratio"
    ColumnExist catalog.AssertionType = "column-exist"
    ColumnHaveValues catalog.AssertionType = "column-have-values"
    ColumnHasNoValue catalog.AssertionType = "column-has-no-values"
    ColumnHaveNulls catalog.AssertionType = "column-value-have-nulls"
    ColumnHasNoNull catalog.AssertionType = "column-value-has-no-nulls"
    ColumnOfType catalog.AssertionType = "column-of-type"
    ColumnValuesInSet catalog.AssertionType = "column-values-in-set"
    ColumnValuesIncreasing catalog.AssertionType = "column-values-increasing"
    ColumnsValuesDecreasing catalog.AssertionType = "column-values-decreasing"
    ColumnValueLengthBetween catalog.AssertionType = "column-value-length-between"
    ColumnValueNameMatchPattern catalog.AssertionType = "column-value-match-pattern"
    ColumnValueIsDate catalog.AssertionType = "column-value-is-date"
    ColumnValueIsJson catalog.AssertionType = "column-value-is-json"
    ColumnValueInDomain catalog.AssertionType = "column-value-in-domain"
    ColumnUniqueValueCountBetween catalog.AssertionType = "column-unique-value-count-between"
    ColumnOutlierValueUniqueBetween catalog.AssertionType = "column-outlier-value-count-between"
    ColumnValidValueUniqueBetween catalog.AssertionType = "column-valid-values-count-between"
    ColumnMismatchValueBetween catalog.AssertionType = "column-mismatch-values-between"
    ColumnValueMinBetween catalog.AssertionType = "column-value-min-between"
    ColumnValueLowerQuartileBetween catalog.AssertionType = "column-value-lower-quartile-between"
    ColumnValueMedianBetween catalog.AssertionType = "column-value-median-between"
    ColumnValueAvgBetween catalog.AssertionType = "column-value-average-between"
    ColumnValueUpperQuartileBetween catalog.AssertionType = "column-value-upper-quartile-between"
    ColumnValueMaxBetween catalog.AssertionType = "column-value-max-between"
    ColumnValueStddevBetween catalog.AssertionType = "column-value-stddev-between"
    ColumnValueChiSquarePValueBetween catalog.AssertionType = "column-value-chi-square-p-value-between"
    ColumnValuePairCramersBetween catalog.AssertionType = "column-value-pair-cramers-between"
    ColumnValueHasEntropy catalog.AssertionType = "column-value-has-entropy"
    ColumnValueMinLength catalog.AssertionType = "column-value-min-length"
    ColumnValueMaxLength catalog.AssertionType = "column-value-max-length"
    ColumnValueNonNegative catalog.AssertionType = "column-value-non-negative"
    ColumnValuePositive catalog.AssertionType = "column-value-positive"
    FileSizeBetween catalog.AssertionType = "file-size-between"
    FileExist catalog.AssertionType = "file-exist"
    FileRegexMatchCountBetween catalog.AssertionType = "file-regex-match-count-between"
    FileValidJson catalog.AssertionType = "file-valid-json"
    FileValidCsv catalog.AssertionType = "file-valid-csv"

    // Pre defined type
    ColumnValuePhoneNumber catalog.AssertionType = "column-value-phone-number"
    ColumnValueEmail catalog.AssertionType = "column-value-email"
    ColumnValueCreditCard catalog.AssertionType = "column-value-credit-card"
    ColumnValueBase64 catalog.AssertionType = "column-value-base64"
    ColumnValueIsbn10 catalog.AssertionType = "column-value-isbn10"
    ColumnValueIsbn13 catalog.AssertionType = "column-value-isbn13"
    ColumnValueUUID3 catalog.AssertionType = "column-value-uuid3"
    ColumnValueUUID4 catalog.AssertionType = "column-value-uuid4"
    ColumnValueUUID5 catalog.AssertionType = "column-value-uuid5"
    ColumnValueAscii catalog.AssertionType = "column-value-ascii"
    ColumnValueLatitude catalog.AssertionType = "column-value-latitude"
    ColumnValueLongitude catalog.AssertionType = "column-value-longitude"
    ColumnValueSSN catalog.AssertionType = "column-value-ssn"
    ColumnValueHostName catalog.AssertionType = "column-value-hostname"
)
const ( // Regular state of the store. DatasourceReady = "Ready" DatasourceSaved = "Saved" )
DataSource conditions
const ( EntityReady = "Ready" EntitySaved = "Saved" )
Entity conditions
const ( FeatureGroupReady = "Ready" FeatureGroupSaved = "Saved" FeatureGroupSynced = "Synced" FeatureGroupIngested = "Ingested" )
const ( ModelDriftTwoSampleKSTestLessThan catalog.AssertionType = "model-two-sample-ks-test-less-than" ModelDriftChiSquaredLessThan catalog.AssertionType = "model-chi-squared-test-less-than" ModelDriftProportionDiffTestLessThan catalog.AssertionType = "model-proportion-difference-test-less-than" ModelDriftWassersteinDistanceLessThan catalog.AssertionType = "model-wasserstein-distance-less-than" ModelDriftJSDivergenceLessThan catalog.AssertionType = "model-js-divergence-less" ModelDriftPSILessThan catalog.AssertionType = "model-psi-less-than" ModelDriftKLDivergenceLessThan catalog.AssertionType = "model-kl-divergence-less-than" )
const ( FeatureHistogramReady = "Ready" FeatureHistogramUnitTested = "UnitTested" FeatureHistogramSaved = "Saved" )
FeatureHistogram conditions
const ( RecipeReady = "Ready" RecipeSaved = "Saved" )
Recipe conditions
const ( RecipeRunUnitTested = "UnitTested" RecipeRunCompleted = "Completed" RecipeRunSaved = "Saved" )
RecipeRun conditions
Variables ¶
var ( ErrInvalidLengthGenerated = fmt.Errorf("proto: negative length found during unmarshaling") ErrIntOverflowGenerated = fmt.Errorf("proto: integer overflow") ErrUnexpectedEndOfGroupGenerated = fmt.Errorf("proto: unexpected end of group") )
var ( // SchemeBuilder registers our types SchemeBuilder = k8sruntime.NewSchemeBuilder(AddKnownTypes) // AddToScheme local alias for SchemeBuilder.AddToScheme AddToScheme = SchemeBuilder.AddToScheme )
var SchemeGroupVersion = schema.GroupVersion{Group: data.GroupName, Version: "v1alpha1"}
SchemeGroupVersion is the group version used to register these objects
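A minimal sketch of wiring these variables into a client scheme, so the data.modela.ai/v1alpha1 types can be decoded by a Kubernetes client. The import path is an assumption; adjust it to your module layout.

package examples

import (
	"k8s.io/apimachinery/pkg/runtime"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path; adjust to your module
)

// newScheme returns a runtime.Scheme that recognizes the data.modela.ai/v1alpha1 types.
func newScheme() (*runtime.Scheme, error) {
	scheme := runtime.NewScheme()
	// AddToScheme is the package-level alias for SchemeBuilder.AddToScheme shown above.
	if err := data.AddToScheme(scheme); err != nil {
		return nil, err
	}
	return scheme, nil
}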
Functions ¶
func AddKnownTypes ¶
func AddKnownTypes(scheme *k8sruntime.Scheme) error
Adds the list of known types to api.Scheme.
func Resource ¶
func Resource(resource string) schema.GroupResource
Resource takes an unqualified resource and returns a Group-qualified GroupResource
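A short, hedged example of building a group-qualified identifier with Resource; the "datasets" resource name is illustrative, and the import path is assumed.

package examples

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// printDatasetResource prints the group-qualified form of an illustrative
// "datasets" resource, e.g. for RBAC rules or event messages.
func printDatasetResource() {
	gr := data.Resource("datasets")
	fmt.Println(gr.String()) // e.g. "datasets.data.modela.ai"
}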
Types ¶
type ApprovalType ¶
type ApprovalType string
const ( ApprovalTypeApproved ApprovalType = "approved" ApprovalTypeReject ApprovalType = "reject" )
type BarChartSpec ¶
type BarChartSpec struct { // Dataset is the name of the dataset // +kubebuilder:validation:Optional DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"` // The name of the X column // +kubebuilder:validation:Optional X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"` // The name of the Y column // +kubebuilder:validation:Optional Y *string `json:"y,omitempty" protobuf:"bytes,3,opt,name=y"` // Show the chart legend // +kubebuilder:validation:Optional Legend *bool `json:"legend,omitempty" protobuf:"varint,4,opt,name=legend"` // Sort the values // +kubebuilder:validation:Optional Sort *bool `json:"sort,omitempty" protobuf:"varint,5,opt,name=sort"` }
Bar Chart
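A minimal sketch of populating a BarChartSpec; the dataset and column names are illustrative, and the import path is assumed.

package examples

import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

func strPtr(s string) *string { return &s }
func boolPtr(b bool) *bool    { return &b }

// salesByRegion builds a bar chart over an illustrative "sales" dataset.
func salesByRegion() data.BarChartSpec {
	return data.BarChartSpec{
		DatasetName: strPtr("sales"),   // dataset to chart
		X:           strPtr("region"),  // X axis column
		Y:           strPtr("revenue"), // Y axis column
		Legend:      boolPtr(true),
		Sort:        boolPtr(true),
	}
}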
func (*BarChartSpec) DeepCopy ¶
func (in *BarChartSpec) DeepCopy() *BarChartSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BarChartSpec.
func (*BarChartSpec) DeepCopyInto ¶
func (in *BarChartSpec) DeepCopyInto(out *BarChartSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*BarChartSpec) Descriptor ¶
func (*BarChartSpec) Descriptor() ([]byte, []int)
func (*BarChartSpec) Marshal ¶
func (m *BarChartSpec) Marshal() (dAtA []byte, err error)
func (*BarChartSpec) MarshalToSizedBuffer ¶
func (m *BarChartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*BarChartSpec) ProtoMessage ¶
func (*BarChartSpec) ProtoMessage()
func (*BarChartSpec) Reset ¶
func (m *BarChartSpec) Reset()
func (*BarChartSpec) Size ¶
func (m *BarChartSpec) Size() (n int)
func (*BarChartSpec) String ¶
func (this *BarChartSpec) String() string
func (*BarChartSpec) Unmarshal ¶
func (m *BarChartSpec) Unmarshal(dAtA []byte) error
func (*BarChartSpec) XXX_DiscardUnknown ¶
func (m *BarChartSpec) XXX_DiscardUnknown()
func (*BarChartSpec) XXX_Marshal ¶
func (m *BarChartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*BarChartSpec) XXX_Merge ¶
func (m *BarChartSpec) XXX_Merge(src proto.Message)
func (*BarChartSpec) XXX_Size ¶
func (m *BarChartSpec) XXX_Size() int
func (*BarChartSpec) XXX_Unmarshal ¶
func (m *BarChartSpec) XXX_Unmarshal(b []byte) error
type Column ¶
type Column struct { // The name of the column // +kubebuilder:validation:MaxLength=63 // +kubebuilder:validation:MinLength=1 Name string `json:"name" protobuf:"bytes,1,opt,name=name"` // The display name of the column, which is used in reports and other visual elements. If omitted, it will use the raw name // +kubebuilder:validation:MaxLength=63 // +kubebuilder:validation:Optional DisplayName *string `json:"displayName,omitempty" protobuf:"bytes,2,opt,name=displayName"` // The data type of the feature (e.g. number, string, boolean, etc.) DataType catalog.DataType `json:"datatype" protobuf:"bytes,3,opt,name=datatype"` // The data domain of the feature, which constrains the contents of the feature to a specific set of values // +kubebuilder:validation:Optional Format *catalog.DataDomain `json:"format,omitempty" protobuf:"bytes,4,opt,name=format"` // The user-specified description of the feature // +kubebuilder:validation:MaxLength=63 // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" protobuf:"bytes,5,opt,name=description"` // Indicates if the feature should be ignored when building models // +kubebuilder:validation:Optional Ignore *bool `json:"ignore,omitempty" protobuf:"varint,6,opt,name=ignore"` // Indicates if the feature is the target feature of the model, and the feature which predictions will be made on // +kubebuilder:validation:Optional Target *bool `json:"target,omitempty" protobuf:"varint,7,opt,name=target"` // Indicates if the column can contain null values // +kubebuilder:validation:Optional Nullable *bool `json:"nullable,omitempty" protobuf:"varint,8,opt,name=nullable"` // Denotes if the column specifies a primary key of a database table (i.e. a user's ID) // +kubebuilder:validation:Optional PK *bool `json:"pk,omitempty" protobuf:"varint,9,opt,name=pk"` // Denotes if the column specifies a foreign key of another database table // +kubebuilder:validation:Optional FK *bool `json:"fk,omitempty" protobuf:"varint,10,opt,name=fk"` // The integer value which the values of the column should be a multiple of // +kubebuilder:validation:Minimum=0 // +kubebuilder:validation:Optional MultipleOf *int32 `json:"multipleOf,omitempty" protobuf:"varint,11,opt,name=multipleOf"` // The maximum value of all values in the column // +kubebuilder:validation:Optional Maximum *float64 `json:"maximum,omitempty" protobuf:"bytes,12,opt,name=maximum"` // The exclusive upper limit of all values in the column, which does not include the maximum value // +kubebuilder:validation:Optional ExclusiveMaximum *bool `json:"exclusiveMaximum,omitempty" protobuf:"varint,13,opt,name=exclusiveMaximum"` // The minimum value of all values in the column // +kubebuilder:validation:Optional Minimum *float64 `json:"minimum,omitempty" protobuf:"bytes,14,opt,name=minimum"` // The exclusive lower limit of all values in the column, which does not include the minimum value // +kubebuilder:validation:Optional ExclusiveMinimum *bool `json:"exclusiveMinimum,omitempty" protobuf:"varint,15,opt,name=exclusiveMinimum"` // The maximum length of values in the column, if the column data type is a string // +kubebuilder:validation:Minimum=0 // +kubebuilder:validation:Optional MaxLength *int32 `json:"maxLength,omitempty" protobuf:"varint,16,opt,name=maxLength"` // The minimum length of values in the column, if the column data type is a string // +kubebuilder:validation:Minimum=0 // +kubebuilder:validation:Optional MinLength *int32 `json:"minLength,omitempty" 
protobuf:"varint,17,opt,name=minLength"` // The regex pattern which values in the column must adhere to // +kubebuilder:validation:Optional Pattern *string `json:"pattern,omitempty" protobuf:"bytes,18,opt,name=pattern"` // Required // +kubebuilder:validation:Optional Required *bool `json:"required,omitempty" protobuf:"varint,19,opt,name=required"` // A user-specified example value // +kubebuilder:validation:Optional Example *string `json:"example,omitempty" protobuf:"bytes,20,opt,name=example"` // A link to user-specified external documentation // +kubebuilder:validation:Optional ExternalDocs *string `json:"externalDocs,omitempty" protobuf:"bytes,21,opt,name=externalDocs"` // The collection of unique values for categorical features // +kubebuilder:validation:Optional Enum []string `json:"enum,omitempty" protobuf:"bytes,22,rep,name=enum"` // The maximum number of items if the column is a list of values // +kubebuilder:validation:Minimum=0 // +kubebuilder:validation:Optional MaxItems *int32 `json:"maxItems,omitempty" protobuf:"varint,24,opt,name=maxItems"` // The minimum number of items if the column is a list of values // +kubebuilder:validation:Minimum=0 // +kubebuilder:validation:Optional MinItems *int32 `json:"minItems,omitempty" protobuf:"varint,25,opt,name=minItems"` // Enforce that all the items in the list are unique // +kubebuilder:validation:Optional UniqueItems *bool `json:"uniqueItems,omitempty" protobuf:"varint,26,opt,name=uniqueItems"` // Indicates if the column contains personally identifiable information // +kubebuilder:validation:Optional PII *bool `json:"pii,omitempty" protobuf:"varint,28,opt,name=pii"` // Indicates if the column contains personal health information // +kubebuilder:validation:Optional PHI *bool `json:"phi,omitempty" protobuf:"varint,29,opt,name=phi"` // Protected means that this feature is important for ethical AI / Fairness // +kubebuilder:validation:Optional Protected *bool `json:"protected,omitempty" protobuf:"varint,31,opt,name=protected"` // The default value for number types; used internally for synthetic data and validation DefaultValueNum *float64 `json:"DefaultValueNum,omitempty" protobuf:"bytes,32,opt,name=defaultValueNum"` // Indicates if values from this column will be sampled on a logarithmic scale // +kubebuilder:validation:Optional Log *bool `json:"log,omitempty" protobuf:"varint,33,opt,name=log"` // Mu is the mean of the normal distribution // +kubebuilder:validation:Optional Mu *float64 `json:"mu,omitempty" protobuf:"bytes,34,opt,name=mu"` // Sigma is the standard deviation of the distribution // +kubebuilder:validation:Optional Sigma *float64 `json:"sigma,omitempty" protobuf:"bytes,35,opt,name=sigma"` // The threshold skew for skew detection for the feature represented by this feature. 
// +kubebuilder:validation:Optional SkewThreshold *float64 `json:"skewThreshold,omitempty" protobuf:"bytes,36,opt,name=skewThreshold"` // The drift threshold used for data drift detection on this feature // +kubebuilder:validation:Optional DriftThreshold *float64 `json:"driftThreshold,omitempty" protobuf:"bytes,37,opt,name=driftThreshold"` // Indicates if the column is a key column // +kubebuilder:validation:Optional Key *bool `json:"key,omitempty" protobuf:"varint,38,opt,name=key"` // Indicates if the column holds fold values // +kubebuilder:validation:Optional Fold *bool `json:"fold,omitempty" protobuf:"varint,39,opt,name=fold"` // If true, this is a weight column // +kubebuilder:validation:Optional Weight *bool `json:"weight,omitempty" protobuf:"varint,40,opt,name=weight"` // Indicates that the feature should always be used in training // +kubebuilder:validation:Optional Reserved *bool `json:"reserved,omitempty" protobuf:"varint,41,opt,name=reserved"` // The recommended imputation method for the column // +kubebuilder:validation:Optional Imputation *catalog.Imputation `json:"imputation,omitempty" protobuf:"bytes,42,opt,name=imputation"` // The recommended scaling method for the column // +kubebuilder:validation:Optional Scaling *catalog.Scaling `json:"scaling,omitempty" protobuf:"bytes,43,opt,name=scaling"` // Indicates if the feature was automatically generated // +kubebuilder:validation:Optional Generated *bool `json:"generated,omitempty" protobuf:"varint,44,opt,name=generated"` // The formula used to generate the column // +kubebuilder:validation:Optional Formula *string `json:"formula,omitempty" protobuf:"bytes,45,opt,name=formula"` // Indicates if the column is an ID column // +kubebuilder:validation:Optional ID *bool `json:"id,omitempty" protobuf:"varint,46,opt,name=id"` // The step value if the column values are a sequence of numbers // +kubebuilder:validation:Optional Step *float64 `json:"step,omitempty" protobuf:"bytes,47,opt,name=step"` // Contains the index of the column in the schema // +kubebuilder:validation:Optional Loc int32 `json:"loc,omitempty" protobuf:"varint,48,opt,name=loc"` // The format of the datetime column. This is only set if the column contains a datetime type. // +kubebuilder:validation:Optional DateTimeFormat *string `json:"datetimeFormat,omitempty" protobuf:"bytes,49,opt,name=datetimeFormat"` // Indicates if the column contains a time series. // In forecasting, if only one column is a time series, this is a univariate time series; // otherwise, if two or more columns contain time series, this is a multivariate time series. // +kubebuilder:validation:Optional TimeSeries *bool `json:"timeseries,omitempty" protobuf:"varint,50,opt,name=timeseries"` // In forecasting datasets, indicates if the column is a regressor // This is relevant only for time series schemas // +kubebuilder:validation:Optional Regressor *bool `json:"regressor,omitempty" protobuf:"varint,51,opt,name=regressor"` // In forecasting datasets, indicates if the column is a lagged regressor // This is relevant only for time series schemas // +kubebuilder:validation:Optional LaggedRegressor *bool `json:"laggedRegressor,omitempty" protobuf:"varint,52,opt,name=laggedRegressor"` // For time series, indicates that this column will be used as the datetime index // for the time series. Note that there can be multiple datetime-type columns, but only one // time index column. 
// +kubebuilder:validation:Optional TimeIndex *bool `json:"timeIndex,omitempty" protobuf:"varint,53,opt,name=timeIndex"` // If this column is an aggregate, defines the aggregation function // +kubebuilder:validation:Optional Aggr *catalog.Aggregate `json:"aggr,omitempty" protobuf:"varint,54,opt,name=aggr"` // The aggregation window, if this column is an aggregate // +kubebuilder:validation:Optional Window *int32 `json:"window,omitempty" protobuf:"varint,55,opt,name=window"` }
Column specifies the attributes of a single column in a dataset. The fields of Column align with the JSON Schema standard; detailed documentation is available at https://json-schema.org/draft/2020-12/json-schema-validation.html
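A minimal sketch of declaring a numeric Column for a schema. The import paths and the "number" DataType literal are assumptions; use the DataType constants defined in your version of the catalog package.

package examples

import (
	catalog "github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1" // assumed import path
	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1"       // assumed import path
)

func strPtr(s string) *string { return &s }
func boolPtr(b bool) *bool    { return &b }

// ageColumn sketches a numeric, non-nullable column with a simple value range.
// The "number" literal is an assumption about the catalog.DataType values.
func ageColumn() data.Column {
	lo, hi := 0.0, 130.0
	return data.Column{
		Name:        "age",
		DisplayName: strPtr("Age"),
		DataType:    catalog.DataType("number"),
		Nullable:    boolPtr(false),
		Minimum:     &lo,
		Maximum:     &hi,
	}
}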
func (*Column) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Column.
func (*Column) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Column) Descriptor ¶
func (*Column) MarshalToSizedBuffer ¶
func (*Column) ProtoMessage ¶
func (*Column) ProtoMessage()
func (*Column) ValidateColumn ¶
func (column *Column) ValidateColumn() (bool, []metav1.StatusCause)
func (*Column) XXX_DiscardUnknown ¶
func (m *Column) XXX_DiscardUnknown()
func (*Column) XXX_Marshal ¶
func (*Column) XXX_Unmarshal ¶
type ColumnHistogram ¶ added in v0.4.925
type ColumnHistogram struct { // The name of the column //+kubebuilder:validation:Optional Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"` // The histogram data for the column //+kubebuilder:validation:Optional Histogram catalog.HistogramData `json:"histogram,omitempty" protobuf:"bytes,2,opt,name=histogram"` // Drift measurements for this column //+kubebuilder:validation:Optional Metrics []catalog.Measurement `json:"metrics,omitempty" protobuf:"bytes,3,rep,name=metrics"` // True if drift was detected for this column //+kubebuilder:validation:Optional Drift *bool `json:"drift,omitempty" protobuf:"varint,4,opt,name=drift"` }
func (*ColumnHistogram) DeepCopy ¶ added in v0.4.926
func (in *ColumnHistogram) DeepCopy() *ColumnHistogram
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnHistogram.
func (*ColumnHistogram) DeepCopyInto ¶ added in v0.4.926
func (in *ColumnHistogram) DeepCopyInto(out *ColumnHistogram)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ColumnHistogram) Descriptor ¶ added in v0.4.925
func (*ColumnHistogram) Descriptor() ([]byte, []int)
func (*ColumnHistogram) Marshal ¶ added in v0.4.925
func (m *ColumnHistogram) Marshal() (dAtA []byte, err error)
func (*ColumnHistogram) MarshalTo ¶ added in v0.4.925
func (m *ColumnHistogram) MarshalTo(dAtA []byte) (int, error)
func (*ColumnHistogram) MarshalToSizedBuffer ¶ added in v0.4.925
func (m *ColumnHistogram) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ColumnHistogram) ProtoMessage ¶ added in v0.4.925
func (*ColumnHistogram) ProtoMessage()
func (*ColumnHistogram) Reset ¶ added in v0.4.925
func (m *ColumnHistogram) Reset()
func (*ColumnHistogram) Size ¶ added in v0.4.925
func (m *ColumnHistogram) Size() (n int)
func (*ColumnHistogram) String ¶ added in v0.4.925
func (this *ColumnHistogram) String() string
func (*ColumnHistogram) Unmarshal ¶ added in v0.4.925
func (m *ColumnHistogram) Unmarshal(dAtA []byte) error
func (*ColumnHistogram) XXX_DiscardUnknown ¶ added in v0.4.925
func (m *ColumnHistogram) XXX_DiscardUnknown()
func (*ColumnHistogram) XXX_Marshal ¶ added in v0.4.925
func (m *ColumnHistogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ColumnHistogram) XXX_Merge ¶ added in v0.4.925
func (m *ColumnHistogram) XXX_Merge(src proto.Message)
func (*ColumnHistogram) XXX_Size ¶ added in v0.4.925
func (m *ColumnHistogram) XXX_Size() int
func (*ColumnHistogram) XXX_Unmarshal ¶ added in v0.4.925
func (m *ColumnHistogram) XXX_Unmarshal(b []byte) error
type ColumnSpec ¶
type ColumnSpec struct { // If true this column is an empty spacer // +kubebuilder:default:=false // +kubebuilder:validation:Optional Spacer *bool `json:"spacer,omitempty" protobuf:"varint,1,opt,name=spacer"` // The width in columns, each row is divided into 12 columns // +kubebuilder:validation:Optional Width *int32 `json:"width,omitempty" protobuf:"varint,2,opt,name=width"` // The actual Content ComponentSpec `json:",omitempty" protobuf:"bytes,3,opt,name=content"` }
The spec for a column in the row. A column can span multiple grid-based columns.
func (*ColumnSpec) DeepCopy ¶
func (in *ColumnSpec) DeepCopy() *ColumnSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnSpec.
func (*ColumnSpec) DeepCopyInto ¶
func (in *ColumnSpec) DeepCopyInto(out *ColumnSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ColumnSpec) Descriptor ¶
func (*ColumnSpec) Descriptor() ([]byte, []int)
func (*ColumnSpec) Marshal ¶
func (m *ColumnSpec) Marshal() (dAtA []byte, err error)
func (*ColumnSpec) MarshalToSizedBuffer ¶
func (m *ColumnSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ColumnSpec) ProtoMessage ¶
func (*ColumnSpec) ProtoMessage()
func (*ColumnSpec) Reset ¶
func (m *ColumnSpec) Reset()
func (*ColumnSpec) Size ¶
func (m *ColumnSpec) Size() (n int)
func (*ColumnSpec) String ¶
func (this *ColumnSpec) String() string
func (*ColumnSpec) Unmarshal ¶
func (m *ColumnSpec) Unmarshal(dAtA []byte) error
func (*ColumnSpec) XXX_DiscardUnknown ¶
func (m *ColumnSpec) XXX_DiscardUnknown()
func (*ColumnSpec) XXX_Marshal ¶
func (m *ColumnSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ColumnSpec) XXX_Merge ¶
func (m *ColumnSpec) XXX_Merge(src proto.Message)
func (*ColumnSpec) XXX_Size ¶
func (m *ColumnSpec) XXX_Size() int
func (*ColumnSpec) XXX_Unmarshal ¶
func (m *ColumnSpec) XXX_Unmarshal(b []byte) error
type ColumnStatistics ¶
type ColumnStatistics struct { // The name of the column // +kubebuilder:validation:Optional Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"` // The data type of the column // +kubebuilder:validation:Optional DataType catalog.DataType `json:"datatype,omitempty" protobuf:"bytes,2,opt,name=datatype"` // The number of rows which contain a value for the feature // +kubebuilder:validation:Optional Count float64 `json:"count,omitempty" protobuf:"bytes,3,opt,name=count"` // The number of unique values present in the column // +kubebuilder:validation:Optional Distinct int32 `json:"distinct,omitempty" protobuf:"varint,4,opt,name=distinct"` // The number of missing values present in the column // +kubebuilder:validation:Optional Missing int32 `json:"missing,omitempty" protobuf:"varint,5,opt,name=missing"` // Percentage of missing values in the column // +kubebuilder:validation:Optional PercentMissing float64 `json:"percentMissing,omitempty" protobuf:"bytes,6,opt,name=percentMissing"` // The mean of all values in the column, if the column data type is a number // +kubebuilder:validation:Optional Mean float64 `json:"mean,omitempty" protobuf:"bytes,7,opt,name=mean"` // The standard deviation of the column's values // +kubebuilder:validation:Optional StdDev float64 `json:"stddev,omitempty" protobuf:"bytes,8,opt,name=stddev"` // The variability of the column's values from the column's mean // +kubebuilder:validation:Optional Variance float64 `json:"variance,omitempty" protobuf:"bytes,9,opt,name=variance"` // The minimum value of all values in the column // +kubebuilder:validation:Optional Min float64 `json:"min,omitempty" protobuf:"bytes,10,opt,name=min"` // The maximum value of all values in the column // +kubebuilder:validation:Optional Max float64 `json:"max,omitempty" protobuf:"bytes,11,opt,name=max"` // The computed kurtosis, which measures the peakedness of the distribution of values in the column // +kubebuilder:validation:Optional Kurtosis float64 `json:"kurtosis,omitempty" protobuf:"bytes,12,opt,name=kurtosis"` // The computed skewness, which measures the asymmetry of the distribution of values in the column // +kubebuilder:validation:Optional Skewness float64 `json:"skewness,omitempty" protobuf:"bytes,13,opt,name=skewness"` // The sum of all values in the column // +kubebuilder:validation:Optional Sum float64 `json:"sum,omitempty" protobuf:"bytes,14,opt,name=sum"` // The mean absolute deviation (MAD) of the column's values // +kubebuilder:validation:Optional Mad float64 `json:"mad,omitempty" protobuf:"bytes,15,opt,name=mad"` // The 25% point of all the values of the column in order // +kubebuilder:validation:Optional P25 float64 `json:"p25,omitempty" protobuf:"bytes,16,opt,name=p25"` // The 50% point of all the values of the column in order, also known as the median // +kubebuilder:validation:Optional P50 float64 `json:"p50,omitempty" protobuf:"bytes,17,opt,name=p50"` // The 75% point of all the values of the column in order // +kubebuilder:validation:Optional P75 float64 `json:"p75,omitempty" protobuf:"bytes,18,opt,name=p75"` // The interquartile range of the column's values // +kubebuilder:validation:Optional IQR float64 `json:"iqr,omitempty" protobuf:"bytes,19,opt,name=iqr"` // The mode value of the column, also known as the most frequent value // +kubebuilder:validation:Optional Mode string `json:"mode,omitempty" protobuf:"bytes,20,opt,name=mode"` // The number of zero values in the column // +kubebuilder:validation:Optional Zeros float64 `json:"zeros,omitempty" 
protobuf:"bytes,21,opt,name=zeros"` // The number of invalid values in the column // +kubebuilder:validation:Optional Invalid int32 `json:"invalid,omitempty" protobuf:"varint,22,opt,name=invalid"` // The feature importance of the column // +kubebuilder:validation:Optional Importance float64 `json:"importance,omitempty" protobuf:"bytes,23,opt,name=importance"` // Indicates if the feature is the target attribute for a Study, as specified by the Dataset's DataSource // +kubebuilder:validation:Optional Target bool `json:"target,omitempty" protobuf:"varint,24,opt,name=target"` // Indicates if the column should be ignored, as specified by the Dataset's DataSource // +kubebuilder:validation:Optional Ignore bool `json:"ignore,omitempty" protobuf:"varint,25,opt,name=ignore"` // Indicates if the column may contain null values, as specified by the Dataset's DataSource // +kubebuilder:validation:Optional Nullable bool `json:"nullable,omitempty" protobuf:"varint,26,opt,name=nullable"` // Indicates if the column has high cardinality and should use the high cardinality encoder during feature engineering // +kubebuilder:validation:Optional HighCardinality bool `json:"highCardinality,omitempty" protobuf:"varint,27,opt,name=highCardinality"` // Indicates if the column has high correlation with another feature, and that it should be dropped // +kubebuilder:validation:Optional HighCorrWithOtherFeatures bool `json:"highCorrWithOtherFeatures,omitempty" protobuf:"varint,28,opt,name=highCorrWithOtherFeatures"` // Indicate that the feature has low correlation with the target feature, and that it should be dropped // +kubebuilder:validation:Optional LowCorrWithTarget bool `json:"lowCorrWithTarget,omitempty" protobuf:"varint,29,opt,name=lowCorrWithTarget"` // Indicates if the column has a high percentage of missing values, and that it should be dropped // +kubebuilder:validation:Optional HighMissingPct bool `json:"highMissingPct,omitempty" protobuf:"varint,30,opt,name=highMissingPct"` // Marks that the column is skewed and would require a power transform. // // If skewness is less than -1 or greater than 1, the distribution is highly skewed. // If skewness is between -1 and -0.5 or between 0.5 and 1, the distribution is moderately skewed. 
// If skewness is between -0.5 and 0.5, the distribution is approximately symmetric // +kubebuilder:validation:Optional Skewed bool `json:"skewed,omitempty" protobuf:"varint,31,opt,name=skewed"` // Indicates if the column is an ID column, such as a primary key // +kubebuilder:validation:Optional Id bool `json:"id,omitempty" protobuf:"varint,32,opt,name=id"` // +kubebuilder:validation:Optional Constant bool `json:"constant,omitempty" protobuf:"varint,33,opt,name=constant"` // Indicates if the column is a duplicate of another column // +kubebuilder:validation:Optional Duplicate bool `json:"duplicate,omitempty" protobuf:"varint,34,opt,name=duplicate"` // Indicates if the column is reserved and must be a feature included in model training // +kubebuilder:validation:Optional Reserved bool `json:"reserved,omitempty" protobuf:"varint,35,opt,name=reserved"` // The ratio between non-null and null values in the column // +kubebuilder:validation:Optional Completeness float64 `json:"completeness,omitempty" protobuf:"bytes,37,opt,name=completeness"` // The ratio between unique values and non-unique values in the column // +kubebuilder:validation:Optional DistinctValueCount float64 `json:"distinctValueCount,omitempty" protobuf:"bytes,38,opt,name=distinctValueCount"` // The ratio between most the most frequent value to the number of total values in the column // +kubebuilder:validation:Optional MostFreqValuesRatio float64 `json:"mostFreqValuesRatio,omitempty" protobuf:"bytes,39,opt,name=mostFreqValuesRatio"` // Used for text attributes // +kubebuilder:validation:Optional IndexOfPeculiarity float64 `json:"indexOfPeculiarity,omitempty" protobuf:"bytes,40,opt,name=indexOfPeculiarity"` // Histogram data representing the distribution of the values in the column // +kubebuilder:validation:Optional Histogram catalog.HistogramData `json:"histogram,omitempty" protobuf:"bytes,41,opt,name=histogram"` // Correlation to the target feature // +kubebuilder:validation:Optional CorrToTarget float64 `json:"corrToTarget,omitempty" protobuf:"bytes,42,opt,name=corrToTarget"` // The column index in the dataset Index int32 `json:"index,omitempty" protobuf:"bytes,43,opt,name=index"` // Outlier statistics. Outliers OutlierStat `json:"outliers,omitempty" protobuf:"bytes,44,opt,name=outliers"` }
ColumnStatistics contains statistical parameters for a single feature from a dataset
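A small, hedged example of consuming ColumnStatistics after profiling, using only the boolean quality flags shown in the struct above; the import path is assumed.

package examples

import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

// columnsToReview returns the names of profiled columns flagged as problematic:
// high missing percentage, constant value, or duplicate of another column.
func columnsToReview(cols []data.ColumnStatistics) []string {
	var names []string
	for _, c := range cols {
		if c.HighMissingPct || c.Constant || c.Duplicate {
			names = append(names, c.Name)
		}
	}
	return names
}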
func (ColumnStatistics) BigBoolTest ¶ added in v0.5.69
func (col ColumnStatistics) BigBoolTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
func (ColumnStatistics) BigCatTest ¶ added in v0.5.69
func (col ColumnStatistics) BigCatTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
func (ColumnStatistics) BigNumericTest ¶ added in v0.5.69
func (col ColumnStatistics) BigNumericTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
func (*ColumnStatistics) DeepCopy ¶
func (in *ColumnStatistics) DeepCopy() *ColumnStatistics
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnStatistics.
func (*ColumnStatistics) DeepCopyInto ¶
func (in *ColumnStatistics) DeepCopyInto(out *ColumnStatistics)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ColumnStatistics) Descriptor ¶
func (*ColumnStatistics) Descriptor() ([]byte, []int)
func (ColumnStatistics) GenDriftTestCase ¶ added in v0.5.69
func (col ColumnStatistics) GenDriftTestCase(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
Based on the column type, generates the appropriate drift test case.
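A hedged sketch of using GenDriftTestCase to build one drift test per profiled column; the import paths are assumptions, and the DriftThreshold values are supplied by the caller.

package examples

import (
	catalog "github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1" // assumed import path
	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1"       // assumed import path
)

// driftTests builds one drift test case per profiled column, letting
// GenDriftTestCase choose the numeric, categorical, or boolean variant.
func driftTests(cols []data.ColumnStatistics, thresholds []data.DriftThreshold, rows int32) []*catalog.DataTestCase {
	var tests []*catalog.DataTestCase
	for _, col := range cols {
		if tc := col.GenDriftTestCase(thresholds, rows); tc != nil {
			tests = append(tests, tc)
		}
	}
	return tests
}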
func (*ColumnStatistics) Marshal ¶
func (m *ColumnStatistics) Marshal() (dAtA []byte, err error)
func (*ColumnStatistics) MarshalToSizedBuffer ¶
func (m *ColumnStatistics) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ColumnStatistics) ProtoMessage ¶
func (*ColumnStatistics) ProtoMessage()
func (*ColumnStatistics) Reset ¶
func (m *ColumnStatistics) Reset()
func (*ColumnStatistics) Size ¶
func (m *ColumnStatistics) Size() (n int)
func (ColumnStatistics) SmallBoolTest ¶ added in v0.5.69
func (col ColumnStatistics) SmallBoolTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
func (ColumnStatistics) SmallCatTest ¶ added in v0.5.69
func (col ColumnStatistics) SmallCatTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
func (ColumnStatistics) SmallNumericTest ¶ added in v0.5.69
func (col ColumnStatistics) SmallNumericTest(thresholds []DriftThreshold, rowCount int32) *catalog.DataTestCase
func (*ColumnStatistics) String ¶
func (this *ColumnStatistics) String() string
func (*ColumnStatistics) Unmarshal ¶
func (m *ColumnStatistics) Unmarshal(dAtA []byte) error
func (*ColumnStatistics) XXX_DiscardUnknown ¶
func (m *ColumnStatistics) XXX_DiscardUnknown()
func (*ColumnStatistics) XXX_Marshal ¶
func (m *ColumnStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ColumnStatistics) XXX_Merge ¶
func (m *ColumnStatistics) XXX_Merge(src proto.Message)
func (*ColumnStatistics) XXX_Size ¶
func (m *ColumnStatistics) XXX_Size() int
func (*ColumnStatistics) XXX_Unmarshal ¶
func (m *ColumnStatistics) XXX_Unmarshal(b []byte) error
type ComponentSpec ¶
type ComponentSpec struct { // +kubebuilder:validation:Optional Title *string `json:"title,omitempty" protobuf:"bytes,1,opt,name=title"` // +kubebuilder:validation:Optional SubTitle *string `json:"subtitle,omitempty" protobuf:"bytes,2,opt,name=subtitle"` // +kubebuilder:validation:Optional ComponentView `json:",inline" protobuf:"bytes,4,opt,name=content"` }
func (*ComponentSpec) DeepCopy ¶
func (in *ComponentSpec) DeepCopy() *ComponentSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComponentSpec.
func (*ComponentSpec) DeepCopyInto ¶
func (in *ComponentSpec) DeepCopyInto(out *ComponentSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ComponentSpec) Descriptor ¶
func (*ComponentSpec) Descriptor() ([]byte, []int)
func (*ComponentSpec) Marshal ¶
func (m *ComponentSpec) Marshal() (dAtA []byte, err error)
func (*ComponentSpec) MarshalToSizedBuffer ¶
func (m *ComponentSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ComponentSpec) ProtoMessage ¶
func (*ComponentSpec) ProtoMessage()
func (*ComponentSpec) Reset ¶
func (m *ComponentSpec) Reset()
func (*ComponentSpec) Size ¶
func (m *ComponentSpec) Size() (n int)
func (*ComponentSpec) String ¶
func (this *ComponentSpec) String() string
func (*ComponentSpec) Unmarshal ¶
func (m *ComponentSpec) Unmarshal(dAtA []byte) error
func (*ComponentSpec) XXX_DiscardUnknown ¶
func (m *ComponentSpec) XXX_DiscardUnknown()
func (*ComponentSpec) XXX_Marshal ¶
func (m *ComponentSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ComponentSpec) XXX_Merge ¶
func (m *ComponentSpec) XXX_Merge(src proto.Message)
func (*ComponentSpec) XXX_Size ¶
func (m *ComponentSpec) XXX_Size() int
func (*ComponentSpec) XXX_Unmarshal ¶
func (m *ComponentSpec) XXX_Unmarshal(b []byte) error
type ComponentView ¶
type ComponentView struct { // +kubebuilder:validation:Optional Metric *MetricSpec `json:"metric,omitempty" protobuf:"bytes,1,opt,name=metric"` // +kubebuilder:validation:Optional Gauge *GaugeSpec `json:"gauge,omitempty" protobuf:"bytes,2,opt,name=gauge"` // +kubebuilder:validation:Optional Histogram *HistogramSpec `json:"histogram,omitempty" protobuf:"bytes,3,opt,name=histogram"` // +kubebuilder:validation:Optional Table *TableSpec `json:"table,omitempty" protobuf:"bytes,4,opt,name=table"` // +kubebuilder:validation:Optional LineChart *LineChartSpec `json:"lineChart,omitempty" protobuf:"bytes,5,opt,name=lineChart"` // +kubebuilder:validation:Optional BarChart *BarChartSpec `json:"barChart,omitempty" protobuf:"bytes,6,opt,name=barChart"` // +kubebuilder:validation:Optional ScatterChart *ScatterPlotSpec `json:"scatterPlot,omitempty" protobuf:"bytes,7,opt,name=scatterPlot"` }
Represents the view part of a component. Only one view should be specified per component.
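A minimal sketch that nests a BarChartSpec inside a ComponentView and places it in a half-width dashboard ColumnSpec; the names and import path are illustrative.

package examples

import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

func strPtr(s string) *string { return &s }
func int32Ptr(i int32) *int32 { return &i }

// chartColumn places a bar chart component in a half-width (6 of 12) dashboard column.
func chartColumn() data.ColumnSpec {
	return data.ColumnSpec{
		Width: int32Ptr(6),
		ComponentSpec: data.ComponentSpec{
			Title: strPtr("Revenue by region"),
			ComponentView: data.ComponentView{
				BarChart: &data.BarChartSpec{
					DatasetName: strPtr("sales"),
					X:           strPtr("region"),
					Y:           strPtr("revenue"),
				},
			},
		},
	}
}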
func (*ComponentView) DeepCopy ¶
func (in *ComponentView) DeepCopy() *ComponentView
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComponentView.
func (*ComponentView) DeepCopyInto ¶
func (in *ComponentView) DeepCopyInto(out *ComponentView)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ComponentView) Descriptor ¶
func (*ComponentView) Descriptor() ([]byte, []int)
func (*ComponentView) Marshal ¶
func (m *ComponentView) Marshal() (dAtA []byte, err error)
func (*ComponentView) MarshalToSizedBuffer ¶
func (m *ComponentView) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ComponentView) ProtoMessage ¶
func (*ComponentView) ProtoMessage()
func (*ComponentView) Reset ¶
func (m *ComponentView) Reset()
func (*ComponentView) Size ¶
func (m *ComponentView) Size() (n int)
func (*ComponentView) String ¶
func (this *ComponentView) String() string
func (*ComponentView) Unmarshal ¶
func (m *ComponentView) Unmarshal(dAtA []byte) error
func (*ComponentView) XXX_DiscardUnknown ¶
func (m *ComponentView) XXX_DiscardUnknown()
func (*ComponentView) XXX_Marshal ¶
func (m *ComponentView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ComponentView) XXX_Merge ¶
func (m *ComponentView) XXX_Merge(src proto.Message)
func (*ComponentView) XXX_Size ¶
func (m *ComponentView) XXX_Size() int
func (*ComponentView) XXX_Unmarshal ¶
func (m *ComponentView) XXX_Unmarshal(b []byte) error
type Correlation ¶
type Correlation struct { // The first feature name Feature1 string `json:"feature1" protobuf:"bytes,1,opt,name=feature1"` // The second feature name Feature2 string `json:"feature2" protobuf:"bytes,2,opt,name=feature2"` // The correlation value Value float64 `json:"value,omitempty" protobuf:"bytes,3,opt,name=value"` // How the value was calculated Method string `json:"method,omitempty" protobuf:"bytes,4,opt,name=method"` }
Correlation records the correlation between two features in a Dataset
func (*Correlation) DeepCopy ¶
func (in *Correlation) DeepCopy() *Correlation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Correlation.
func (*Correlation) DeepCopyInto ¶
func (in *Correlation) DeepCopyInto(out *Correlation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Correlation) Descriptor ¶
func (*Correlation) Descriptor() ([]byte, []int)
func (*Correlation) Marshal ¶
func (m *Correlation) Marshal() (dAtA []byte, err error)
func (*Correlation) MarshalToSizedBuffer ¶
func (m *Correlation) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*Correlation) ProtoMessage ¶
func (*Correlation) ProtoMessage()
func (*Correlation) Reset ¶
func (m *Correlation) Reset()
func (*Correlation) Size ¶
func (m *Correlation) Size() (n int)
func (*Correlation) String ¶
func (this *Correlation) String() string
func (*Correlation) Unmarshal ¶
func (m *Correlation) Unmarshal(dAtA []byte) error
func (*Correlation) XXX_DiscardUnknown ¶
func (m *Correlation) XXX_DiscardUnknown()
func (*Correlation) XXX_Marshal ¶
func (m *Correlation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*Correlation) XXX_Merge ¶
func (m *Correlation) XXX_Merge(src proto.Message)
func (*Correlation) XXX_Size ¶
func (m *Correlation) XXX_Size() int
func (*Correlation) XXX_Unmarshal ¶
func (m *Correlation) XXX_Unmarshal(b []byte) error
type CorrelationSpec ¶
type CorrelationSpec struct { // The minimum value of a computed correlation to be stored as a result // +kubebuilder:default:=50 // +kubebuilder:validation:Optional Cutoff *float64 `json:"cutoff,omitempty" protobuf:"bytes,1,opt,name=cutoff"` // The method to be used when computing correlations // +kubebuilder:default:="pearson" // +kubebuilder:validation:Optional Method *string `json:"method,omitempty" protobuf:"bytes,2,opt,name=method"` // The number of top correlations to be included in the correlation results // +kubebuilder:default:=10 // +kubebuilder:validation:Optional Top *int32 `json:"top,omitempty" protobuf:"varint,3,opt,name=top"` }
CorrelationSpec specifies how the correlations between features in a Dataset should be computed
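A short sketch of a CorrelationSpec that keeps only the strongest Pearson correlations; the values are illustrative and the import path is assumed.

package examples

import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

func float64Ptr(f float64) *float64 { return &f }
func strPtr(s string) *string       { return &s }
func int32Ptr(i int32) *int32       { return &i }

// correlationConfig keeps the top 25 Pearson correlations that pass a cutoff
// of 75 (same scale as the field's documented default of 50).
func correlationConfig() data.CorrelationSpec {
	return data.CorrelationSpec{
		Cutoff: float64Ptr(75),
		Method: strPtr("pearson"),
		Top:    int32Ptr(25),
	}
}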
func (*CorrelationSpec) DeepCopy ¶
func (in *CorrelationSpec) DeepCopy() *CorrelationSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CorrelationSpec.
func (*CorrelationSpec) DeepCopyInto ¶
func (in *CorrelationSpec) DeepCopyInto(out *CorrelationSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*CorrelationSpec) Descriptor ¶
func (*CorrelationSpec) Descriptor() ([]byte, []int)
func (*CorrelationSpec) Marshal ¶
func (m *CorrelationSpec) Marshal() (dAtA []byte, err error)
func (*CorrelationSpec) MarshalToSizedBuffer ¶
func (m *CorrelationSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*CorrelationSpec) ProtoMessage ¶
func (*CorrelationSpec) ProtoMessage()
func (*CorrelationSpec) Reset ¶
func (m *CorrelationSpec) Reset()
func (*CorrelationSpec) Size ¶
func (m *CorrelationSpec) Size() (n int)
func (*CorrelationSpec) String ¶
func (this *CorrelationSpec) String() string
func (*CorrelationSpec) Unmarshal ¶
func (m *CorrelationSpec) Unmarshal(dAtA []byte) error
func (*CorrelationSpec) XXX_DiscardUnknown ¶
func (m *CorrelationSpec) XXX_DiscardUnknown()
func (*CorrelationSpec) XXX_Marshal ¶
func (m *CorrelationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*CorrelationSpec) XXX_Merge ¶
func (m *CorrelationSpec) XXX_Merge(src proto.Message)
func (*CorrelationSpec) XXX_Size ¶
func (m *CorrelationSpec) XXX_Size() int
func (*CorrelationSpec) XXX_Unmarshal ¶
func (m *CorrelationSpec) XXX_Unmarshal(b []byte) error
type CsvFileSpec ¶
type CsvFileSpec struct { // The character used to separate fields (by default, a comma) // +kubebuilder:default:="comma" // +kubebuilder:validation:Optional ColumnDelimiter *Delimiter `json:"columnDelimiter,omitempty" protobuf:"bytes,1,opt,name=columnDelimiter"` // The character used to signal the end of a row (by default, a newline \n) // +kubebuilder:default:="crlf" // +kubebuilder:validation:Optional RowDelimiter *Delimiter `json:"rowDelimiter,omitempty" protobuf:"bytes,2,opt,name=rowDelimiter"` // The charcter used for quotes (by default, a double quote ") // +kubebuilder:default:="double-quote" // +kubebuilder:validation:Optional QuoteChar QuoteChar `json:"quote,omitempty" protobuf:"bytes,3,opt,name=quote"` // The character used to escape the delimiter // +kubebuilder:default:="none" // +kubebuilder:validation:Optional EscapeChar EscapeChar `json:"escapeChar,omitempty" protobuf:"bytes,4,opt,name=escapeChar"` // The comment character used to split comments off the end of lines (by default, a hashtag #) // +kubebuilder:default:="#" // +kubebuilder:validation:Optional CommentChars *string `json:"commentChars,omitempty" protobuf:"bytes,5,opt,name=commentChars"` // Indicates if a header is present in the file // +kubebuilder:default:=true // +kubebuilder:validation:Optional Header *bool `json:"header,omitempty" protobuf:"varint,6,opt,name=header"` // The number of rows to skip from the top of the file // +kubebuilder:default:=0 // +kubebuilder:validation:Minimum=0 // +kubebuilder:validation:Optional SkipRows *int32 `json:"skipRows,omitempty" protobuf:"varint,7,opt,name=skipRows"` // NullValues is a sequence of values to replace with NA. // +kubebuilder:default:="" // +kubebuilder:validation:Optional NullValues *string `json:"nullValues,omitempty" protobuf:"bytes,8,opt,name=nullValues"` // The unicode encoding of the file (e.g. 'utf-8' for UTF-8 encoded text) // +kubebuilder:default:="utf-8" // +kubebuilder:validation:Optional Encoding *catalog.FileEncoding `json:"encoding,omitempty" protobuf:"bytes,9,opt,name=encoding"` // The maximum number of rows to read // +kubebuilder:validation:Minimum=0 // +kubebuilder:validation:Optional MaxRows *int32 `json:"maxRows,omitempty" protobuf:"varint,10,opt,name=maxRows"` // Indicates if the read of the CSV file should fail if there are any errors // +kubebuilder:default:=true // +kubebuilder:validation:Optional Strict *bool `json:"strict,omitempty" protobuf:"varint,11,opt,name=strict"` // The compression type, if the file is compressed // +kubebuilder:default:="none" // +kubebuilder:validation:Optional Compression *string `json:"compression,omitempty" protobuf:"bytes,12,opt,name=compression"` // Indicates if the file contains an index column // +kubebuilder:default:=false // +kubebuilder:validation:Optional HasIndexColumn *bool `json:"hasIndexColumn,omitempty" protobuf:"varint,13,opt,name=hasIndexColumn"` }
CsvFileSpec specifies the format of a CSV (comma-separated values) file
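A minimal sketch of a CsvFileSpec for a headered file with leading metadata rows; the values are illustrative and the import path is assumed.

package examples

import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

func boolPtr(b bool) *bool    { return &b }
func int32Ptr(i int32) *int32 { return &i }
func strPtr(s string) *string { return &s }

// csvFormat describes a headered CSV file that skips two leading metadata rows,
// treats "NA" as a missing value, and caps the number of rows read.
func csvFormat() data.CsvFileSpec {
	return data.CsvFileSpec{
		Header:     boolPtr(true),
		SkipRows:   int32Ptr(2),
		NullValues: strPtr("NA"),
		MaxRows:    int32Ptr(100000),
	}
}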
func (*CsvFileSpec) DeepCopy ¶
func (in *CsvFileSpec) DeepCopy() *CsvFileSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CsvFileSpec.
func (*CsvFileSpec) DeepCopyInto ¶
func (in *CsvFileSpec) DeepCopyInto(out *CsvFileSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*CsvFileSpec) Descriptor ¶
func (*CsvFileSpec) Descriptor() ([]byte, []int)
func (*CsvFileSpec) Marshal ¶
func (m *CsvFileSpec) Marshal() (dAtA []byte, err error)
func (*CsvFileSpec) MarshalToSizedBuffer ¶
func (m *CsvFileSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*CsvFileSpec) ProtoMessage ¶
func (*CsvFileSpec) ProtoMessage()
func (*CsvFileSpec) Reset ¶
func (m *CsvFileSpec) Reset()
func (*CsvFileSpec) Size ¶
func (m *CsvFileSpec) Size() (n int)
func (*CsvFileSpec) String ¶
func (this *CsvFileSpec) String() string
func (*CsvFileSpec) Unmarshal ¶
func (m *CsvFileSpec) Unmarshal(dAtA []byte) error
func (*CsvFileSpec) XXX_DiscardUnknown ¶
func (m *CsvFileSpec) XXX_DiscardUnknown()
func (*CsvFileSpec) XXX_Marshal ¶
func (m *CsvFileSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*CsvFileSpec) XXX_Merge ¶
func (m *CsvFileSpec) XXX_Merge(src proto.Message)
func (*CsvFileSpec) XXX_Size ¶
func (m *CsvFileSpec) XXX_Size() int
func (*CsvFileSpec) XXX_Unmarshal ¶
func (m *CsvFileSpec) XXX_Unmarshal(b []byte) error
type DataInputSpec ¶ added in v0.4.649
type DataInputSpec struct { // List of SQL statements to run before performing the prediction // +kubebuilder:validation:Optional PreSql []string `json:"preSQL,omitempty" protobuf:"bytes,1,opt,name=preSQL"` // The physical location of the observation file. // the location can point to a database. // +kubebuilder:validation:Optional Observation *DataLocation `json:"observation,omitempty" protobuf:"bytes,2,opt,name=observation"` // The file format of the dataset, if applicable // +kubebuilder:default:="csv" // +kubebuilder:validation:Optional Format *FlatFileType `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"` }
DataInputSpec specifies the format and location of an input dataset
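A hedged example of a DataInputSpec that reads observations from a database table through a named Connection; the connection, database, and table names are illustrative, and the import path is assumed.

package examples

import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

func strPtr(s string) *string { return &s }

// tableInput reads observations from a database table through a named Connection.
// Connection, database, and table names are illustrative.
func tableInput() data.DataInputSpec {
	locType := data.DataLocationSQLTable
	return data.DataInputSpec{
		PreSql: []string{"ANALYZE customers;"},
		Observation: &data.DataLocation{
			Type:           &locType,
			ConnectionName: strPtr("postgres-prod"),
			Database:       strPtr("crm"),
			Table:          strPtr("customers"),
		},
	}
}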
func (*DataInputSpec) DeepCopy ¶ added in v0.4.649
func (in *DataInputSpec) DeepCopy() *DataInputSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataInputSpec.
func (*DataInputSpec) DeepCopyInto ¶ added in v0.4.649
func (in *DataInputSpec) DeepCopyInto(out *DataInputSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataInputSpec) Descriptor ¶ added in v0.4.649
func (*DataInputSpec) Descriptor() ([]byte, []int)
func (*DataInputSpec) Marshal ¶ added in v0.4.649
func (m *DataInputSpec) Marshal() (dAtA []byte, err error)
func (*DataInputSpec) MarshalTo ¶ added in v0.4.649
func (m *DataInputSpec) MarshalTo(dAtA []byte) (int, error)
func (*DataInputSpec) MarshalToSizedBuffer ¶ added in v0.4.649
func (m *DataInputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataInputSpec) ProtoMessage ¶ added in v0.4.649
func (*DataInputSpec) ProtoMessage()
func (*DataInputSpec) Reset ¶ added in v0.4.649
func (m *DataInputSpec) Reset()
func (*DataInputSpec) Size ¶ added in v0.4.649
func (m *DataInputSpec) Size() (n int)
func (*DataInputSpec) String ¶ added in v0.4.649
func (this *DataInputSpec) String() string
func (*DataInputSpec) Unmarshal ¶ added in v0.4.649
func (m *DataInputSpec) Unmarshal(dAtA []byte) error
func (*DataInputSpec) XXX_DiscardUnknown ¶ added in v0.4.649
func (m *DataInputSpec) XXX_DiscardUnknown()
func (*DataInputSpec) XXX_Marshal ¶ added in v0.4.649
func (m *DataInputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataInputSpec) XXX_Merge ¶ added in v0.4.649
func (m *DataInputSpec) XXX_Merge(src proto.Message)
func (*DataInputSpec) XXX_Size ¶ added in v0.4.649
func (m *DataInputSpec) XXX_Size() int
func (*DataInputSpec) XXX_Unmarshal ¶ added in v0.4.649
func (m *DataInputSpec) XXX_Unmarshal(b []byte) error
type DataLocation ¶
type DataLocation struct { // The type of location where the data resides, which can either be an object inside an object storage system (i.e. Minio), a SQL location // like a table or a view, a data stream (i.e. Kafka, currently unsupported), or a web location (currently unsupported) // +kubebuilder:default:="object" // +kubebuilder:validation:Optional Type *DataLocationType `json:"type,omitempty" protobuf:"bytes,1,opt,name=type"` // In the case of the type of location being a database, ConnectionName specifies the name of the Connection resource // that exists in the same tenant as the resource specifying the DataLocation. Modela will attempt to connect // to the database using the credentials specified in the Connection, and will execute the query specified by the SQL field // +kubebuilder:default:="" // +kubebuilder:validation:Optional ConnectionName *string `json:"connectionName,omitempty" protobuf:"bytes,2,opt,name=connectionName"` // In the case of the location type being an object storage system, BucketName is the name of the VirtualBucket resource // that exists in the same tenant as the resource specifying the DataLocation. Modela will connect to the external // object storage system, and will access the file from the path specified by the Path field // +kubebuilder:default:="" // +kubebuilder:validation:Optional BucketName *string `json:"bucketName,omitempty" protobuf:"bytes,3,opt,name=bucketName"` // The path to a flat-file inside an object storage system. When using the Modela API to upload files (through the // FileService API), Modela will upload the data to a predetermined path based on the Tenant, DataProduct, // DataProductVersion, and resource type of the resource in relation to the file being uploaded. // The path does not need to adhere to this format; you can give the path to a file inside a bucket not managed by Modela // +kubebuilder:default:="" // +kubebuilder:validation:Optional Path *string `json:"path,omitempty" protobuf:"bytes,4,opt,name=path"` // The name of a table inside a database, if applicable // +kubebuilder:default:="" // +kubebuilder:validation:Optional Table *string `json:"table,omitempty" protobuf:"bytes,5,opt,name=table"` // The name of a database inside the database system specified by the ConnectionName field // +kubebuilder:default:="" // +kubebuilder:validation:Optional Database *string `json:"database,omitempty" protobuf:"bytes,6,opt,name=database"` // The SQL statement which will be executed to query data from the table specified by Table // +kubebuilder:default:="" // +kubebuilder:validation:Optional Sql *string `json:"sql,omitempty" protobuf:"bytes,7,opt,name=sql"` // The name of the streaming topic (currently unsupported) // +kubebuilder:default:="" // +kubebuilder:validation:Optional Topic *string `json:"topic,omitempty" protobuf:"bytes,8,opt,name=topic"` // In the case of the location type being WebApi, URL specifies the external location (HTTP or Git) that will be queried // and then stored as flat-file by the resource which specifies the DataLocation // +kubebuilder:default:="" // +kubebuilder:validation:Optional URL *string `json:"url,omitempty" protobuf:"bytes,9,opt,name=url"` // In the case of the location type being Dataset or PublicDataset, ResourceRef references another resource that // containing data that will be used as a data source // +kubebuilder:validation:Optional ResourceRef *v1.ObjectReference `json:"resourceRef,omitempty" protobuf:"bytes,10,opt,name=resourceRef"` }
DataLocation describes the external location of data that will be accessed by Modela, and additional information on how to query the data if the location is a non-flat-file source.
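A minimal sketch of an object-storage DataLocation; the bucket name and path are illustrative, and the import path is assumed.

package examples

import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

func strPtr(s string) *string { return &s }

// objectLocation points at a flat file stored in a VirtualBucket.
// The bucket name and path are illustrative.
func objectLocation() data.DataLocation {
	locType := data.DataLocationObjectStorage
	return data.DataLocation{
		Type:       &locType,
		BucketName: strPtr("modela-datasets"),
		Path:       strPtr("iris/iris.csv"),
	}
}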
func (*DataLocation) DeepCopy ¶
func (in *DataLocation) DeepCopy() *DataLocation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataLocation.
func (*DataLocation) DeepCopyInto ¶
func (in *DataLocation) DeepCopyInto(out *DataLocation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataLocation) Descriptor ¶
func (*DataLocation) Descriptor() ([]byte, []int)
func (*DataLocation) Marshal ¶
func (m *DataLocation) Marshal() (dAtA []byte, err error)
func (*DataLocation) MarshalToSizedBuffer ¶
func (m *DataLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataLocation) ProtoMessage ¶
func (*DataLocation) ProtoMessage()
func (*DataLocation) Reset ¶
func (m *DataLocation) Reset()
func (*DataLocation) Size ¶
func (m *DataLocation) Size() (n int)
func (*DataLocation) String ¶
func (this *DataLocation) String() string
func (*DataLocation) Unmarshal ¶
func (m *DataLocation) Unmarshal(dAtA []byte) error
func (DataLocation) Validate ¶ added in v0.5.515
func (loc DataLocation) Validate(field string) ([]metav1.StatusCause, bool)
func (*DataLocation) XXX_DiscardUnknown ¶
func (m *DataLocation) XXX_DiscardUnknown()
func (*DataLocation) XXX_Marshal ¶
func (m *DataLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataLocation) XXX_Merge ¶
func (m *DataLocation) XXX_Merge(src proto.Message)
func (*DataLocation) XXX_Size ¶
func (m *DataLocation) XXX_Size() int
func (*DataLocation) XXX_Unmarshal ¶
func (m *DataLocation) XXX_Unmarshal(b []byte) error
type DataLocationType ¶
type DataLocationType string
+kubebuilder:validation:Enum="object";"table";"view";"stream";"web";"public-dataset";"dataset"
const ( DataLocationObjectStorage DataLocationType = "object" DataLocationSQLTable DataLocationType = "table" DataLocationSQLView DataLocationType = "view" DataLocationStream DataLocationType = "stream" DataLocationWebApi DataLocationType = "web" DataLocationPublicDataset DataLocationType = "public-dataset" // The data reside in a public dataset DataLocationDataset DataLocationType = "dataset" // The data reside inside another dataset )
type DataOutputSpec ¶
type DataOutputSpec struct { // DatasetName is the name of a new Dataset resource that will be created. // If empty, the system will save the dataset to a file // +kubebuilder:default:="" // +kubebuilder:validation:Optional DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"` // The location where the dataset will be stored // +kubebuilder:validation:Optional Location *DataLocation `json:"location,omitempty" protobuf:"bytes,2,opt,name=location"` // The format of the dataset, applicable if the output location is a flat-file // +kubebuilder:default:="csv" // +kubebuilder:validation:Optional Format *FlatFileType `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"` // The update strategy for the dataset in the case that the output location already exists (i.e a database table) // UpsertUpdateStrategy will insert new records and update existing ones; // InsertUpdateStrategy will insert new records and not update existing ones; // UpdateUpdateStrategy will not insert new records and only update existing ones // +kubebuilder:default:="upsert" // +kubebuilder:validation:Optional Action *catalog.UpdateStrategy `json:"action,omitempty" protobuf:"bytes,4,opt,name=action"` // If true, the database table specified by Location will be created if it does not exist // +kubebuilder:validation:Optional CreateTableIfNotExist *bool `json:"createTableIfNotExist,omitempty" protobuf:"varint,5,opt,name=createTableIfNotExist"` // Indicates if the features (i.e. all the columns) of the input dataset, excluding the // feature which was predicted on, will be included in the output dataset // +kubebuilder:default:=true // +kubebuilder:validation:Optional IncludeFeatures *bool `json:"includeFeatures,omitempty" protobuf:"varint,6,opt,name=includeFeatures"` // If true, SHAP values for each predicted row will be included as JSON as an additional column of the dataset // +kubebuilder:default:=false // +kubebuilder:validation:Optional XAI *bool `json:"xai,omitempty" protobuf:"varint,7,opt,name=xai"` // If true, an additional column will be added to the dataset which // indicates if each predicted row was detected to be an outlier // +kubebuilder:default:=false // +kubebuilder:validation:Optional DetectOutliers *bool `json:"detectOutliers,omitempty" protobuf:"varint,8,opt,name=detectOutliers"` // List of SQL statements to run after the prediction was performed // +kubebuilder:validation:Optional PostSQL []string `json:"postSQL,omitempty" protobuf:"bytes,9,opt,name=postSQL"` }
DataOutputSpec specifies the format, features, and output location of a transformed dataset
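As a minimal sketch (not taken from the package itself), a DataOutputSpec that creates a new Dataset resource and keeps the input features might be built as below. The package name and the constructor newCSVOutput are hypothetical; Location, Format, and Action are left nil so the kubebuilder defaults documented above ("csv" and "upsert") apply.

package data // hypothetical package name

// newCSVOutput is an illustrative constructor for a DataOutputSpec that
// creates a new Dataset resource and keeps the input features in the output.
func newCSVOutput(datasetName string) DataOutputSpec {
    includeFeatures := true
    createTable := false
    return DataOutputSpec{
        DatasetName:           &datasetName,
        IncludeFeatures:       &includeFeatures,
        CreateTableIfNotExist: &createTable,
        // Location, Format and Action are optional; the defaults listed in the
        // field markers above are applied when these pointers are nil.
    }
}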
func (*DataOutputSpec) DeepCopy ¶
func (in *DataOutputSpec) DeepCopy() *DataOutputSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataOutputSpec.
func (*DataOutputSpec) DeepCopyInto ¶
func (in *DataOutputSpec) DeepCopyInto(out *DataOutputSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataOutputSpec) Descriptor ¶
func (*DataOutputSpec) Descriptor() ([]byte, []int)
func (*DataOutputSpec) Marshal ¶
func (m *DataOutputSpec) Marshal() (dAtA []byte, err error)
func (*DataOutputSpec) MarshalToSizedBuffer ¶
func (m *DataOutputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataOutputSpec) ProtoMessage ¶
func (*DataOutputSpec) ProtoMessage()
func (*DataOutputSpec) Reset ¶
func (m *DataOutputSpec) Reset()
func (*DataOutputSpec) Size ¶
func (m *DataOutputSpec) Size() (n int)
func (*DataOutputSpec) String ¶
func (this *DataOutputSpec) String() string
func (*DataOutputSpec) Unmarshal ¶
func (m *DataOutputSpec) Unmarshal(dAtA []byte) error
func (*DataOutputSpec) XXX_DiscardUnknown ¶
func (m *DataOutputSpec) XXX_DiscardUnknown()
func (*DataOutputSpec) XXX_Marshal ¶
func (m *DataOutputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataOutputSpec) XXX_Merge ¶
func (m *DataOutputSpec) XXX_Merge(src proto.Message)
func (*DataOutputSpec) XXX_Size ¶
func (m *DataOutputSpec) XXX_Size() int
func (*DataOutputSpec) XXX_Unmarshal ¶
func (m *DataOutputSpec) XXX_Unmarshal(b []byte) error
type DataPipeline ¶
type DataPipeline struct {
    metav1.TypeMeta   `json:",inline"`
    metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
    Spec              DataPipelineSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
    //+optional
    Status DataPipelineStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
+kubebuilder:object:root=true
+kubebuilder:resource:path=datapipelines,singular=datapipeline,shortName="dpl",categories={data,modela}
+kubebuilder:subresource:status
+kubebuilder:storageversion
+kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description=""
+kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description",description=""
+kubebuilder:printcolumn:name="Schedule",type="string",JSONPath=".spec.schedule",description=""
+kubebuilder:printcolumn:name="Last Run",type="date",JSONPath=".status.lastRun.at",description=""
+kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description=""

DataPipeline represents the ETL flow from the data sources to a processed dataset, ready for training.
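For orientation, the sketch below assembles a minimal DataPipeline object with only a few commonly used spec fields; everything else falls back to the kubebuilder defaults. The package name, the function newMinimalPipeline, and the example selector label are hypothetical.

package data // hypothetical package name

import (
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// newMinimalPipeline sketches a DataPipeline that selects datasets by label
// and writes its output to a new Dataset resource. Illustrative only.
func newMinimalPipeline(namespace, name, datasetName string) *DataPipeline {
    owner := "no-one"
    return &DataPipeline{
        ObjectMeta: metav1.ObjectMeta{
            Name:      name,
            Namespace: namespace,
        },
        Spec: DataPipelineSpec{
            Owner:           &owner,
            DatasetSelector: map[string]string{"app": "sales"}, // hypothetical label
            Output: DataOutputSpec{
                DatasetName: &datasetName,
            },
        },
    }
}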
func (*DataPipeline) AddFinalizer ¶
func (wr *DataPipeline) AddFinalizer()
func (*DataPipeline) CreateOrUpdateCond ¶
func (wr *DataPipeline) CreateOrUpdateCond(cond metav1.Condition)
Merge or update condition
func (*DataPipeline) DeepCopy ¶
func (in *DataPipeline) DeepCopy() *DataPipeline
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipeline.
func (*DataPipeline) DeepCopyInto ¶
func (in *DataPipeline) DeepCopyInto(out *DataPipeline)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataPipeline) DeepCopyObject ¶
func (in *DataPipeline) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataPipeline) Default ¶
func (wr *DataPipeline) Default()
func (*DataPipeline) Descriptor ¶
func (*DataPipeline) Descriptor() ([]byte, []int)
func (DataPipeline) GetCondIdx ¶
func (wr DataPipeline) GetCondIdx(t string) int
func (DataPipeline) HasFinalizer ¶
func (wr DataPipeline) HasFinalizer() bool
func (DataPipeline) IsReady ¶
func (w DataPipeline) IsReady() bool
func (DataPipeline) IsSaved ¶
func (w DataPipeline) IsSaved() bool
func (DataPipeline) ManifestURI ¶ added in v0.5.472
func (wr DataPipeline) ManifestURI() string
func (*DataPipeline) MarkFailed ¶
func (in *DataPipeline) MarkFailed(err error)
func (*DataPipeline) MarkReady ¶
func (in *DataPipeline) MarkReady()
func (*DataPipeline) MarkSaved ¶
func (in *DataPipeline) MarkSaved()
func (*DataPipeline) Marshal ¶
func (m *DataPipeline) Marshal() (dAtA []byte, err error)
func (*DataPipeline) MarshalToSizedBuffer ¶
func (m *DataPipeline) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataPipeline) ProtoMessage ¶
func (*DataPipeline) ProtoMessage()
func (*DataPipeline) RemoveFinalizer ¶
func (wr *DataPipeline) RemoveFinalizer()
func (*DataPipeline) Reset ¶
func (m *DataPipeline) Reset()
func (DataPipeline) RootURI ¶ added in v0.5.472
func (wr DataPipeline) RootURI() string
func (*DataPipeline) SetupWebhookWithManager ¶
func (wr *DataPipeline) SetupWebhookWithManager(mgr ctrl.Manager) error
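Assuming a standard controller-runtime setup, the defaulting and validating webhooks for the pipeline types would typically be registered against the manager roughly as follows; the package name and the wrapper function registerPipelineWebhooks are placeholders, not part of this package.

package data // hypothetical package name

import (
    ctrl "sigs.k8s.io/controller-runtime"
)

// registerPipelineWebhooks wires the DataPipeline and DataPipelineRun webhooks
// into a controller-runtime manager. Illustrative sketch only.
func registerPipelineWebhooks(mgr ctrl.Manager) error {
    if err := (&DataPipeline{}).SetupWebhookWithManager(mgr); err != nil {
        return err
    }
    return (&DataPipelineRun{}).SetupWebhookWithManager(mgr)
}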
func (*DataPipeline) Size ¶
func (m *DataPipeline) Size() (n int)
func (*DataPipeline) String ¶
func (this *DataPipeline) String() string
func (*DataPipeline) Unmarshal ¶
func (m *DataPipeline) Unmarshal(dAtA []byte) error
func (*DataPipeline) UpdateRunStatus ¶ added in v0.4.612
func (in *DataPipeline) UpdateRunStatus(run DataPipelineRun)
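A rough sketch of how a reconciler might fold a finished run back into its parent pipeline. The package name and recordFinishedRun are hypothetical, and persisting both objects through the Kubernetes API is omitted.

package data // hypothetical package name

// recordFinishedRun marks a run complete and records it on the parent pipeline.
// Illustrative only; the real controller also writes both objects back to the cluster.
func recordFinishedRun(pipeline *DataPipeline, run *DataPipelineRun) {
    run.MarkComplete()
    pipeline.UpdateRunStatus(*run)
    pipeline.Status.LastRunName = run.Name
}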
func (DataPipeline) ValidateCreate ¶
func (wr DataPipeline) ValidateCreate() error
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (DataPipeline) ValidateDelete ¶
func (wr DataPipeline) ValidateDelete() error
func (DataPipeline) ValidateUpdate ¶
func (wr DataPipeline) ValidateUpdate(old runtime.Object) error
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*DataPipeline) XXX_DiscardUnknown ¶
func (m *DataPipeline) XXX_DiscardUnknown()
func (*DataPipeline) XXX_Marshal ¶
func (m *DataPipeline) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataPipeline) XXX_Merge ¶
func (m *DataPipeline) XXX_Merge(src proto.Message)
func (*DataPipeline) XXX_Size ¶
func (m *DataPipeline) XXX_Size() int
func (*DataPipeline) XXX_Unmarshal ¶
func (m *DataPipeline) XXX_Unmarshal(b []byte) error
type DataPipelineConditionType ¶
type DataPipelineConditionType string
Condition of the data pipeline
type DataPipelineList ¶
type DataPipelineList struct {
    metav1.TypeMeta `json:",inline"`
    metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
    Items           []DataPipeline `json:"items" protobuf:"bytes,2,rep,name=items"`
}
+kubebuilder:object:root=true

DataPipelineList contains a list of DataPipeline resources.
func (*DataPipelineList) DeepCopy ¶
func (in *DataPipelineList) DeepCopy() *DataPipelineList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineList.
func (*DataPipelineList) DeepCopyInto ¶
func (in *DataPipelineList) DeepCopyInto(out *DataPipelineList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataPipelineList) DeepCopyObject ¶
func (in *DataPipelineList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataPipelineList) Descriptor ¶
func (*DataPipelineList) Descriptor() ([]byte, []int)
func (*DataPipelineList) Marshal ¶
func (m *DataPipelineList) Marshal() (dAtA []byte, err error)
func (*DataPipelineList) MarshalToSizedBuffer ¶
func (m *DataPipelineList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataPipelineList) ProtoMessage ¶
func (*DataPipelineList) ProtoMessage()
func (*DataPipelineList) Reset ¶
func (m *DataPipelineList) Reset()
func (*DataPipelineList) Size ¶
func (m *DataPipelineList) Size() (n int)
func (*DataPipelineList) String ¶
func (this *DataPipelineList) String() string
func (*DataPipelineList) Unmarshal ¶
func (m *DataPipelineList) Unmarshal(dAtA []byte) error
func (*DataPipelineList) XXX_DiscardUnknown ¶
func (m *DataPipelineList) XXX_DiscardUnknown()
func (*DataPipelineList) XXX_Marshal ¶
func (m *DataPipelineList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataPipelineList) XXX_Merge ¶
func (m *DataPipelineList) XXX_Merge(src proto.Message)
func (*DataPipelineList) XXX_Size ¶
func (m *DataPipelineList) XXX_Size() int
func (*DataPipelineList) XXX_Unmarshal ¶
func (m *DataPipelineList) XXX_Unmarshal(b []byte) error
type DataPipelineRun ¶
type DataPipelineRun struct {
    metav1.TypeMeta   `json:",inline"`
    metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
    Spec              DataPipelineRunSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
    //+optional
    Status DataPipelineRunStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
+kubebuilder:object:root=true
+kubebuilder:resource:path=datapipelineruns,singular=datapipelinerun,shortName="dpr",categories={data,modela,all}
+kubebuilder:subresource:status
+kubebuilder:storageversion
+kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase"
+kubebuilder:printcolumn:name="Progress",type="string",JSONPath=".status.progress",priority=1
+kubebuilder:printcolumn:name="Pipeline",type="string",JSONPath=".spec.datapipelineName"
+kubebuilder:printcolumn:name="StartedAt",type="date",JSONPath=".status.startTime",priority=1
+kubebuilder:printcolumn:name="CompletedAt",type="date",JSONPath=".status.completionTime",priority=1
+kubebuilder:printcolumn:name="Failure",type="string",JSONPath=".metadata.failureMessage"
+kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp"

DataPipelineRun represents one execution of the data pipeline.
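As a sketch of how a run is usually derived from its pipeline, the helper below copies the pipeline name and owner into a new DataPipelineRun. The package name and newRunFor are hypothetical; in practice the controller creates runs on the pipeline's schedule.

package data // hypothetical package name

import (
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// newRunFor sketches a DataPipelineRun that executes the given pipeline,
// inheriting its namespace and owner. Illustrative only.
func newRunFor(pipeline *DataPipeline, runName string) *DataPipelineRun {
    pipelineName := pipeline.Name
    return &DataPipelineRun{
        ObjectMeta: metav1.ObjectMeta{
            Name:      runName,
            Namespace: pipeline.Namespace,
        },
        Spec: DataPipelineRunSpec{
            DataPipelineName: &pipelineName,
            Owner:            pipeline.Spec.Owner,
        },
    }
}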
func (DataPipelineRun) Aborted ¶
func (in DataPipelineRun) Aborted() bool
func (*DataPipelineRun) AddFinalizer ¶
func (run *DataPipelineRun) AddFinalizer()
func (DataPipelineRun) CompletionAlert ¶ added in v0.4.601
func (run DataPipelineRun) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert
Generate a completion alert for the data pipeline run
func (*DataPipelineRun) CreateOrUpdateCond ¶
func (run *DataPipelineRun) CreateOrUpdateCond(cond metav1.Condition)
Merge or update condition
func (*DataPipelineRun) DeepCopy ¶
func (in *DataPipelineRun) DeepCopy() *DataPipelineRun
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRun.
func (*DataPipelineRun) DeepCopyInto ¶
func (in *DataPipelineRun) DeepCopyInto(out *DataPipelineRun)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataPipelineRun) DeepCopyObject ¶
func (in *DataPipelineRun) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataPipelineRun) Default ¶
func (run *DataPipelineRun) Default()
func (*DataPipelineRun) Descriptor ¶
func (*DataPipelineRun) Descriptor() ([]byte, []int)
func (DataPipelineRun) ErrorAlert ¶ added in v0.4.601
func (run DataPipelineRun) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert
func (DataPipelineRun) GetCondIdx ¶
func (run DataPipelineRun) GetCondIdx(t string) int
func (DataPipelineRun) HasFinalizer ¶
func (run DataPipelineRun) HasFinalizer() bool
func (DataPipelineRun) IsCompleted ¶
func (in DataPipelineRun) IsCompleted() bool
func (DataPipelineRun) IsFailed ¶
func (in DataPipelineRun) IsFailed() bool
func (DataPipelineRun) IsReady ¶
func (w DataPipelineRun) IsReady() bool
func (DataPipelineRun) IsRunning ¶
func (in DataPipelineRun) IsRunning() bool
func (DataPipelineRun) IsSaved ¶
func (w DataPipelineRun) IsSaved() bool
func (DataPipelineRun) ManifestURI ¶ added in v0.5.472
func (run DataPipelineRun) ManifestURI() string
func (*DataPipelineRun) MarkAborted ¶ added in v0.4.607
func (in *DataPipelineRun) MarkAborted(err error)
func (*DataPipelineRun) MarkComplete ¶
func (in *DataPipelineRun) MarkComplete()
func (*DataPipelineRun) MarkFailed ¶
func (in *DataPipelineRun) MarkFailed(err error)
func (*DataPipelineRun) MarkRunning ¶
func (r *DataPipelineRun) MarkRunning()
func (*DataPipelineRun) MarkSaved ¶
func (in *DataPipelineRun) MarkSaved()
func (*DataPipelineRun) Marshal ¶
func (m *DataPipelineRun) Marshal() (dAtA []byte, err error)
func (*DataPipelineRun) MarshalToSizedBuffer ¶
func (m *DataPipelineRun) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (DataPipelineRun) Paused ¶
func (in DataPipelineRun) Paused() bool
func (*DataPipelineRun) ProtoMessage ¶
func (*DataPipelineRun) ProtoMessage()
func (*DataPipelineRun) RemoveFinalizer ¶
func (run *DataPipelineRun) RemoveFinalizer()
func (*DataPipelineRun) Reset ¶
func (m *DataPipelineRun) Reset()
func (DataPipelineRun) RootURI ¶ added in v0.5.472
func (run DataPipelineRun) RootURI() string
func (DataPipelineRun) RunStatus ¶ added in v0.4.614
func (run DataPipelineRun) RunStatus() *catalog.LastRunStatus
Return the state of the run as a catalog.LastRunStatus
func (*DataPipelineRun) SetupWebhookWithManager ¶
func (wr *DataPipelineRun) SetupWebhookWithManager(mgr ctrl.Manager) error
func (*DataPipelineRun) Size ¶
func (m *DataPipelineRun) Size() (n int)
func (DataPipelineRun) StatusString ¶
func (run DataPipelineRun) StatusString() string
func (*DataPipelineRun) String ¶
func (this *DataPipelineRun) String() string
func (*DataPipelineRun) Unmarshal ¶
func (m *DataPipelineRun) Unmarshal(dAtA []byte) error
func (DataPipelineRun) ValidateCreate ¶
func (run DataPipelineRun) ValidateCreate() error
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (DataPipelineRun) ValidateDelete ¶
func (run DataPipelineRun) ValidateDelete() error
func (DataPipelineRun) ValidateUpdate ¶
func (run DataPipelineRun) ValidateUpdate(old runtime.Object) error
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*DataPipelineRun) XXX_DiscardUnknown ¶
func (m *DataPipelineRun) XXX_DiscardUnknown()
func (*DataPipelineRun) XXX_Marshal ¶
func (m *DataPipelineRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataPipelineRun) XXX_Merge ¶
func (m *DataPipelineRun) XXX_Merge(src proto.Message)
func (*DataPipelineRun) XXX_Size ¶
func (m *DataPipelineRun) XXX_Size() int
func (*DataPipelineRun) XXX_Unmarshal ¶
func (m *DataPipelineRun) XXX_Unmarshal(b []byte) error
type DataPipelineRunCondition ¶
type DataPipelineRunCondition struct {
    // Type of the data pipeline run condition.
    Type DataPipelineRunConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=DataPipelineRunConditionType"`
    // Status of the condition, one of True, False, Unknown.
    Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
    // Last time the condition transitioned from one status to another.
    LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,4,opt,name=lastTransitionTime"`
    // The reason for the condition's last transition.
    Reason string `json:"reason,omitempty" protobuf:"bytes,5,opt,name=reason"`
    // A human readable message indicating details about the transition.
    Message string `json:"message,omitempty" protobuf:"bytes,6,opt,name=message"`
}
DataPipelineRunCondition describes the state of a data pipeline run at a certain point.
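Note that the CreateOrUpdateCond helper documented above takes a standard metav1.Condition. A minimal sketch of recording a condition on a run follows; the package name, markRunReady, and the "Ready" condition type string are assumptions, not taken from this package.

package data // hypothetical package name

import (
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// markRunReady sketches how a controller might record a Ready condition on a run.
func markRunReady(run *DataPipelineRun) {
    run.CreateOrUpdateCond(metav1.Condition{
        Type:               "Ready", // assumed condition type string
        Status:             metav1.ConditionTrue,
        Reason:             "RunReady",
        Message:            "the data pipeline run is ready",
        LastTransitionTime: metav1.Now(),
    })
}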
func (*DataPipelineRunCondition) DeepCopy ¶
func (in *DataPipelineRunCondition) DeepCopy() *DataPipelineRunCondition
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunCondition.
func (*DataPipelineRunCondition) DeepCopyInto ¶
func (in *DataPipelineRunCondition) DeepCopyInto(out *DataPipelineRunCondition)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataPipelineRunCondition) Descriptor ¶
func (*DataPipelineRunCondition) Descriptor() ([]byte, []int)
func (*DataPipelineRunCondition) Marshal ¶
func (m *DataPipelineRunCondition) Marshal() (dAtA []byte, err error)
func (*DataPipelineRunCondition) MarshalTo ¶
func (m *DataPipelineRunCondition) MarshalTo(dAtA []byte) (int, error)
func (*DataPipelineRunCondition) MarshalToSizedBuffer ¶
func (m *DataPipelineRunCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataPipelineRunCondition) ProtoMessage ¶
func (*DataPipelineRunCondition) ProtoMessage()
func (*DataPipelineRunCondition) Reset ¶
func (m *DataPipelineRunCondition) Reset()
func (*DataPipelineRunCondition) Size ¶
func (m *DataPipelineRunCondition) Size() (n int)
func (*DataPipelineRunCondition) String ¶
func (this *DataPipelineRunCondition) String() string
func (*DataPipelineRunCondition) Unmarshal ¶
func (m *DataPipelineRunCondition) Unmarshal(dAtA []byte) error
func (*DataPipelineRunCondition) XXX_DiscardUnknown ¶
func (m *DataPipelineRunCondition) XXX_DiscardUnknown()
func (*DataPipelineRunCondition) XXX_Marshal ¶
func (m *DataPipelineRunCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataPipelineRunCondition) XXX_Merge ¶
func (m *DataPipelineRunCondition) XXX_Merge(src proto.Message)
func (*DataPipelineRunCondition) XXX_Size ¶
func (m *DataPipelineRunCondition) XXX_Size() int
func (*DataPipelineRunCondition) XXX_Unmarshal ¶
func (m *DataPipelineRunCondition) XXX_Unmarshal(b []byte) error
type DataPipelineRunConditionType ¶
type DataPipelineRunConditionType string
Condition type of a data pipeline run
type DataPipelineRunList ¶
type DataPipelineRunList struct {
    metav1.TypeMeta `json:",inline"`
    metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
    Items           []DataPipelineRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}
+kubebuilder:object:root=true

DataPipelineRunList contains a list of DataPipelineRun
func (*DataPipelineRunList) DeepCopy ¶
func (in *DataPipelineRunList) DeepCopy() *DataPipelineRunList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunList.
func (*DataPipelineRunList) DeepCopyInto ¶
func (in *DataPipelineRunList) DeepCopyInto(out *DataPipelineRunList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataPipelineRunList) DeepCopyObject ¶
func (in *DataPipelineRunList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataPipelineRunList) Descriptor ¶
func (*DataPipelineRunList) Descriptor() ([]byte, []int)
func (*DataPipelineRunList) Marshal ¶
func (m *DataPipelineRunList) Marshal() (dAtA []byte, err error)
func (*DataPipelineRunList) MarshalTo ¶
func (m *DataPipelineRunList) MarshalTo(dAtA []byte) (int, error)
func (*DataPipelineRunList) MarshalToSizedBuffer ¶
func (m *DataPipelineRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataPipelineRunList) ProtoMessage ¶
func (*DataPipelineRunList) ProtoMessage()
func (*DataPipelineRunList) Reset ¶
func (m *DataPipelineRunList) Reset()
func (*DataPipelineRunList) Size ¶
func (m *DataPipelineRunList) Size() (n int)
func (*DataPipelineRunList) String ¶
func (this *DataPipelineRunList) String() string
func (*DataPipelineRunList) Unmarshal ¶
func (m *DataPipelineRunList) Unmarshal(dAtA []byte) error
func (*DataPipelineRunList) XXX_DiscardUnknown ¶
func (m *DataPipelineRunList) XXX_DiscardUnknown()
func (*DataPipelineRunList) XXX_Marshal ¶
func (m *DataPipelineRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataPipelineRunList) XXX_Merge ¶
func (m *DataPipelineRunList) XXX_Merge(src proto.Message)
func (*DataPipelineRunList) XXX_Size ¶
func (m *DataPipelineRunList) XXX_Size() int
func (*DataPipelineRunList) XXX_Unmarshal ¶
func (m *DataPipelineRunList) XXX_Unmarshal(b []byte) error
type DataPipelineRunPhase ¶
type DataPipelineRunPhase string
const (
    DataPipelineRunPhasePending   DataPipelineRunPhase = "Pending"
    DataPipelineRunPhaseRunning   DataPipelineRunPhase = "Running"
    DataPipelineRunPhaseCompleted DataPipelineRunPhase = "Completed"
    DataPipelineRunPhaseFailed    DataPipelineRunPhase = "Failed"
    DataPipelineRunPhaseAborted   DataPipelineRunPhase = "Aborted"
    DataPipelineRunPhasePaused    DataPipelineRunPhase = "Paused"
)
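A small sketch of phase handling: the helper below treats Completed, Failed, and Aborted as terminal. The package name and isTerminalPhase are hypothetical; the package itself also exposes predicates such as IsCompleted and IsFailed on DataPipelineRun.

package data // hypothetical package name

// isTerminalPhase reports whether a run phase will no longer change on its own.
// Illustrative helper only.
func isTerminalPhase(phase DataPipelineRunPhase) bool {
    switch phase {
    case DataPipelineRunPhaseCompleted, DataPipelineRunPhaseFailed, DataPipelineRunPhaseAborted:
        return true
    default:
        return false
    }
}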
type DataPipelineRunSpec ¶
type DataPipelineRunSpec struct {
    // The data product version of the run
    // +kubebuilder:default:=""
    // +kubebuilder:validation:Optional
    VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
    // The name of the DataPipeline that this run executes
    // +kubebuilder:default:=""
    // +kubebuilder:validation:Optional
    DataPipelineName *string `json:"datapipelineName,omitempty" protobuf:"bytes,2,opt,name=datapipelineName"`
    // The owner of the run, set to the owner of the pipeline
    // +kubebuilder:default:="no-one"
    // +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
    // +kubebuilder:validation:Optional
    Owner *string `json:"owner,omitempty" protobuf:"bytes,4,opt,name=owner"`
    // Specify the resources for the data pipeline run
    // +kubebuilder:validation:Optional
    Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,5,opt,name=resources"`
    // The priority of this data pipeline. The default is medium.
    // +kubebuilder:default:=medium
    // +kubebuilder:validation:Optional
    Priority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,6,opt,name=priority"`
    // Set to true to pause the pipeline run
    // +kubebuilder:default:=false
    // +kubebuilder:validation:Optional
    Paused *bool `json:"paused,omitempty" protobuf:"varint,7,opt,name=paused"`
    // Set to true to abort the pipeline run
    // +kubebuilder:default:=false
    // +kubebuilder:validation:Optional
    Aborted *bool `json:"aborted,omitempty" protobuf:"varint,8,opt,name=aborted"`
    // The reference to the Lab where the data pipeline run will be executed
    // +kubebuilder:validation:Optional
    LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,9,opt,name=labRef"`
    // The model class for this data pipeline run, if the dataset was created by a model class
    // +kubebuilder:validation:Optional
    ModelClassName *string `json:"modelClassName,omitempty" protobuf:"bytes,10,opt,name=modelClassName"`
    // If this run was created by a model class run, this is the name of that run
    // +kubebuilder:validation:Optional
    ModelClassRunName *string `json:"modelClassRunName,omitempty" protobuf:"bytes,11,opt,name=modelClassRunName"`
}
DataPipelineRunSpec defines the desired state of a DataPipelineRun
func (*DataPipelineRunSpec) DeepCopy ¶
func (in *DataPipelineRunSpec) DeepCopy() *DataPipelineRunSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunSpec.
func (*DataPipelineRunSpec) DeepCopyInto ¶
func (in *DataPipelineRunSpec) DeepCopyInto(out *DataPipelineRunSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataPipelineRunSpec) Descriptor ¶
func (*DataPipelineRunSpec) Descriptor() ([]byte, []int)
func (*DataPipelineRunSpec) Marshal ¶
func (m *DataPipelineRunSpec) Marshal() (dAtA []byte, err error)
func (*DataPipelineRunSpec) MarshalTo ¶
func (m *DataPipelineRunSpec) MarshalTo(dAtA []byte) (int, error)
func (*DataPipelineRunSpec) MarshalToSizedBuffer ¶
func (m *DataPipelineRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataPipelineRunSpec) ProtoMessage ¶
func (*DataPipelineRunSpec) ProtoMessage()
func (*DataPipelineRunSpec) Reset ¶
func (m *DataPipelineRunSpec) Reset()
func (*DataPipelineRunSpec) Size ¶
func (m *DataPipelineRunSpec) Size() (n int)
func (*DataPipelineRunSpec) String ¶
func (this *DataPipelineRunSpec) String() string
func (*DataPipelineRunSpec) Unmarshal ¶
func (m *DataPipelineRunSpec) Unmarshal(dAtA []byte) error
func (*DataPipelineRunSpec) XXX_DiscardUnknown ¶
func (m *DataPipelineRunSpec) XXX_DiscardUnknown()
func (*DataPipelineRunSpec) XXX_Marshal ¶
func (m *DataPipelineRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataPipelineRunSpec) XXX_Merge ¶
func (m *DataPipelineRunSpec) XXX_Merge(src proto.Message)
func (*DataPipelineRunSpec) XXX_Size ¶
func (m *DataPipelineRunSpec) XXX_Size() int
func (*DataPipelineRunSpec) XXX_Unmarshal ¶
func (m *DataPipelineRunSpec) XXX_Unmarshal(b []byte) error
type DataPipelineRunStatus ¶
type DataPipelineRunStatus struct {
    // RecipeRuns are the names of the recipe runs that occur while running the pipeline.
    // +kubebuilder:validation:Optional
    RecipeRuns []string `json:"recipeRuns" protobuf:"bytes,1,rep,name=recipeRuns"`
    // The resulting dataset from the flow
    // +kubebuilder:validation:Optional
    Output DataLocation `json:"output" protobuf:"bytes,2,opt,name=output"`
    // The phase of the run
    // +kubebuilder:default:="Pending"
    // +kubebuilder:validation:Optional
    Phase DataPipelineRunPhase `json:"phase" protobuf:"bytes,3,opt,name=phase"`
    // CompletedAt is the end time of the pipeline
    // +kubebuilder:validation:Optional
    CompletedAt *metav1.Time `json:"completedAt,omitempty" protobuf:"bytes,5,opt,name=completedAt"`
    // ObservedGeneration is the last generation that was acted on
    // +kubebuilder:validation:Optional
    ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,6,opt,name=observedGeneration"`
    // The failure reason, in the case of a terminal failure.
    // Borrowed from the cluster api controller
    FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,7,opt,name=failureReason"`
    // The failure message, in the case of a terminal failure
    FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,8,opt,name=failureMessage"`
    // Pipeline progress in percent; the progress takes into account the different stages of the pipeline
    // +kubebuilder:validation:Optional
    Progress *int32 `json:"progress" protobuf:"varint,9,opt,name=progress"`
    // Holds the location of log paths
    // +kubebuilder:validation:Optional
    Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,10,opt,name=logs"`
    // Last time the object was updated
    // +kubebuilder:validation:Optional
    UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,11,opt,name=updatedAt"`
    // +patchMergeKey=type
    // +patchStrategy=merge
    // +kubebuilder:validation:Optional
    Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,12,rep,name=conditions"`
}
DataPipelineRunStatus defines the observed state of DataPipelineRun
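As an orientation for reading this status, the sketch below renders a one-line summary from the phase, progress, and recipe runs, similar in spirit to StatusString. The package name and runSummary are hypothetical.

package data // hypothetical package name

import "fmt"

// runSummary renders a short human-readable line from a run status. Illustrative only.
func runSummary(status DataPipelineRunStatus) string {
    progress := int32(0)
    if status.Progress != nil {
        progress = *status.Progress
    }
    return fmt.Sprintf("phase=%s progress=%d%% recipes=%d",
        status.Phase, progress, len(status.RecipeRuns))
}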
func (*DataPipelineRunStatus) DeepCopy ¶
func (in *DataPipelineRunStatus) DeepCopy() *DataPipelineRunStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunStatus.
func (*DataPipelineRunStatus) DeepCopyInto ¶
func (in *DataPipelineRunStatus) DeepCopyInto(out *DataPipelineRunStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataPipelineRunStatus) Descriptor ¶
func (*DataPipelineRunStatus) Descriptor() ([]byte, []int)
func (*DataPipelineRunStatus) Marshal ¶
func (m *DataPipelineRunStatus) Marshal() (dAtA []byte, err error)
func (*DataPipelineRunStatus) MarshalTo ¶
func (m *DataPipelineRunStatus) MarshalTo(dAtA []byte) (int, error)
func (*DataPipelineRunStatus) MarshalToSizedBuffer ¶
func (m *DataPipelineRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataPipelineRunStatus) ProtoMessage ¶
func (*DataPipelineRunStatus) ProtoMessage()
func (*DataPipelineRunStatus) Reset ¶
func (m *DataPipelineRunStatus) Reset()
func (*DataPipelineRunStatus) Size ¶
func (m *DataPipelineRunStatus) Size() (n int)
func (*DataPipelineRunStatus) String ¶
func (this *DataPipelineRunStatus) String() string
func (*DataPipelineRunStatus) Unmarshal ¶
func (m *DataPipelineRunStatus) Unmarshal(dAtA []byte) error
func (*DataPipelineRunStatus) XXX_DiscardUnknown ¶
func (m *DataPipelineRunStatus) XXX_DiscardUnknown()
func (*DataPipelineRunStatus) XXX_Marshal ¶
func (m *DataPipelineRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataPipelineRunStatus) XXX_Merge ¶
func (m *DataPipelineRunStatus) XXX_Merge(src proto.Message)
func (*DataPipelineRunStatus) XXX_Size ¶
func (m *DataPipelineRunStatus) XXX_Size() int
func (*DataPipelineRunStatus) XXX_Unmarshal ¶
func (m *DataPipelineRunStatus) XXX_Unmarshal(b []byte) error
type DataPipelineSpec ¶
type DataPipelineSpec struct {
    // VersionName is the data product version of the data pipeline
    // +kubebuilder:default:=""
    // +kubebuilder:validation:Optional
    VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
    // The model class for this pipeline
    // +kubebuilder:validation:Optional
    ModelClassName *string `json:"modelClassName,omitempty" protobuf:"bytes,2,opt,name=modelClassName"`
    // Description of the data pipeline
    // +kubebuilder:default:=""
    // +kubebuilder:validation:Optional
    Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
    // DatasetSelector is used to select datasets for processing in the pipeline
    // +kubebuilder:validation:Optional
    DatasetSelector map[string]string `json:"datasetSelector,omitempty" protobuf:"bytes,4,opt,name=datasetSelector"`
    // RecipeOrder defines the list of recipes and the order they need to run
    // +kubebuilder:validation:Optional
    Recipes []RecipePartSpec `json:"recipes,omitempty" protobuf:"bytes,5,rep,name=recipes"`
    // The output definition
    // +kubebuilder:validation:Optional
    Output DataOutputSpec `json:"output,omitempty" protobuf:"bytes,6,opt,name=output"`
    // Schedule for running the pipeline
    // +kubebuilder:validation:Optional
    Schedule catalog.RunSchedule `json:"schedule,omitempty" protobuf:"bytes,7,opt,name=schedule"`
    // Specification for notification
    // +kubebuilder:validation:Optional
    Notification catalog.NotificationSpec `json:"notification,omitempty" protobuf:"bytes,8,opt,name=notification"`
    // Owner of this data pipeline
    // +kubebuilder:default:="no-one"
    // +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
    // +kubebuilder:validation:Optional
    Owner *string `json:"owner,omitempty" protobuf:"bytes,9,opt,name=owner"`
    // Resources specify the hardware resource requirements
    // +kubebuilder:validation:Optional
    Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,10,opt,name=resources"`
    // The priority of this data pipeline. The default is medium.
    // +kubebuilder:default:="medium"
    // +kubebuilder:validation:Optional
    Priority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,11,opt,name=priority"`
    // Set to true to pause the scheduled runs of the pipeline
    // +kubebuilder:default:=false
    // +kubebuilder:validation:Optional
    Paused *bool `json:"paused,omitempty" protobuf:"varint,12,opt,name=paused"`
    // TTL for the data pipeline runs. Set to 0 if no garbage collection is needed.
    // +kubebuilder:default:=0
    // +kubebuilder:validation:Optional
    TTL *int32 `json:"ttl,omitempty" protobuf:"varint,13,opt,name=ttl"`
}
DataPipelineSpec defines the desired state of a DataPipeline
func (*DataPipelineSpec) DeepCopy ¶
func (in *DataPipelineSpec) DeepCopy() *DataPipelineSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineSpec.
func (*DataPipelineSpec) DeepCopyInto ¶
func (in *DataPipelineSpec) DeepCopyInto(out *DataPipelineSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataPipelineSpec) Descriptor ¶
func (*DataPipelineSpec) Descriptor() ([]byte, []int)
func (*DataPipelineSpec) Marshal ¶
func (m *DataPipelineSpec) Marshal() (dAtA []byte, err error)
func (*DataPipelineSpec) MarshalToSizedBuffer ¶
func (m *DataPipelineSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataPipelineSpec) ProtoMessage ¶
func (*DataPipelineSpec) ProtoMessage()
func (*DataPipelineSpec) Reset ¶
func (m *DataPipelineSpec) Reset()
func (*DataPipelineSpec) Size ¶
func (m *DataPipelineSpec) Size() (n int)
func (*DataPipelineSpec) String ¶
func (this *DataPipelineSpec) String() string
func (*DataPipelineSpec) Unmarshal ¶
func (m *DataPipelineSpec) Unmarshal(dAtA []byte) error
func (*DataPipelineSpec) XXX_DiscardUnknown ¶
func (m *DataPipelineSpec) XXX_DiscardUnknown()
func (*DataPipelineSpec) XXX_Marshal ¶
func (m *DataPipelineSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataPipelineSpec) XXX_Merge ¶
func (m *DataPipelineSpec) XXX_Merge(src proto.Message)
func (*DataPipelineSpec) XXX_Size ¶
func (m *DataPipelineSpec) XXX_Size() int
func (*DataPipelineSpec) XXX_Unmarshal ¶
func (m *DataPipelineSpec) XXX_Unmarshal(b []byte) error
type DataPipelineStatus ¶
type DataPipelineStatus struct {
    // ObservedGeneration is the last generation that was acted on
    // +kubebuilder:validation:Optional
    ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
    // Last time the object was updated
    // +kubebuilder:validation:Optional
    UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,2,opt,name=updatedAt"`
    // The schedule status; last run is the last time a data pipeline run was created
    // +kubebuilder:validation:Optional
    Schedule catalog.RunScheduleStatus `json:"schedule,omitempty" protobuf:"bytes,3,opt,name=schedule"`
    // The name of the last data pipeline run
    // +kubebuilder:validation:Optional
    LastRunName string `json:"lastRunName,omitempty" protobuf:"bytes,4,opt,name=lastRunName"`
    // The number of data pipeline runs
    // +kubebuilder:validation:Optional
    RunsCount int32 `json:"runsCount,omitempty" protobuf:"bytes,5,opt,name=runsCount"`
    // +patchMergeKey=type
    // +patchStrategy=merge
    // +kubebuilder:validation:Optional
    Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,6,rep,name=conditions"`
}
DataPipelineStatus is the observed state of the DataPipeline object.
func (*DataPipelineStatus) DeepCopy ¶
func (in *DataPipelineStatus) DeepCopy() *DataPipelineStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineStatus.
func (*DataPipelineStatus) DeepCopyInto ¶
func (in *DataPipelineStatus) DeepCopyInto(out *DataPipelineStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataPipelineStatus) Descriptor ¶
func (*DataPipelineStatus) Descriptor() ([]byte, []int)
func (*DataPipelineStatus) Marshal ¶
func (m *DataPipelineStatus) Marshal() (dAtA []byte, err error)
func (*DataPipelineStatus) MarshalTo ¶
func (m *DataPipelineStatus) MarshalTo(dAtA []byte) (int, error)
func (*DataPipelineStatus) MarshalToSizedBuffer ¶
func (m *DataPipelineStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataPipelineStatus) ProtoMessage ¶
func (*DataPipelineStatus) ProtoMessage()
func (*DataPipelineStatus) Reset ¶
func (m *DataPipelineStatus) Reset()
func (*DataPipelineStatus) Size ¶
func (m *DataPipelineStatus) Size() (n int)
func (*DataPipelineStatus) String ¶
func (this *DataPipelineStatus) String() string
func (*DataPipelineStatus) Unmarshal ¶
func (m *DataPipelineStatus) Unmarshal(dAtA []byte) error
func (*DataPipelineStatus) XXX_DiscardUnknown ¶
func (m *DataPipelineStatus) XXX_DiscardUnknown()
func (*DataPipelineStatus) XXX_Marshal ¶
func (m *DataPipelineStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataPipelineStatus) XXX_Merge ¶
func (m *DataPipelineStatus) XXX_Merge(src proto.Message)
func (*DataPipelineStatus) XXX_Size ¶
func (m *DataPipelineStatus) XXX_Size() int
func (*DataPipelineStatus) XXX_Unmarshal ¶
func (m *DataPipelineStatus) XXX_Unmarshal(b []byte) error
type DataProduct ¶
type DataProduct struct {
    metav1.TypeMeta   `json:",inline"`
    metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
    Spec              DataProductSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
    //+optional
    Status DataProductStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
+kubebuilder:object:root=true
+kubebuilder:resource:path=dataproducts,singular=dataproduct,shortName="prod",categories={data,modela,all}
+kubebuilder:subresource:status
+kubebuilder:storageversion
+kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status"
+kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner"
+kubebuilder:printcolumn:name="Task",type="string",JSONPath=".spec.task"
+kubebuilder:printcolumn:name="Public",type="boolean",JSONPath=".spec.public"
+kubebuilder:printcolumn:name="Errors",type="integer",JSONPath=".status.totalErrorAlerts"
+kubebuilder:printcolumn:name="Location Sources",type="integer",JSONPath=".status.totalDatasources",priority=1
+kubebuilder:printcolumn:name="Datasets",type="integer",JSONPath=".status.totalDatasets",priority=1
+kubebuilder:printcolumn:name="Studies",type="integer",JSONPath=".status.totalStudies",priority=1
+kubebuilder:printcolumn:name="ModelsCount",type="integer",JSONPath=".status.totalModels",priority=1
+kubebuilder:printcolumn:name="Predictors",type="integer",JSONPath=".status.totalPredictors",priority=1
+kubebuilder:printcolumn:name="Apps",type="integer",JSONPath=".status.totalApps",priority=1
+kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description=""

DataProduct represents a single DataProduct namespace, which contains all non-infrastructure resources. Additionally, it specifies default parameters for resources to be created under the namespace, such as workload class and storage location.
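To illustrate how the built-in helpers fit together, the sketch below derives the namespace and the RBAC roles for a DataProduct; the returned objects would normally be created in the cluster by the operator. The package name and productScaffolding are hypothetical.

package data // hypothetical package name

import (
    corev1 "k8s.io/api/core/v1"
    rbacv1 "k8s.io/api/rbac/v1"
)

// productScaffolding collects the namespace and product roles that the
// documented helper methods generate. Illustrative sketch only.
func productScaffolding(product DataProduct) (*corev1.Namespace, []*rbacv1.Role) {
    ns := product.CreateNamespace()
    roles := []*rbacv1.Role{
        product.ProductAdmin(),
        product.DataEngineer(),
        product.DataScientist(),
        product.PredictionConsumer(),
        product.ReportConsumer(),
    }
    return ns, roles
}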
func (*DataProduct) AddFinalizer ¶
func (product *DataProduct) AddFinalizer()
func (DataProduct) CreateNamespace ¶
func (product DataProduct) CreateNamespace() *v1.Namespace
func (*DataProduct) CreateOrUpdateCond ¶
func (product *DataProduct) CreateOrUpdateCond(cond metav1.Condition)
Merge or update condition
func (DataProduct) DataEngineer ¶
func (product DataProduct) DataEngineer() *rbacv1.Role
DataEngineer role
func (DataProduct) DataScientist ¶
func (product DataProduct) DataScientist() *rbacv1.Role
DataScientist role
func (*DataProduct) DeepCopy ¶
func (in *DataProduct) DeepCopy() *DataProduct
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProduct.
func (*DataProduct) DeepCopyInto ¶
func (in *DataProduct) DeepCopyInto(out *DataProduct)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataProduct) DeepCopyObject ¶
func (in *DataProduct) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataProduct) Default ¶
func (product *DataProduct) Default()
No defaults are applied in the current release
func (*DataProduct) Descriptor ¶
func (*DataProduct) Descriptor() ([]byte, []int)
func (DataProduct) GetCondIdx ¶
func (product DataProduct) GetCondIdx(t string) int
func (*DataProduct) GetRolesForAccount ¶ added in v0.4.547
func (product *DataProduct) GetRolesForAccount(account *infra.Account) []string
func (DataProduct) HasFinalizer ¶
func (product DataProduct) HasFinalizer() bool
func (DataProduct) IsClassification ¶
func (product DataProduct) IsClassification() bool
func (DataProduct) IsReady ¶
func (product DataProduct) IsReady() bool
func (*DataProduct) IsSaved ¶
func (product *DataProduct) IsSaved() bool
func (*DataProduct) MarkFailed ¶
func (product *DataProduct) MarkFailed(err error)
func (*DataProduct) MarkReady ¶ added in v0.4.467
func (product *DataProduct) MarkReady()
func (*DataProduct) MarkSaved ¶
func (product *DataProduct) MarkSaved()
func (*DataProduct) Marshal ¶
func (m *DataProduct) Marshal() (dAtA []byte, err error)
func (*DataProduct) MarshalToSizedBuffer ¶
func (m *DataProduct) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (DataProduct) PredictionConsumer ¶
func (product DataProduct) PredictionConsumer() *rbacv1.Role
PredictionConsumer role
func (DataProduct) PrefixArchiveURI ¶ added in v0.5.472
func (product DataProduct) PrefixArchiveURI(uri string) string
func (DataProduct) PrefixDepotURI ¶ added in v0.5.472
func (product DataProduct) PrefixDepotURI(uri string) string
func (DataProduct) PrefixLiveURI ¶ added in v0.5.472
func (product DataProduct) PrefixLiveURI(uri string) string
func (DataProduct) ProductAdmin ¶
func (product DataProduct) ProductAdmin() *rbacv1.Role
ProductAdmin role
func (*DataProduct) ProtoMessage ¶
func (*DataProduct) ProtoMessage()
func (*DataProduct) RemoveFinalizer ¶
func (product *DataProduct) RemoveFinalizer()
func (DataProduct) ReportConsumer ¶
func (product DataProduct) ReportConsumer() *rbacv1.Role
ReportConsumer role
func (*DataProduct) Reset ¶
func (m *DataProduct) Reset()
func (*DataProduct) SetupWebhookWithManager ¶
func (product *DataProduct) SetupWebhookWithManager(mgr ctrl.Manager) error
func (*DataProduct) Size ¶
func (m *DataProduct) Size() (n int)
func (*DataProduct) String ¶
func (this *DataProduct) String() string
func (*DataProduct) Unmarshal ¶
func (m *DataProduct) Unmarshal(dAtA []byte) error
func (*DataProduct) UpdateBaselineVersion ¶ added in v0.4.671
func (product *DataProduct) UpdateBaselineVersion(versions DataProductVersionList)
func (DataProduct) ValidateCreate ¶
func (product DataProduct) ValidateCreate() error
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (DataProduct) ValidateDelete ¶
func (product DataProduct) ValidateDelete() error
func (DataProduct) ValidateUpdate ¶
func (product DataProduct) ValidateUpdate(old runtime.Object) error
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*DataProduct) XXX_DiscardUnknown ¶
func (m *DataProduct) XXX_DiscardUnknown()
func (*DataProduct) XXX_Marshal ¶
func (m *DataProduct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataProduct) XXX_Merge ¶
func (m *DataProduct) XXX_Merge(src proto.Message)
func (*DataProduct) XXX_Size ¶
func (m *DataProduct) XXX_Size() int
func (*DataProduct) XXX_Unmarshal ¶
func (m *DataProduct) XXX_Unmarshal(b []byte) error
func (DataProduct) YamlURI ¶ added in v0.5.472
func (product DataProduct) YamlURI() string
type DataProductList ¶
type DataProductList struct {
    metav1.TypeMeta `json:",inline"`
    metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
    Items           []DataProduct `json:"items" protobuf:"bytes,2,rep,name=items"`
}
+kubebuilder:object:root=true

DataProductList contains a list of DataProducts
func (*DataProductList) DeepCopy ¶
func (in *DataProductList) DeepCopy() *DataProductList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductList.
func (*DataProductList) DeepCopyInto ¶
func (in *DataProductList) DeepCopyInto(out *DataProductList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataProductList) DeepCopyObject ¶
func (in *DataProductList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataProductList) Descriptor ¶
func (*DataProductList) Descriptor() ([]byte, []int)
func (*DataProductList) Marshal ¶
func (m *DataProductList) Marshal() (dAtA []byte, err error)
func (*DataProductList) MarshalToSizedBuffer ¶
func (m *DataProductList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataProductList) ProtoMessage ¶
func (*DataProductList) ProtoMessage()
func (*DataProductList) Reset ¶
func (m *DataProductList) Reset()
func (*DataProductList) Size ¶
func (m *DataProductList) Size() (n int)
func (*DataProductList) String ¶
func (this *DataProductList) String() string
func (*DataProductList) Unmarshal ¶
func (m *DataProductList) Unmarshal(dAtA []byte) error
func (*DataProductList) XXX_DiscardUnknown ¶
func (m *DataProductList) XXX_DiscardUnknown()
func (*DataProductList) XXX_Marshal ¶
func (m *DataProductList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataProductList) XXX_Merge ¶
func (m *DataProductList) XXX_Merge(src proto.Message)
func (*DataProductList) XXX_Size ¶
func (m *DataProductList) XXX_Size() int
func (*DataProductList) XXX_Unmarshal ¶
func (m *DataProductList) XXX_Unmarshal(b []byte) error
type DataProductSpec ¶
type DataProductSpec struct {
    // The name of the Account which created the object, which exists in the same tenant as the object
    // +kubebuilder:default:="no-one"
    // +kubebuilder:validation:Optional
    Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
    // Indicates if the DataProduct is public and can be accessed without permissions
    // +kubebuilder:default:=false
    // +kubebuilder:validation:Optional
    Public *bool `json:"public,omitempty" protobuf:"varint,2,opt,name=public"`
    // The reference to the Tenant which owns the DataProduct. Defaults to `default-tenant`
    // +kubebuilder:validation:Optional
    TenantRef *v1.ObjectReference `json:"tenantRef,omitempty" protobuf:"bytes,3,opt,name=tenantRef"`
    // GitLocation is the default Git location where all child resources will be tracked as YAML
    // +kubebuilder:validation:Optional
    GitLocation GitLocation `json:"gitLocation,omitempty" protobuf:"bytes,4,opt,name=gitLocation"`
    // ImageLocation is the default Docker image repository where model images produced under the DataProduct will be stored
    // +kubebuilder:validation:Optional
    ImageLocation *ImageLocation `json:"imageLocation,omitempty" protobuf:"bytes,5,opt,name=imageLocation"`
    // The name of the Lab that will be used by default with all compute-requiring child resources
    // +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
    // +kubebuilder:validation:MaxLength=63
    // +kubebuilder:validation:Optional
    LabName *string `json:"labName" protobuf:"bytes,7,opt,name=labName"`
    // The name of the Serving Site which will be used by default with all Predictor resources
    // +kubebuilder:validation:MaxLength=63
    // +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
    // +kubebuilder:validation:Optional
    ServingSiteName *string `json:"servingSiteName" protobuf:"bytes,8,opt,name=servingSiteName"`
    // Task specifies the default machine learning task of the product (classification, regression, etc.)
    // +kubebuilder:validation:Optional
    Task *catalog.MLTask `json:"task,omitempty" protobuf:"bytes,9,opt,name=task"`
    // Subtask specifies the default subtask relevant to the primary task (text classification, image object detection, etc.)
    // +kubebuilder:default:=none
    // +kubebuilder:validation:Optional
    SubTask *catalog.MLSubtask `json:"subtask,omitempty" protobuf:"bytes,10,opt,name=subtask"`
    // User-provided description of the object
    // +kubebuilder:default:=""
    // +kubebuilder:validation:Optional
    // +kubebuilder:validation:MaxLength=512
    Description *string `json:"description,omitempty" protobuf:"bytes,11,opt,name=description"`
    // The default location for all artifacts created under the DataProduct. All data-producing resources will
    // use the VirtualBucket specified by the Location by default
    // +kubebuilder:validation:Optional
    Location DataLocation `json:"location,omitempty" protobuf:"bytes,12,opt,name=location"`
    // The default notification specification for all resources under the DataProduct
    // +kubebuilder:validation:Optional
    Notification catalog.NotificationSpec `json:"notification,omitempty" protobuf:"bytes,13,opt,name=notification"`
    // The default resource allocation for model training and data workloads that takes place under the DataProduct
    // +kubebuilder:validation:Optional
    DefaultTrainingResources catalog.ResourceSpec `json:"trainingResources,omitempty" protobuf:"bytes,14,opt,name=trainingResources"`
    // The default resource allocation for model serving workloads that takes place under the DataProduct
    // +kubebuilder:validation:Optional
    DefaultServingResources catalog.ResourceSpec `json:"servingResources,omitempty" protobuf:"bytes,15,opt,name=servingResources"`
    // Specifies how many times Jobs created under the DataProduct namespace will retry after failure
    // +kubebuilder:default:=3
    // +kubebuilder:validation:Minimum=0
    // +kubebuilder:validation:Maximum=10
    // +kubebuilder:validation:Optional
    RetriesOnFailure *int32 `json:"retriesOnFailure,omitempty" protobuf:"varint,16,opt,name=retriesOnFailure"`
    // KPIs define key performance indicators for the DataProduct (not functional as of the current release)
    // +kubebuilder:validation:Optional
    KPIs []KPI `json:"kpis,omitempty" protobuf:"bytes,17,rep,name=kpis"`
    // The name of the Account which should be responsible for events that occur under the DataProduct
    // +kubebuilder:validation:Optional
    OnCallAccountName string `json:"onCallAccountName,omitempty" protobuf:"bytes,18,opt,name=onCallAccountName"`
    // The default compilation specification for Study resources created under the DataProduct
    // +kubebuilder:validation:Optional
    Compilation catalog.CompilerSpec `json:"compilation,omitempty" protobuf:"bytes,19,opt,name=compilation"`
    // The clearance level required to access the DataProduct. Accounts which do not have a clearance level
    // greater than or equal to ClearanceLevel will be denied access to the DataProduct namespace
    // +kubebuilder:default:=unclassified
    // +kubebuilder:validation:Optional
    ClearanceLevel *catalog.SecurityClearanceLevel `json:"clearanceLevel,omitempty" protobuf:"bytes,20,opt,name=clearanceLevel"`
    // The default priority level assigned to Jobs created under the DataProduct namespace
    // +kubebuilder:validation:Optional
    // +kubebuilder:default:="medium"
    DefaultPriority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,21,opt,name=priority"`
    // The color assigned to the product, for visual purposes only
    // +kubebuilder:default:="none"
    // +kubebuilder:validation:Optional
    Color *catalog.Color `json:"color,omitempty" protobuf:"bytes,22,opt,name=color"`
    // The Governance requirements (not functional as of the current release)
    // +kubebuilder:validation:Optional
    Governance GovernanceSpec `json:"governance,omitempty" protobuf:"bytes,23,opt,name=governance"`
    // Permissions denotes the specification that determines which Accounts
    // can access the DataProduct and what actions they can perform
    // +kubebuilder:validation:Optional
    Permissions catalog.PermissionsSpec `json:"permissions,omitempty" protobuf:"bytes,24,opt,name=permissions"`
    // Assign tags to data product
    // +kubebuilder:validation:Optional
    Tags []string `json:"tags,omitempty" protobuf:"bytes,25,opt,name=tags"`
}
DataProductSpec defines the desired state of the DataProduct
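As a minimal sketch of setting the most commonly touched fields, the constructor below fills only the owner, visibility, and retry count; every omitted field falls back to the kubebuilder defaults listed above. The package name and newProductSpec are hypothetical.

package data // hypothetical package name

// newProductSpec sketches a DataProductSpec with a few commonly set fields.
func newProductSpec(owner string) DataProductSpec {
    public := false
    retries := int32(3)
    return DataProductSpec{
        Owner:            &owner,
        Public:           &public,
        RetriesOnFailure: &retries,
    }
}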
func (*DataProductSpec) DeepCopy ¶
func (in *DataProductSpec) DeepCopy() *DataProductSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductSpec.
func (*DataProductSpec) DeepCopyInto ¶
func (in *DataProductSpec) DeepCopyInto(out *DataProductSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataProductSpec) Descriptor ¶
func (*DataProductSpec) Descriptor() ([]byte, []int)
func (*DataProductSpec) Marshal ¶
func (m *DataProductSpec) Marshal() (dAtA []byte, err error)
func (*DataProductSpec) MarshalToSizedBuffer ¶
func (m *DataProductSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataProductSpec) ProtoMessage ¶
func (*DataProductSpec) ProtoMessage()
func (*DataProductSpec) Reset ¶
func (m *DataProductSpec) Reset()
func (*DataProductSpec) Size ¶
func (m *DataProductSpec) Size() (n int)
func (*DataProductSpec) String ¶
func (this *DataProductSpec) String() string
func (*DataProductSpec) Unmarshal ¶
func (m *DataProductSpec) Unmarshal(dAtA []byte) error
func (*DataProductSpec) XXX_DiscardUnknown ¶
func (m *DataProductSpec) XXX_DiscardUnknown()
func (*DataProductSpec) XXX_Marshal ¶
func (m *DataProductSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataProductSpec) XXX_Merge ¶
func (m *DataProductSpec) XXX_Merge(src proto.Message)
func (*DataProductSpec) XXX_Size ¶
func (m *DataProductSpec) XXX_Size() int
func (*DataProductSpec) XXX_Unmarshal ¶
func (m *DataProductSpec) XXX_Unmarshal(b []byte) error
type DataProductStatus ¶
type DataProductStatus struct {
    // ObservedGeneration is the last generation that was acted on
    // +kubebuilder:validation:Optional
    ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
    // The last time the object was updated
    // +kubebuilder:validation:Optional
    UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,2,opt,name=updatedAt"`
    // The number of DataSource resources that exist under the namespace
    // +kubebuilder:validation:Optional
    DatasourcesCount int32 `json:"datasourcesCount,omitempty" protobuf:"varint,3,opt,name=datasourcesCount"`
    // The number of Dataset resources that exist under the namespace
    // +kubebuilder:validation:Optional
    DatasetsCount int32 `json:"datasetsCount,omitempty" protobuf:"varint,4,opt,name=datasetsCount"`
    // The number of DataPipeline resources that exist under the namespace
    // +kubebuilder:validation:Optional
    DataPipelinesCount int32 `json:"dataPipelineCount,omitempty" protobuf:"varint,5,opt,name=dataPipelineCount"`
    // The number of DataPipelineRun resources that exist under the namespace
    // +kubebuilder:validation:Optional
    DataPipelineRunsCount int32 `json:"dataPipelineRunsCount,omitempty" protobuf:"varint,6,opt,name=totalDataPipelineRuns"`
    // The number of Study resources that exist under the namespace
    // +kubebuilder:validation:Optional
    StudiesCount int32 `json:"studiesCount,omitempty" protobuf:"varint,7,opt,name=studiesCount"`
    // The number of Model resources that exist under the namespace
    // +kubebuilder:validation:Optional
    ModelsCount int32 `json:"modelsCount,omitempty" protobuf:"varint,8,opt,name=modelsCount"`
    // The number of Predictor resources that exist under the namespace
    // +kubebuilder:validation:Optional
    PredictorsCount int32 `json:"predictorsCount,omitempty" protobuf:"varint,11,opt,name=predictorsCount"`
    // The number of DataApp resources that exist under the namespace
    // +kubebuilder:validation:Optional
    DataAppsCount int32 `json:"dataAppsCount,omitempty" protobuf:"varint,13,opt,name=dataAppsCount"`
    // The number of BatchPrediction resources that exist under the namespace
    // +kubebuilder:validation:Optional
    PredictionsCount int32 `json:"predictionsCount,omitempty" protobuf:"varint,14,opt,name=predictionsCount"`
    // The number of informative alerts produced under the namespace
    // +kubebuilder:validation:Optional
    InfoAlertsCount int32 `json:"infoAlertsCount,omitempty" protobuf:"varint,15,opt,name=infoAlertsCount"`
    // The number of error alerts produced under the namespace
    // +kubebuilder:validation:Optional
    ErrorsAlertsCount int32 `json:"errorAlertsCount,omitempty" protobuf:"varint,16,opt,name=errorAlertsCount"`
    // Count the number of model classes
    // +kubebuilder:validation:Optional
    ModelClassesCount int32 `json:"modelClassesCount,omitempty" protobuf:"varint,17,opt,name=modelClassesCount"`
    // In the case of failure, the DataProduct resource controller will set this field with a failure reason
    FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,18,opt,name=failureReason"`
    // In the case of failure, the DataProduct resource controller will set this field with a failure message
    FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,19,opt,name=failureMessage"`
    // The name of the DataProductVersion which currently represents the latest version of the DataProduct.
    // Newly-created resources will be instantiated with this version by default
    BaselineVersion *string `json:"baselineVersion,omitempty" protobuf:"bytes,20,opt,name=baselineVersion"`
    // +patchMergeKey=type
    // +patchStrategy=merge
    // +kubebuilder:validation:Optional
    Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,21,rep,name=conditions"`
}
DataProductStatus defines the observed state of DataProduct
func (*DataProductStatus) DeepCopy ¶
func (in *DataProductStatus) DeepCopy() *DataProductStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductStatus.
func (*DataProductStatus) DeepCopyInto ¶
func (in *DataProductStatus) DeepCopyInto(out *DataProductStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataProductStatus) Descriptor ¶
func (*DataProductStatus) Descriptor() ([]byte, []int)
func (*DataProductStatus) Marshal ¶
func (m *DataProductStatus) Marshal() (dAtA []byte, err error)
func (*DataProductStatus) MarshalTo ¶
func (m *DataProductStatus) MarshalTo(dAtA []byte) (int, error)
func (*DataProductStatus) MarshalToSizedBuffer ¶
func (m *DataProductStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataProductStatus) ProtoMessage ¶
func (*DataProductStatus) ProtoMessage()
func (*DataProductStatus) Reset ¶
func (m *DataProductStatus) Reset()
func (*DataProductStatus) Size ¶
func (m *DataProductStatus) Size() (n int)
func (*DataProductStatus) String ¶
func (this *DataProductStatus) String() string
func (*DataProductStatus) Unmarshal ¶
func (m *DataProductStatus) Unmarshal(dAtA []byte) error
func (*DataProductStatus) XXX_DiscardUnknown ¶
func (m *DataProductStatus) XXX_DiscardUnknown()
func (*DataProductStatus) XXX_Marshal ¶
func (m *DataProductStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataProductStatus) XXX_Merge ¶
func (m *DataProductStatus) XXX_Merge(src proto.Message)
func (*DataProductStatus) XXX_Size ¶
func (m *DataProductStatus) XXX_Size() int
func (*DataProductStatus) XXX_Unmarshal ¶
func (m *DataProductStatus) XXX_Unmarshal(b []byte) error
type DataProductVersion ¶
type DataProductVersion struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Spec DataProductVersionSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"` //+optional Status DataProductVersionStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"` }
+kubebuilder:object:root=true +kubebuilder:resource:path=dataproductversions,shortName=dpv,singular=dataproductversion,categories={data,modela,all} +kubebuilder:subresource:status +kubebuilder:storageversion +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Product",type="string",JSONPath=".spec.productRef.name",description="" +kubebuilder:printcolumn:name="Base",type="boolean",JSONPath=".spec.baseline",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" DataProductVersion represents a single version of a DataProduct, which should increment versions in response to changes in data
func ParseDataProductVersionYaml ¶
func ParseDataProductVersionYaml(content []byte) (*DataProductVersion, error)
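For illustration, a minimal sketch of loading a manifest with ParseDataProductVersionYaml. The import path (github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1) and the manifest file name are assumptions, not taken from this reference.

package main

import (
	"fmt"
	"os"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	// Read a DataProductVersion manifest from disk (hypothetical file name).
	content, err := os.ReadFile("dataproductversion.yaml")
	if err != nil {
		panic(err)
	}
	version, err := data.ParseDataProductVersionYaml(content)
	if err != nil {
		panic(err)
	}
	fmt.Println("parsed version:", version.Name, "product:", version.Spec.ProductRef.Name)
}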
func (*DataProductVersion) AddFinalizer ¶
func (version *DataProductVersion) AddFinalizer()
func (DataProductVersion) Archived ¶
func (version DataProductVersion) Archived() bool
func (*DataProductVersion) CreateOrUpdateCond ¶
func (version *DataProductVersion) CreateOrUpdateCond(cond metav1.Condition)
Merge or update condition
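A minimal sketch of setting a condition through CreateOrUpdateCond; the condition type "Ready" and the reason text are illustrative assumptions, and the import path is assumed.

package main

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// markReconciled merges or updates the "Ready" condition on the version's status.
func markReconciled(version *data.DataProductVersion) {
	version.CreateOrUpdateCond(metav1.Condition{
		Type:               "Ready", // assumed condition type
		Status:             metav1.ConditionTrue,
		Reason:             "Reconciled",
		Message:            "DataProductVersion reconciled successfully",
		LastTransitionTime: metav1.Now(),
	})
}

func main() {
	markReconciled(&data.DataProductVersion{})
}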
func (*DataProductVersion) DeepCopy ¶
func (in *DataProductVersion) DeepCopy() *DataProductVersion
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductVersion.
func (*DataProductVersion) DeepCopyInto ¶
func (in *DataProductVersion) DeepCopyInto(out *DataProductVersion)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataProductVersion) DeepCopyObject ¶
func (in *DataProductVersion) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataProductVersion) Default ¶
func (dp *DataProductVersion) Default()
No defaults in the current release
func (*DataProductVersion) Descriptor ¶
func (*DataProductVersion) Descriptor() ([]byte, []int)
func (DataProductVersion) GetCond ¶
func (version DataProductVersion) GetCond(t string) metav1.Condition
func (DataProductVersion) GetCondIdx ¶
func (version DataProductVersion) GetCondIdx(t string) int
func (DataProductVersion) HasFinalizer ¶
func (version DataProductVersion) HasFinalizer() bool
func (DataProductVersion) IsReady ¶
func (version DataProductVersion) IsReady() bool
func (*DataProductVersion) MarkArchived ¶
func (version *DataProductVersion) MarkArchived()
func (*DataProductVersion) MarkFailed ¶
func (version *DataProductVersion) MarkFailed(err error)
func (*DataProductVersion) MarkReady ¶
func (version *DataProductVersion) MarkReady()
func (*DataProductVersion) Marshal ¶
func (m *DataProductVersion) Marshal() (dAtA []byte, err error)
func (*DataProductVersion) MarshalTo ¶
func (m *DataProductVersion) MarshalTo(dAtA []byte) (int, error)
func (*DataProductVersion) MarshalToSizedBuffer ¶
func (m *DataProductVersion) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (DataProductVersion) MessageURI ¶ added in v0.5.472
func (version DataProductVersion) MessageURI() string
func (*DataProductVersion) ProtoMessage ¶
func (*DataProductVersion) ProtoMessage()
func (*DataProductVersion) RemoveFinalizer ¶
func (version *DataProductVersion) RemoveFinalizer()
func (*DataProductVersion) Reset ¶
func (m *DataProductVersion) Reset()
func (DataProductVersion) SetupWebhookWithManager ¶
func (version DataProductVersion) SetupWebhookWithManager(mgr ctrl.Manager) error
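A sketch of wiring the DataProductVersion webhooks into a controller-runtime manager; the manager options are left at their defaults here, whereas a real operator would also register the API scheme and configure the webhook server. The import path is assumed.

package main

import (
	ctrl "sigs.k8s.io/controller-runtime"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	// Create a manager with default options (scheme, metrics, etc. omitted for brevity).
	mgr, err := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{})
	if err != nil {
		panic(err)
	}
	// Register the defaulting/validating webhooks for DataProductVersion.
	if err := (data.DataProductVersion{}).SetupWebhookWithManager(mgr); err != nil {
		panic(err)
	}
	if err := mgr.Start(ctrl.SetupSignalHandler()); err != nil {
		panic(err)
	}
}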
func (*DataProductVersion) Size ¶
func (m *DataProductVersion) Size() (n int)
func (*DataProductVersion) String ¶
func (this *DataProductVersion) String() string
func (*DataProductVersion) Unmarshal ¶
func (m *DataProductVersion) Unmarshal(dAtA []byte) error
func (DataProductVersion) ValidateCreate ¶
func (version DataProductVersion) ValidateCreate() error
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (DataProductVersion) ValidateDelete ¶
func (version DataProductVersion) ValidateDelete() error
func (DataProductVersion) ValidateUpdate ¶
func (version DataProductVersion) ValidateUpdate(old runtime.Object) error
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*DataProductVersion) XXX_DiscardUnknown ¶
func (m *DataProductVersion) XXX_DiscardUnknown()
func (*DataProductVersion) XXX_Marshal ¶
func (m *DataProductVersion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataProductVersion) XXX_Merge ¶
func (m *DataProductVersion) XXX_Merge(src proto.Message)
func (*DataProductVersion) XXX_Size ¶
func (m *DataProductVersion) XXX_Size() int
func (*DataProductVersion) XXX_Unmarshal ¶
func (m *DataProductVersion) XXX_Unmarshal(b []byte) error
func (DataProductVersion) YamlURI ¶ added in v0.5.472
func (version DataProductVersion) YamlURI() string
type DataProductVersionConditionType ¶
type DataProductVersionConditionType string
Condition on the DataProductVersion
type DataProductVersionList ¶
type DataProductVersionList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"` Items []DataProductVersion `json:"items" protobuf:"bytes,2,rep,name=items"` }
+kubebuilder:object:root=true DataProductVersionList contains a list of DataProductVersion
func (*DataProductVersionList) DeepCopy ¶
func (in *DataProductVersionList) DeepCopy() *DataProductVersionList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductVersionList.
func (*DataProductVersionList) DeepCopyInto ¶
func (in *DataProductVersionList) DeepCopyInto(out *DataProductVersionList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataProductVersionList) DeepCopyObject ¶
func (in *DataProductVersionList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataProductVersionList) Descriptor ¶
func (*DataProductVersionList) Descriptor() ([]byte, []int)
func (*DataProductVersionList) Marshal ¶
func (m *DataProductVersionList) Marshal() (dAtA []byte, err error)
func (*DataProductVersionList) MarshalTo ¶
func (m *DataProductVersionList) MarshalTo(dAtA []byte) (int, error)
func (*DataProductVersionList) MarshalToSizedBuffer ¶
func (m *DataProductVersionList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataProductVersionList) ProtoMessage ¶
func (*DataProductVersionList) ProtoMessage()
func (*DataProductVersionList) Reset ¶
func (m *DataProductVersionList) Reset()
func (*DataProductVersionList) Size ¶
func (m *DataProductVersionList) Size() (n int)
func (*DataProductVersionList) String ¶
func (this *DataProductVersionList) String() string
func (*DataProductVersionList) Unmarshal ¶
func (m *DataProductVersionList) Unmarshal(dAtA []byte) error
func (*DataProductVersionList) XXX_DiscardUnknown ¶
func (m *DataProductVersionList) XXX_DiscardUnknown()
func (*DataProductVersionList) XXX_Marshal ¶
func (m *DataProductVersionList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataProductVersionList) XXX_Merge ¶
func (m *DataProductVersionList) XXX_Merge(src proto.Message)
func (*DataProductVersionList) XXX_Size ¶
func (m *DataProductVersionList) XXX_Size() int
func (*DataProductVersionList) XXX_Unmarshal ¶
func (m *DataProductVersionList) XXX_Unmarshal(b []byte) error
type DataProductVersionSpec ¶
type DataProductVersionSpec struct { // ProductRef contains the object reference to the DataProduct // resource which the DataProductVersion describes the version of // +kubebuilder:validation:Optional ProductRef v1.ObjectReference `json:"productRef,omitempty" protobuf:"bytes,1,opt,name=productRef"` // User-provided description of the object // +kubebuilder:validation:Optional // +kubebuilder:default:="" // +kubebuilder:validation:MaxLength=512 Description *string `json:"description,omitempty" protobuf:"bytes,2,opt,name=description"` // The name of the version which preceded the current version // +kubebuilder:validation:MaxLength=63 // +kubebuilder:default:="" // +kubebuilder:validation:Optional PrevVersionName *string `json:"prevVersionName,omitempty" protobuf:"bytes,3,opt,name=prevVersionName"` // Indicates if the version is a baseline, and if so will cause Modela to garbage collect the resources from previous versions // +kubebuilder:default:=false // +kubebuilder:validation:Optional Baseline *bool `json:"baseline,omitempty" protobuf:"varint,4,opt,name=baseline"` // The name of the Account which created the object, which exists in the same tenant as the object // +kubebuilder:default:="no-one" // +kubebuilder:validation:Optional Owner *string `json:"owner,omitempty" protobuf:"bytes,5,opt,name=owner"` }
DataProductVersionSpec defines the desired state of a DataProductVersion
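For illustration, a minimal sketch of constructing a DataProductVersion in Go; all names are made up, the v1.ObjectReference is assumed to come from k8s.io/api/core/v1, and the import path of this package is assumed.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func strPtr(s string) *string { return &s }
func boolPtr(b bool) *bool    { return &b }

func main() {
	// A version of the (hypothetical) "iris" DataProduct, marked as the baseline.
	version := data.DataProductVersion{
		ObjectMeta: metav1.ObjectMeta{Name: "iris-v1", Namespace: "iris-product"},
		Spec: data.DataProductVersionSpec{
			ProductRef:  corev1.ObjectReference{Name: "iris", Namespace: "iris-product"},
			Description: strPtr("first iteration of the iris product"),
			Baseline:    boolPtr(true),
			Owner:       strPtr("admin"),
		},
	}
	fmt.Println(version.Name)
}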
func (*DataProductVersionSpec) DeepCopy ¶
func (in *DataProductVersionSpec) DeepCopy() *DataProductVersionSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductVersionSpec.
func (*DataProductVersionSpec) DeepCopyInto ¶
func (in *DataProductVersionSpec) DeepCopyInto(out *DataProductVersionSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataProductVersionSpec) Descriptor ¶
func (*DataProductVersionSpec) Descriptor() ([]byte, []int)
func (*DataProductVersionSpec) Marshal ¶
func (m *DataProductVersionSpec) Marshal() (dAtA []byte, err error)
func (*DataProductVersionSpec) MarshalTo ¶
func (m *DataProductVersionSpec) MarshalTo(dAtA []byte) (int, error)
func (*DataProductVersionSpec) MarshalToSizedBuffer ¶
func (m *DataProductVersionSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataProductVersionSpec) ProtoMessage ¶
func (*DataProductVersionSpec) ProtoMessage()
func (*DataProductVersionSpec) Reset ¶
func (m *DataProductVersionSpec) Reset()
func (*DataProductVersionSpec) Size ¶
func (m *DataProductVersionSpec) Size() (n int)
func (*DataProductVersionSpec) String ¶
func (this *DataProductVersionSpec) String() string
func (*DataProductVersionSpec) Unmarshal ¶
func (m *DataProductVersionSpec) Unmarshal(dAtA []byte) error
func (*DataProductVersionSpec) XXX_DiscardUnknown ¶
func (m *DataProductVersionSpec) XXX_DiscardUnknown()
func (*DataProductVersionSpec) XXX_Marshal ¶
func (m *DataProductVersionSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataProductVersionSpec) XXX_Merge ¶
func (m *DataProductVersionSpec) XXX_Merge(src proto.Message)
func (*DataProductVersionSpec) XXX_Size ¶
func (m *DataProductVersionSpec) XXX_Size() int
func (*DataProductVersionSpec) XXX_Unmarshal ¶
func (m *DataProductVersionSpec) XXX_Unmarshal(b []byte) error
type DataProductVersionStatus ¶
type DataProductVersionStatus struct { // ObservedGeneration is the last generation that was acted on //+kubebuilder:validation:Optional ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"` // The last time the object was updated //+kubebuilder:validation:Optional UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,2,opt,name=updatedAt"` // In the case of failure, the Dataset resource controller will set this field with a failure reason // Borrowed from cluster api controller FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,3,opt,name=failureReason"` // In the case of failure, the Dataset resource controller will set this field with a failure message FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,4,opt,name=failureMessage"` // +patchMergeKey=type // +patchStrategy=merge // +kubebuilder:validation:Optional Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,5,rep,name=conditions"` }
DataProductVersionStatus defines the observed state of a DataProductVersion
func (*DataProductVersionStatus) DeepCopy ¶
func (in *DataProductVersionStatus) DeepCopy() *DataProductVersionStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductVersionStatus.
func (*DataProductVersionStatus) DeepCopyInto ¶
func (in *DataProductVersionStatus) DeepCopyInto(out *DataProductVersionStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataProductVersionStatus) Descriptor ¶
func (*DataProductVersionStatus) Descriptor() ([]byte, []int)
func (*DataProductVersionStatus) Marshal ¶
func (m *DataProductVersionStatus) Marshal() (dAtA []byte, err error)
func (*DataProductVersionStatus) MarshalTo ¶
func (m *DataProductVersionStatus) MarshalTo(dAtA []byte) (int, error)
func (*DataProductVersionStatus) MarshalToSizedBuffer ¶
func (m *DataProductVersionStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataProductVersionStatus) ProtoMessage ¶
func (*DataProductVersionStatus) ProtoMessage()
func (*DataProductVersionStatus) Reset ¶
func (m *DataProductVersionStatus) Reset()
func (*DataProductVersionStatus) Size ¶
func (m *DataProductVersionStatus) Size() (n int)
func (*DataProductVersionStatus) String ¶
func (this *DataProductVersionStatus) String() string
func (*DataProductVersionStatus) Unmarshal ¶
func (m *DataProductVersionStatus) Unmarshal(dAtA []byte) error
func (*DataProductVersionStatus) XXX_DiscardUnknown ¶
func (m *DataProductVersionStatus) XXX_DiscardUnknown()
func (*DataProductVersionStatus) XXX_Marshal ¶
func (m *DataProductVersionStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataProductVersionStatus) XXX_Merge ¶
func (m *DataProductVersionStatus) XXX_Merge(src proto.Message)
func (*DataProductVersionStatus) XXX_Size ¶
func (m *DataProductVersionStatus) XXX_Size() int
func (*DataProductVersionStatus) XXX_Unmarshal ¶
func (m *DataProductVersionStatus) XXX_Unmarshal(b []byte) error
type DataSource ¶
type DataSource struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Spec DataSourceSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"` //+optional Status DataSourceStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"` }
+kubebuilder:object:root=true +kubebuilder:resource:path=datasources,singular=datasource,shortName="dsrc",categories={data,modela,all} +kubebuilder:storageversion +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Format",type="string",JSONPath=".spec.flatfile.fileType" +kubebuilder:printcolumn:name="Type",type="string",JSONPath=".spec.datasetType" +kubebuilder:printcolumn:name="Task",type="string",JSONPath=".spec.task" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp" DataSource defines the specification for the file format and column-level schema of data to be used within Modela
func (DataSource) ActiveColumns ¶
func (datasource DataSource) ActiveColumns() (string, error)
func (*DataSource) AddColumn ¶
func (datasource *DataSource) AddColumn( name string, dtype catalog.DataType, dformat catalog.DataDomain, Ignore bool, Target bool, Nullable bool)
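A sketch of building a column-level schema with AddColumn. The catalog.DataType and catalog.DataDomain values are written as string conversions because the concrete constant names are not listed here; they are placeholders, and both import paths are assumptions.

package main

import (
	catalog "github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1" // assumed import path
	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1"       // assumed import path
)

func main() {
	var ds data.DataSource
	// Arguments: name, data type, data domain, ignore, target, nullable.
	ds.AddColumn("sepal_length", catalog.DataType("number"), catalog.DataDomain("continuous"), false, false, false)
	// The label column is marked as the target (second-to-last argument).
	ds.AddColumn("species", catalog.DataType("categorical"), catalog.DataDomain("categorical"), false, true, false)
	_ = ds
}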
func (*DataSource) AddFinalizer ¶
func (datasource *DataSource) AddFinalizer()
func (DataSource) CountActiveAttributes ¶
func (datasource DataSource) CountActiveAttributes() int
CountActiveAttributes counts the number of attributes that we should not ignore
func (DataSource) CountTargetAttributes ¶
func (datasource DataSource) CountTargetAttributes() int
Count the number of attributes that are marked as targets
func (*DataSource) CreateOrUpdateCond ¶
func (datasource *DataSource) CreateOrUpdateCond(cond metav1.Condition)
Merge or update condition
func (*DataSource) DeepCopy ¶
func (in *DataSource) DeepCopy() *DataSource
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSource.
func (*DataSource) DeepCopyInto ¶
func (in *DataSource) DeepCopyInto(out *DataSource)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataSource) DeepCopyObject ¶
func (in *DataSource) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataSource) Default ¶
func (datasource *DataSource) Default()
func (*DataSource) Descriptor ¶
func (*DataSource) Descriptor() ([]byte, []int)
func (DataSource) GetCondIdx ¶
func (datasource DataSource) GetCondIdx(t string) int
func (DataSource) HasFinalizer ¶
func (datasource DataSource) HasFinalizer() bool
func (*DataSource) HaveValidationRules ¶ added in v0.4.654
func (datasource *DataSource) HaveValidationRules() bool
func (DataSource) InferTask ¶
func (datasource DataSource) InferTask() catalog.MLTask
func (DataSource) IsReady ¶
func (datasource DataSource) IsReady() bool
func (DataSource) Key ¶
func (datasource DataSource) Key() string
func (DataSource) ManifestURI ¶ added in v0.5.472
func (datasource DataSource) ManifestURI() string
func (*DataSource) MarkFieldAsTarget ¶
func (datasource *DataSource) MarkFieldAsTarget(target string)
func (*DataSource) MarkLastFieldAsTarget ¶
func (datasource *DataSource) MarkLastFieldAsTarget()
func (*DataSource) MarkReady ¶
func (datasource *DataSource) MarkReady()
func (*DataSource) MarkSaved ¶
func (datasource *DataSource) MarkSaved()
func (*DataSource) Marshal ¶
func (m *DataSource) Marshal() (dAtA []byte, err error)
func (*DataSource) MarshalToSizedBuffer ¶
func (m *DataSource) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataSource) Poplulate ¶
func (datasource *DataSource) Poplulate()
func (*DataSource) ProtoMessage ¶
func (*DataSource) ProtoMessage()
func (*DataSource) RemoveFinalizer ¶
func (datasource *DataSource) RemoveFinalizer()
func (*DataSource) Reset ¶
func (m *DataSource) Reset()
func (DataSource) RootURI ¶ added in v0.5.472
func (datasource DataSource) RootURI() string
func (DataSource) Saved ¶
func (datasource DataSource) Saved() bool
func (*DataSource) SetupWebhookWithManager ¶
func (datasource *DataSource) SetupWebhookWithManager(mgr ctrl.Manager) error
func (*DataSource) Size ¶
func (m *DataSource) Size() (n int)
func (*DataSource) String ¶
func (this *DataSource) String() string
func (*DataSource) Unmarshal ¶
func (m *DataSource) Unmarshal(dAtA []byte) error
func (DataSource) Validate ¶
func (datasource DataSource) Validate() (bool, []metav1.StatusCause)
func (DataSource) ValidateCreate ¶
func (datasource DataSource) ValidateCreate() error
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (DataSource) ValidateDelete ¶
func (datasource DataSource) ValidateDelete() error
func (DataSource) ValidateUpdate ¶
func (datasource DataSource) ValidateUpdate(old runtime.Object) error
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*DataSource) XXX_DiscardUnknown ¶
func (m *DataSource) XXX_DiscardUnknown()
func (*DataSource) XXX_Marshal ¶
func (m *DataSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataSource) XXX_Merge ¶
func (m *DataSource) XXX_Merge(src proto.Message)
func (*DataSource) XXX_Size ¶
func (m *DataSource) XXX_Size() int
func (*DataSource) XXX_Unmarshal ¶
func (m *DataSource) XXX_Unmarshal(b []byte) error
type DataSourceList ¶
type DataSourceList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"` Items []DataSource `json:"items" protobuf:"bytes,2,rep,name=items"` }
DataSourceList contains a list of DataSource objects +kubebuilder:object:root=true
func (*DataSourceList) DeepCopy ¶
func (in *DataSourceList) DeepCopy() *DataSourceList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceList.
func (*DataSourceList) DeepCopyInto ¶
func (in *DataSourceList) DeepCopyInto(out *DataSourceList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataSourceList) DeepCopyObject ¶
func (in *DataSourceList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataSourceList) Descriptor ¶
func (*DataSourceList) Descriptor() ([]byte, []int)
func (*DataSourceList) Marshal ¶
func (m *DataSourceList) Marshal() (dAtA []byte, err error)
func (*DataSourceList) MarshalToSizedBuffer ¶
func (m *DataSourceList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataSourceList) ProtoMessage ¶
func (*DataSourceList) ProtoMessage()
func (*DataSourceList) Reset ¶
func (m *DataSourceList) Reset()
func (*DataSourceList) Size ¶
func (m *DataSourceList) Size() (n int)
func (*DataSourceList) String ¶
func (this *DataSourceList) String() string
func (*DataSourceList) Unmarshal ¶
func (m *DataSourceList) Unmarshal(dAtA []byte) error
func (*DataSourceList) XXX_DiscardUnknown ¶
func (m *DataSourceList) XXX_DiscardUnknown()
func (*DataSourceList) XXX_Marshal ¶
func (m *DataSourceList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataSourceList) XXX_Merge ¶
func (m *DataSourceList) XXX_Merge(src proto.Message)
func (*DataSourceList) XXX_Size ¶
func (m *DataSourceList) XXX_Size() int
func (*DataSourceList) XXX_Unmarshal ¶
func (m *DataSourceList) XXX_Unmarshal(b []byte) error
type DataSourceSpec ¶
type DataSourceSpec struct { // The name of the Account which created the object, which exists in the same tenant as the object // +kubebuilder:default:="no-one" // +kubebuilder:validation:Optional Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"` // The name of the DataProductVersion which describes the version of the resource // that exists in the same DataProduct namespace as the resource // +kubebuilder:validation:MaxLength=63 // +kubebuilder:default:="" // +kubebuilder:validation:Optional VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"` // User-provided description of the object // +kubebuilder:validation:MaxLength=512 // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"` // The type of dataset; currently, the only supported type is `tabular` // +kubebuilder:default:="tabular" // +kubebuilder:validation:Optional DatasetType *catalog.DatasetType `json:"datasetType,omitempty" protobuf:"bytes,4,opt,name=datasetType"` // The schema which will be used during the ingestion process of any Dataset resources which specify the DataSource Schema Schema `json:"schema,omitempty" protobuf:"bytes,5,opt,name=schema"` // Flat file spec defines the parameters needed to read a flat file. // +kubebuilder:validation:Optional FlatFile *FlatFileFormatSpec `json:"flatfile,omitempty" protobuf:"bytes,6,opt,name=flatfile"` // If true, this datasource is for labeled data. // +kubebuilder:default:=true // +kubebuilder:validation:Optional Labeled *bool `json:"labeled,omitempty" protobuf:"varint,7,opt,name=labeled"` // The specification for how incoming data should be sampled (i.e. how many rows should be used). Applicable // primarily for very large datasets Sample SampleSpec `json:"sample,omitempty" protobuf:"bytes,9,opt,name=sample"` // The default task for Dataset resources created from the Location Source. If null, the task type will default to // the default task type of the Location Product which contains the Location Source // +kubebuilder:validation:Optional Task *catalog.MLTask `json:"task,omitempty" protobuf:"bytes,10,opt,name=task"` // The machine learning subtask relevant to the primary task (text classification, image object detection, etc.) // +kubebuilder:default:="none" // +kubebuilder:validation:Optional SubTask *catalog.MLSubtask `json:"subtask,omitempty" protobuf:"bytes,11,opt,name=subtask"` // List of relationships to other data sources // +kubebuilder:validation:Optional Relationships []RelationshipSpec `json:"relationships,omitempty" protobuf:"bytes,12,rep,name=relationships"` // Labeling specifies how to automatically label the dataset using positive and negative rules // +kubebuilder:validation:Optional Labeling LabelingSpec `json:"labeling,omitempty" protobuf:"bytes,13,rep,name=labeling"` // InferredFrom specifies the location of the data that was used to generate the schema of the Location Source // +kubebuilder:validation:Optional InferredFrom *DataLocation `json:"inferredFrom,omitempty" protobuf:"bytes,14,opt,name=inferredFrom"` // The specification for tests for a new dataset // +kubebuilder:validation:Optional UnitTestsTemplate catalog.TestSuite `json:"unitTestsTemplate,omitempty" protobuf:"bytes,15,opt,name=unitTestsTemplate"` // +kubebuilder:validation:Optional IngestMethod *catalog.FeatureStoreIngestType `json:"ingestMethod,omitempty" protobuf:"bytes,16,opt,name=ingestMethod"` }
DataSourceSpec defines the desired state of the DataSource
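A minimal sketch of a DataSourceSpec; the catalog.MLTask value is an illustrative string conversion (the real constants live in the catalog package), and both import paths are assumptions.

package main

import (
	catalog "github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1" // assumed import path
	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1"       // assumed import path
)

func strPtr(s string) *string { return &s }

func main() {
	task := catalog.MLTask("binary-classification") // illustrative value
	spec := data.DataSourceSpec{
		Owner:       strPtr("admin"),
		VersionName: strPtr("v1"),
		Description: strPtr("customer churn flat files"),
		Task:        &task,
	}
	_ = spec
}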
func (*DataSourceSpec) DeepCopy ¶
func (in *DataSourceSpec) DeepCopy() *DataSourceSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceSpec.
func (*DataSourceSpec) DeepCopyInto ¶
func (in *DataSourceSpec) DeepCopyInto(out *DataSourceSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataSourceSpec) Descriptor ¶
func (*DataSourceSpec) Descriptor() ([]byte, []int)
func (*DataSourceSpec) Marshal ¶
func (m *DataSourceSpec) Marshal() (dAtA []byte, err error)
func (*DataSourceSpec) MarshalToSizedBuffer ¶
func (m *DataSourceSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataSourceSpec) ProtoMessage ¶
func (*DataSourceSpec) ProtoMessage()
func (*DataSourceSpec) Reset ¶
func (m *DataSourceSpec) Reset()
func (*DataSourceSpec) Size ¶
func (m *DataSourceSpec) Size() (n int)
func (*DataSourceSpec) String ¶
func (this *DataSourceSpec) String() string
func (*DataSourceSpec) Unmarshal ¶
func (m *DataSourceSpec) Unmarshal(dAtA []byte) error
func (*DataSourceSpec) XXX_DiscardUnknown ¶
func (m *DataSourceSpec) XXX_DiscardUnknown()
func (*DataSourceSpec) XXX_Marshal ¶
func (m *DataSourceSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataSourceSpec) XXX_Merge ¶
func (m *DataSourceSpec) XXX_Merge(src proto.Message)
func (*DataSourceSpec) XXX_Size ¶
func (m *DataSourceSpec) XXX_Size() int
func (*DataSourceSpec) XXX_Unmarshal ¶
func (m *DataSourceSpec) XXX_Unmarshal(b []byte) error
type DataSourceStatus ¶
type DataSourceStatus struct { // The number of columns determined to be present in the DataSource's schema // +kubebuilder:validation:Minimum=0 // +kubebuilder:validation:Optional Cols int32 `json:"cols,omitempty" protobuf:"varint,1,opt,name=cols"` // ObservedGeneration is the last generation that was acted on //+kubebuilder:validation:Optional ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,2,opt,name=observedGeneration"` // Last time the DataSource was used to create a Dataset //+kubebuilder:validation:Optional LastDatasetCreatedAt *metav1.Time `json:"lastDatasetCreatedAt,omitempty" protobuf:"bytes,3,opt,name=lastDatasetCreatedAt"` // The name of the last Dataset created using the DataSource //+kubebuilder:validation:Optional LastDatasetName string `json:"lastDatasetName,omitempty" protobuf:"bytes,4,opt,name=lastDatasetName"` // Last time the object was updated //+kubebuilder:validation:Optional UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,5,opt,name=updatedAt"` // In the case of failure, the DataSource resource controller will set this field with a failure reason //+kubebuilder:validation:Optional FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,6,opt,name=failureReason"` // In the case of failure, the DataSource resource controller will set this field with a failure message //+kubebuilder:validation:Optional FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,7,opt,name=failureMessage"` // +patchMergeKey=type // +patchStrategy=merge // +kubebuilder:validation:Optional Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,8,rep,name=conditions"` }
DataSourceStatus defines the observed state of a DataSource object
func (*DataSourceStatus) DeepCopy ¶
func (in *DataSourceStatus) DeepCopy() *DataSourceStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceStatus.
func (*DataSourceStatus) DeepCopyInto ¶
func (in *DataSourceStatus) DeepCopyInto(out *DataSourceStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataSourceStatus) Descriptor ¶
func (*DataSourceStatus) Descriptor() ([]byte, []int)
func (*DataSourceStatus) Marshal ¶
func (m *DataSourceStatus) Marshal() (dAtA []byte, err error)
func (*DataSourceStatus) MarshalToSizedBuffer ¶
func (m *DataSourceStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DataSourceStatus) ProtoMessage ¶
func (*DataSourceStatus) ProtoMessage()
func (*DataSourceStatus) Reset ¶
func (m *DataSourceStatus) Reset()
func (*DataSourceStatus) Size ¶
func (m *DataSourceStatus) Size() (n int)
func (*DataSourceStatus) String ¶
func (this *DataSourceStatus) String() string
func (*DataSourceStatus) Unmarshal ¶
func (m *DataSourceStatus) Unmarshal(dAtA []byte) error
func (*DataSourceStatus) XXX_DiscardUnknown ¶
func (m *DataSourceStatus) XXX_DiscardUnknown()
func (*DataSourceStatus) XXX_Marshal ¶
func (m *DataSourceStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DataSourceStatus) XXX_Merge ¶
func (m *DataSourceStatus) XXX_Merge(src proto.Message)
func (*DataSourceStatus) XXX_Size ¶
func (m *DataSourceStatus) XXX_Size() int
func (*DataSourceStatus) XXX_Unmarshal ¶
func (m *DataSourceStatus) XXX_Unmarshal(b []byte) error
type DatabaseServerType ¶
type DatabaseServerType string
DatabaseServerType is the type of the database server +kubebuilder:validation:Enum="sqlserver";"mysql";"postgress";"sqlite";"oracle";"presto";"redshift";"apache-hive";"snowflake";"sybase";"vertica";"cockroach-db";"elasticsearch";"informix";"sap-hana";"teradata";"gcp-spanner";"apache-spark";"clickhouse";"greenplum";"couchbase";"exasol"
const ( MsSqlServer DatabaseServerType = "sqlserver" MySql DatabaseServerType = "mysql" Postgres DatabaseServerType = "postgres" SqlLite DatabaseServerType = "sqlite" Oracle DatabaseServerType = "oracle" Presto DatabaseServerType = "presto" AmazonRedShift DatabaseServerType = "redshift" ApacheHive DatabaseServerType = "apache-hive" Snowflake DatabaseServerType = "snowflake" Sybase DatabaseServerType = "sybase" Vertica DatabaseServerType = "vertica" CockroachDB DatabaseServerType = "cockroach-db" ElasticSearch DatabaseServerType = "elasticsearch" Informix DatabaseServerType = "informix" SAPHana DatabaseServerType = "sap-hana" Teradata DatabaseServerType = "teradata" GcpSpanner DatabaseServerType = "gcp-spanner" ApacheSpark DatabaseServerType = "apache-spark" ClickHouse DatabaseServerType = "clickhouse" GreenPlum DatabaseServerType = "greenplum" Couchbase DatabaseServerType = "couchbase" Exasol DatabaseServerType = "exasol" )
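The constants above can be switched over like any other typed string. The helper below is hypothetical (not part of the API) and only demonstrates the pattern; the port numbers are conventional defaults, and the import path is assumed.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// defaultPort is a hypothetical helper mapping a server type to its usual port.
func defaultPort(t data.DatabaseServerType) int {
	switch t {
	case data.Postgres:
		return 5432
	case data.MySql:
		return 3306
	case data.MsSqlServer:
		return 1433
	default:
		return 0
	}
}

func main() {
	fmt.Println(defaultPort(data.Postgres))
}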
type Dataset ¶
type Dataset struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Spec DatasetSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"` //+optional Status DatasetStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"` }
+kubebuilder:object:root=true +kubebuilder:resource:path=datasets,shortName=ds,singular=dataset,categories={data,modela,all} +kubebuilder:subresource:status +kubebuilder:storageversion +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Location Source",type="string",JSONPath=".spec.datasourceName" +kubebuilder:printcolumn:name="Type",type="string",JSONPath=".spec.type" +kubebuilder:printcolumn:name="Rows",type="integer",JSONPath=".status.statistics.rows" +kubebuilder:printcolumn:name="Columns",type="integer",JSONPath=".status.statistics.cols" +kubebuilder:printcolumn:name="Size",type="integer",JSONPath=".status.statistics.fileSize" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" Dataset represents a chunk of data that has been analyzed and stored inside a managed bucket
func ParseDatasetYaml ¶
func (*Dataset) AddFinalizer ¶
func (dataset *Dataset) AddFinalizer()
func (Dataset) CompletionAlert ¶ added in v0.4.601
func (dataset Dataset) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert
Generate a dataset completion alert
func (Dataset) ConstuctFeatureHistogram ¶ added in v0.5.76
func (dataset Dataset) ConstuctFeatureHistogram() (*FeatureHistogram, error)
func (*Dataset) CreateOrUpdateCond ¶
Merge or update condition
func (*Dataset) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Dataset.
func (*Dataset) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Dataset) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Dataset) Descriptor ¶
func (Dataset) DriftColumnNames ¶ added in v0.4.969
Return the list of drifted columns.
func (Dataset) ErrorAlert ¶ added in v0.4.601
func (Dataset) GetColumn ¶ added in v0.4.969
func (dataset Dataset) GetColumn(name string) (*ColumnStatistics, error)
Search for a column statistic by name
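A usage sketch of GetColumn; the column name is illustrative and the import path is assumed. In practice the Dataset would be fetched through the Kubernetes client rather than zero-initialized.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	var dataset data.Dataset // in practice, read from the cluster
	stat, err := dataset.GetColumn("age")
	if err != nil {
		fmt.Println("column not found:", err)
		return
	}
	fmt.Printf("statistics for %q: %+v\n", "age", stat)
}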
func (Dataset) GetCondIdx ¶
func (Dataset) GroupDataFile ¶ added in v0.5.212
func (Dataset) GroupDataFolder ¶ added in v0.5.209
func (Dataset) GroupFolder ¶ added in v0.5.209
func (*Dataset) GroupForecastFile ¶ added in v0.5.212
func (Dataset) GroupProfileFolder ¶ added in v0.5.211
func (Dataset) GroupReportFile ¶ added in v0.5.212
func (Dataset) GroupsFolder ¶ added in v0.5.214
func (Dataset) HasFinalizer ¶
func (Dataset) IndexFileKey ¶ added in v0.5.190
func (Dataset) IsFeatureGroup ¶ added in v0.5.387
Answers true if the dataset is used for feature group monitoring.
func (Dataset) ManifestURI ¶ added in v0.5.472
func (*Dataset) MarkArchived ¶
func (dataset *Dataset) MarkArchived()
func (*Dataset) MarkGenerated ¶
func (dataset *Dataset) MarkGenerated()
func (*Dataset) MarkGeneratedFailed ¶
func (*Dataset) MarkGenerting ¶
func (dataset *Dataset) MarkGenerting()
func (*Dataset) MarkGroupFailed ¶ added in v0.5.161
func (*Dataset) MarkGroupSuccess ¶ added in v0.5.161
func (dataset *Dataset) MarkGroupSuccess()
func (*Dataset) MarkGrouping ¶ added in v0.5.162
func (dataset *Dataset) MarkGrouping()
func (*Dataset) MarkIngestFailed ¶
func (*Dataset) MarkIngested ¶
func (dataset *Dataset) MarkIngested()
func (*Dataset) MarkIngesting ¶
func (dataset *Dataset) MarkIngesting()
func (*Dataset) MarkProfiled ¶
func (*Dataset) MarkProfiledFailed ¶
func (*Dataset) MarkProfiling ¶
func (dataset *Dataset) MarkProfiling()
func (*Dataset) MarkReportFailed ¶
func (*Dataset) MarkReported ¶
func (dataset *Dataset) MarkReported()
func (*Dataset) MarkReporting ¶
func (dataset *Dataset) MarkReporting()
func (*Dataset) MarkSkewColumns ¶
func (dataset *Dataset) MarkSkewColumns()
func (*Dataset) MarkSnapshotFailed ¶
func (*Dataset) MarkSnapshotSuccess ¶
func (dataset *Dataset) MarkSnapshotSuccess()
func (*Dataset) MarkTakingSnapshot ¶
func (dataset *Dataset) MarkTakingSnapshot()
func (*Dataset) MarkUnitTestFailed ¶ added in v0.5.13
func (*Dataset) MarkUnitTested ¶ added in v0.5.13
func (dataset *Dataset) MarkUnitTested()
func (*Dataset) MarkUnitTesting ¶ added in v0.5.18
func (dataset *Dataset) MarkUnitTesting()
func (*Dataset) MarshalToSizedBuffer ¶
func (Dataset) PrintConditions ¶
func (dataset Dataset) PrintConditions()
func (Dataset) ProfileURI ¶ added in v0.5.472
func (*Dataset) ProtoMessage ¶
func (*Dataset) ProtoMessage()
func (*Dataset) RemoveFinalizer ¶
func (dataset *Dataset) RemoveFinalizer()
func (Dataset) ReportName ¶
func (*Dataset) SetupWebhookWithManager ¶
func (Dataset) Snapshotted ¶
func (Dataset) StatusString ¶
func (Dataset) TaskIndexFileKey ¶ added in v0.5.196
This is the index file for the task
func (Dataset) UnitTested ¶ added in v0.5.13
func (*Dataset) UpdatePhaseFromConditions ¶
func (dataset *Dataset) UpdatePhaseFromConditions()
Update the phase of the dataset based on its conditions
func (Dataset) ValidateCreate ¶
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (Dataset) ValidateDelete ¶
func (Dataset) ValidateUpdate ¶
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (Dataset) WorkerIndexFileKey ¶ added in v0.5.193
Return the location of the worker index file for the key
func (*Dataset) XXX_DiscardUnknown ¶
func (m *Dataset) XXX_DiscardUnknown()
func (*Dataset) XXX_Marshal ¶
func (*Dataset) XXX_Unmarshal ¶
type DatasetGroupByStatus ¶ added in v0.5.172
type DatasetGroupByStatus struct { // The locations of the dataset files. Each file holds one group // +kubebuilder:validation:Optional DatasetsURI string `json:"datasetsURI,omitempty" protobuf:"bytes,1,opt,name=datasetsURI"` // The locations of the profile files. // +kubebuilder:validation:Optional ProfilesURI string `json:"profilesURI,omitempty" protobuf:"bytes,2,opt,name=profilesURI"` // The locations of the report file. One report for each key // +kubebuilder:validation:Optional ReportsURI string `json:"reportsURI,omitempty" protobuf:"bytes,3,opt,name=reportsURI"` // The locations of the unit test results. One result set for each key // +kubebuilder:validation:Optional UnitTestsURI string `json:"unitTestsURI,omitempty" protobuf:"bytes,4,opt,name=unitTestsURI"` // The locations of the time series feature files. Each file contains a line for each feature // +kubebuilder:validation:Optional FeaturesURI string `json:"featuresURI,omitempty" protobuf:"bytes,5,opt,name=featuresURI"` // Holds the ongoing worker results; when a worker finishes, we update the location of their result files // +kubebuilder:validation:Optional WorkerResults []catalog.WorkerRunResult `json:"workerResults,omitempty" protobuf:"bytes,6,rep,name=workerResults"` }
Represents the status of a group-by operation
func (*DatasetGroupByStatus) DeepCopy ¶ added in v0.5.173
func (in *DatasetGroupByStatus) DeepCopy() *DatasetGroupByStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetGroupByStatus.
func (*DatasetGroupByStatus) DeepCopyInto ¶ added in v0.5.173
func (in *DatasetGroupByStatus) DeepCopyInto(out *DatasetGroupByStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetGroupByStatus) Descriptor ¶ added in v0.5.172
func (*DatasetGroupByStatus) Descriptor() ([]byte, []int)
func (*DatasetGroupByStatus) Marshal ¶ added in v0.5.172
func (m *DatasetGroupByStatus) Marshal() (dAtA []byte, err error)
func (*DatasetGroupByStatus) MarshalTo ¶ added in v0.5.172
func (m *DatasetGroupByStatus) MarshalTo(dAtA []byte) (int, error)
func (*DatasetGroupByStatus) MarshalToSizedBuffer ¶ added in v0.5.172
func (m *DatasetGroupByStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DatasetGroupByStatus) ProtoMessage ¶ added in v0.5.172
func (*DatasetGroupByStatus) ProtoMessage()
func (*DatasetGroupByStatus) Reset ¶ added in v0.5.172
func (m *DatasetGroupByStatus) Reset()
func (*DatasetGroupByStatus) Size ¶ added in v0.5.172
func (m *DatasetGroupByStatus) Size() (n int)
func (*DatasetGroupByStatus) String ¶ added in v0.5.172
func (this *DatasetGroupByStatus) String() string
func (*DatasetGroupByStatus) Unmarshal ¶ added in v0.5.172
func (m *DatasetGroupByStatus) Unmarshal(dAtA []byte) error
func (*DatasetGroupByStatus) XXX_DiscardUnknown ¶ added in v0.5.172
func (m *DatasetGroupByStatus) XXX_DiscardUnknown()
func (*DatasetGroupByStatus) XXX_Marshal ¶ added in v0.5.172
func (m *DatasetGroupByStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DatasetGroupByStatus) XXX_Merge ¶ added in v0.5.172
func (m *DatasetGroupByStatus) XXX_Merge(src proto.Message)
func (*DatasetGroupByStatus) XXX_Size ¶ added in v0.5.172
func (m *DatasetGroupByStatus) XXX_Size() int
func (*DatasetGroupByStatus) XXX_Unmarshal ¶ added in v0.5.172
func (m *DatasetGroupByStatus) XXX_Unmarshal(b []byte) error
type DatasetList ¶
type DatasetList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"` Items []Dataset `json:"items" protobuf:"bytes,2,rep,name=items"` }
+kubebuilder:object:root=true DatasetList contains a list of Datasets
func (*DatasetList) DeepCopy ¶
func (in *DatasetList) DeepCopy() *DatasetList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetList.
func (*DatasetList) DeepCopyInto ¶
func (in *DatasetList) DeepCopyInto(out *DatasetList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetList) DeepCopyObject ¶
func (in *DatasetList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DatasetList) Descriptor ¶
func (*DatasetList) Descriptor() ([]byte, []int)
func (*DatasetList) Marshal ¶
func (m *DatasetList) Marshal() (dAtA []byte, err error)
func (*DatasetList) MarshalToSizedBuffer ¶
func (m *DatasetList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DatasetList) ProtoMessage ¶
func (*DatasetList) ProtoMessage()
func (*DatasetList) Reset ¶
func (m *DatasetList) Reset()
func (*DatasetList) Size ¶
func (m *DatasetList) Size() (n int)
func (*DatasetList) String ¶
func (this *DatasetList) String() string
func (*DatasetList) Unmarshal ¶
func (m *DatasetList) Unmarshal(dAtA []byte) error
func (*DatasetList) XXX_DiscardUnknown ¶
func (m *DatasetList) XXX_DiscardUnknown()
func (*DatasetList) XXX_Marshal ¶
func (m *DatasetList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DatasetList) XXX_Merge ¶
func (m *DatasetList) XXX_Merge(src proto.Message)
func (*DatasetList) XXX_Size ¶
func (m *DatasetList) XXX_Size() int
func (*DatasetList) XXX_Unmarshal ¶
func (m *DatasetList) XXX_Unmarshal(b []byte) error
type DatasetPhase ¶
type DatasetPhase string
const ( DatasetPhasePending DatasetPhase = "Pending" // when generating DatasetPhaseGenerating DatasetPhase = "Generating" // when generating DatasetPhaseGenSuccess DatasetPhase = "GenSuccess" // when synthetic gen success DatasetPhaseIngestRunning DatasetPhase = "Ingesting" DatasetPhaseIngestSuccess DatasetPhase = "Ingested" DatasetPhaseGrouping DatasetPhase = "Grouping" DatasetPhaseGrouped DatasetPhase = "Grouped" DatasetPhaseReportRunning DatasetPhase = "Reporting" DatasetPhaseReportSuccess DatasetPhase = "Reported" DatasetPhaseProfileRunning DatasetPhase = "Profiling" DatasetPhaseProfileSuccess DatasetPhase = "Profiled" DatasetPhaseUnitTesting DatasetPhase = "UnitTesting" DatasetPhaseSnapshotRunning DatasetPhase = "TakingSnapshot" DatasetPhaseSnapshotSuccess DatasetPhase = "Snapshotted" DatasetPhaseFailed DatasetPhase = "Failed" DatasetPhaseAborted DatasetPhase = "Aborted" DatasetPhaseReady DatasetPhase = "Ready" )
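A small sketch of working with DatasetPhase values. The isTerminal helper is hypothetical (not part of the API), which phases count as terminal is an assumption, and the import path is assumed as well.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// isTerminal reports whether the controller is assumed to stop advancing the Dataset.
func isTerminal(p data.DatasetPhase) bool {
	switch p {
	case data.DatasetPhaseReady, data.DatasetPhaseFailed, data.DatasetPhaseAborted:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isTerminal(data.DatasetPhaseReady)) // true
}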
type DatasetRole ¶ added in v0.4.914
type DatasetRole string
+kubebuilder:validation:Enum="unlabled";"serving";"feedback";"training";"prediction";"featuregroup";
const ( DatasetRoleUnlabeled DatasetRole = "unlabeled" // Unlabeled dataset DatasetRoleFeatureGroup DatasetRole = "featuregroup" // Unlabeled dataset DatasetRoleServing DatasetRole = "serving" // Dataset contain serving data. DatasetRoleFeedback DatasetRole = "feedback" // feedback dataset which was labeled DatasetRoleTraining DatasetRole = "training" // Regular Labeled dataset DatasetRolePrediction DatasetRole = "prediction" // Regular Labeled dataset )
type DatasetSpec ¶
type DatasetSpec struct { // The name of the Account which created the object, which exists in the same tenant as the object // +kubebuilder:default:="no-one" // +kubebuilder:validation:Optional Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"` // The name of the DataProductVersion which describes the version of the resource // that exists in the same DataProduct namespace as the resource // +kubebuilder:default:="" // +kubebuilder:validation:MaxLength=63 // +kubebuilder:validation:Required // +required VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"` // The reference to the Location Source resource which exists in the same Location Product namespace as the object. // The Location Source must represent the columns and the task type of the Dataset. The validation rules associated with // the Location Source will be validated against the raw data of the Dataset once it is created // +kubebuilder:validation:Required // +kubebuilder:validation:MaxLength=63 // +kubebuilder:default:="" // +required DataSourceName *string `json:"datasourceName,omitempty" protobuf:"bytes,3,opt,name=datasourceName"` // In case of training data, this is the model class name that created it. // +kubebuilder:validation:Optional FeatureGroupName *string `json:"featureGroupName,omitempty" protobuf:"bytes,4,opt,name=featureGroupName"` // User-provided description of the object // +kubebuilder:validation:MaxLength=512 // +kubebuilder:default:="" // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" protobuf:"bytes,7,opt,name=description"` // User-provided display name of the object // +kubebuilder:default:="" // +kubebuilder:validation:Optional DisplayName *string `json:"displayName,omitempty" protobuf:"bytes,8,opt,name=displayName"` // The dataset role // +kubebuilder:default:="training" // +kubebuilder:validation:Optional Role *DatasetRole `json:"role,omitempty" protobuf:"bytes,9,opt,name=role"` // Tags attached to the dataset // +kubebuilder:validation:Optional Tags []string `json:"tags,omitempty" protobuf:"bytes,10,rep,name=tags"` // Indicates if a PDF report containing the Dataset's profile should be generated // +kubebuilder:default:=true // +kubebuilder:validation:Optional Reported *bool `json:"reported,omitempty" protobuf:"varint,11,opt,name=reported"` // Indicates if the resource controller has created a snapshot of the data in the case that it is being read // directly from a database, and must be converted to a flat-file type such as a CSV as a result // +kubebuilder:default:=false // +kubebuilder:validation:Optional Snapshotted *bool `json:"snapshotted,omitempty" protobuf:"varint,12,opt,name=snapshotted"` // Indicates if the Dataset should be checked against the validation rules of its Location Source // +kubebuilder:default:=true // +kubebuilder:validation:Optional UnitTested *bool `json:"unitTested,omitempty" protobuf:"varint,13,opt,name=unitTested"` // Origin is the location of the data file or database query which holds the raw data of the Dataset. 
When the Dataset is // created, the resource controller will retrieve the data from the location, validate it against its Location Source // if applicable, and store it inside the `live` section of the Virtual Bucket resource specified by the location // +kubebuilder:validation:Optional Origin DataLocation `json:"origin,omitempty" protobuf:"bytes,14,opt,name=origin"` // Location is the final location of the data which was copied from the `Origin` location during the ingestion phase. // This field is set by the Dataset resource controller and should not be changed by any end-users // +kubebuilder:validation:Required // +required Location DataLocation `json:"location,omitempty" protobuf:"bytes,15,opt,name=location"` // Resources specifies the resource requirements which the Dataset will request when creating Jobs to analyze the data // +kubebuilder:validation:Optional Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,16,opt,name=resources"` // The deadline in seconds for all Jobs created by the Dataset // +kubebuilder:default:=600 // +kubebuilder:validation:Optional Timeout *int64 `json:"timeout,omitempty" protobuf:"varint,17,opt,name=timeout"` // The type of dataset which was uploaded. `tabular` is the only supported type as of the current release // +kubebuilder:default:="tabular" // +kubebuilder:validation:Optional Type *catalog.DatasetType `json:"type,omitempty" protobuf:"bytes,18,opt,name=type"` // The specification for how the data should be sampled, if applicable. Sampling may improve dataset and model creation // time in the case of very large datasets that are being rapidly prototyped and iterated on // +kubebuilder:validation:Optional Sample SampleSpec `json:"sample,omitempty" protobuf:"bytes,19,opt,name=sample"` // If the dataset is synthetic, this is the synthetic spec // +kubebuilder:validation:Optional Synthetic SyntheticSpec `json:"synthetic,omitempty" protobuf:"bytes,20,opt,name=synthetic "` // The machine learning task relevant to the Dataset. This field *must* be the same as the Location Source of the object // +kubebuilder:validation:Optional Task *catalog.MLTask `json:"task,omitempty" protobuf:"bytes,21,opt,name=task"` // The machine learning subtask relevant to the Dataset. This field *must* be the same as the Location Source of the object // +kubebuilder:default:=none // +kubebuilder:validation:Optional SubTask *catalog.MLSubtask `json:"subtask,omitempty" protobuf:"bytes,22,opt,name=subtask"` // The specification for how to find the correlations of the Dataset's features during the profiling phase. // Based on the specification, the data plane will compute the correlation between each feature and will store the highest-scoring // +kubebuilder:validation:Optional Correlation CorrelationSpec `json:"correlation,omitempty" protobuf:"bytes,23,opt,name=correlation"` // Indicates if the Dataset should be quickly processed. // If enabled, the validation, profiling, and reporting phases will be skipped. // +kubebuilder:default:=false // +kubebuilder:validation:Optional Fast *bool `json:"fast,omitempty" protobuf:"varint,24,opt,name=fast"` // Indicates if the Dataset should be featurized. Features are computed using tsfresh. // If the dataset is a grouped dataset, a feature will be computed for each group. // If enabled, the validation, profiling, and reporting phases will be skipped. // +kubebuilder:default:=false // +kubebuilder:validation:Optional Featurized *bool `json:"featurized,omitempty" protobuf:"varint,25,opt,name=featurized"` // The reference to the Lab under which Jobs created by the Dataset will be executed // +kubebuilder:validation:Optional LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,26,opt,name=labRef"` // For datasets that contain feedback information, this is a reference to the serving dataset // +kubebuilder:validation:Optional ServingDatasetRef v1.ObjectReference `json:"servingDatasetRef,omitempty" protobuf:"bytes,27,opt,name=servingDatasetRef"` // Used for prediction datasets; contains a reference to the Predictor resource that created this dataset // +kubebuilder:validation:Optional PredictorRef v1.ObjectReference `json:"predictorRef,omitempty" protobuf:"bytes,28,opt,name=predictorRef"` // If true, generate a feature histogram object from this dataset's columns. // +kubebuilder:default:=false // +kubebuilder:validation:Optional GenerateFeatureHistogram *bool `json:"generateFeatureHistogram,omitempty" protobuf:"varint,29,opt,name=generateFeatureHistogram"` // The specification for tests for a new dataset // +kubebuilder:validation:Optional UnitTests catalog.TestSuite `json:"unitTests,omitempty" protobuf:"bytes,30,opt,name=unitTests"` // Define how to group by the base dataset, before making the forecasts. // By default, this dataset is assigned GroupBy GroupBySpec `json:"groupBy,omitempty" protobuf:"bytes,31,opt,name=groupBy"` // Define how to group by the base dataset, before making the forecasts. // By default, this dataset is assigned GroupLocations GroupDatasetLocationsSpec `json:"groupLocations,omitempty" protobuf:"bytes,32,opt,name=groupLocations"` // If this dataset represents a group in a multi-series dataset, these are the values of the group key. // +kubebuilder:validation:Optional Key []string `json:"key,omitempty" protobuf:"bytes,33,rep,name=key"` // For filtering // +kubebuilder:validation:Optional MinEventTime *metav1.Time `json:"minEventTime,omitempty" protobuf:"bytes,34,opt,name=minEventTime"` // For filtering // +kubebuilder:validation:Optional MaxEventTime *metav1.Time `json:"maxEventTime,omitempty" protobuf:"bytes,35,opt,name=maxEventTime"` // The model class for this dataset if the dataset was created by a model class // +kubebuilder:validation:Optional ModelClassName *string `json:"modelClassName,omitempty" protobuf:"bytes,36,opt,name=modelClassName"` // If this report was created by a model class run, this is the run name // +kubebuilder:validation:Optional ModelClassRunName *string `json:"modelClassRunName,omitempty" protobuf:"bytes,37,opt,name=modelClassRunName"` // List the feature groups that were used to create this dataset // This is used for lineage. // +kubebuilder:validation:Optional FeatureGroups []v1.ObjectReference `json:"featureGroups,omitempty" protobuf:"bytes,38,rep,name=featureGroups"` }
DatasetSpec defines the desired state of the Dataset
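For illustration, a minimal Dataset that points at an existing DataSource; every name is made up, the DataLocation fields are left empty because its layout is defined elsewhere in this package, and the import path is assumed.

package main

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func strPtr(s string) *string { return &s }

func main() {
	dataset := data.Dataset{
		ObjectMeta: metav1.ObjectMeta{Name: "churn-2024-01", Namespace: "churn-product"},
		Spec: data.DatasetSpec{
			VersionName:    strPtr("v1"),
			DataSourceName: strPtr("churn"),
			Description:    strPtr("monthly churn snapshot"),
			Origin:         data.DataLocation{}, // fill in per the DataLocation type
		},
	}
	_ = dataset
}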
func (*DatasetSpec) DeepCopy ¶
func (in *DatasetSpec) DeepCopy() *DatasetSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpec.
func (*DatasetSpec) DeepCopyInto ¶
func (in *DatasetSpec) DeepCopyInto(out *DatasetSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetSpec) Descriptor ¶
func (*DatasetSpec) Descriptor() ([]byte, []int)
func (*DatasetSpec) Marshal ¶
func (m *DatasetSpec) Marshal() (dAtA []byte, err error)
func (*DatasetSpec) MarshalToSizedBuffer ¶
func (m *DatasetSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DatasetSpec) ProtoMessage ¶
func (*DatasetSpec) ProtoMessage()
func (*DatasetSpec) Reset ¶
func (m *DatasetSpec) Reset()
func (*DatasetSpec) Size ¶
func (m *DatasetSpec) Size() (n int)
func (*DatasetSpec) String ¶
func (this *DatasetSpec) String() string
func (*DatasetSpec) Unmarshal ¶
func (m *DatasetSpec) Unmarshal(dAtA []byte) error
func (*DatasetSpec) XXX_DiscardUnknown ¶
func (m *DatasetSpec) XXX_DiscardUnknown()
func (*DatasetSpec) XXX_Marshal ¶
func (m *DatasetSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DatasetSpec) XXX_Merge ¶
func (m *DatasetSpec) XXX_Merge(src proto.Message)
func (*DatasetSpec) XXX_Size ¶
func (m *DatasetSpec) XXX_Size() int
func (*DatasetSpec) XXX_Unmarshal ¶
func (m *DatasetSpec) XXX_Unmarshal(b []byte) error
type DatasetStatistics ¶
type DatasetStatistics struct { // Columns contains the collection of statistics for each feature // +kubebuilder:validation:Optional Columns []ColumnStatistics `json:"columns,omitempty" protobuf:"bytes,1,rep,name=columns"` // Number of rows observed from the data // +kubebuilder:validation:Optional Rows int32 `json:"rows,omitempty" protobuf:"varint,3,opt,name=rows"` // Number of columns observed from the data // +kubebuilder:validation:Optional Cols int32 `json:"cols,omitempty" protobuf:"varint,4,opt,name=cols"` // The file size of the data in bytes // +kubebuilder:validation:Optional FileSize int32 `json:"fileSize,omitempty" protobuf:"varint,5,opt,name=fileSize"` // The top correlations between all features and the target feature // +kubebuilder:validation:Optional CorrelationsWithTarget []Correlation `json:"correlationsWithTarget,omitempty" protobuf:"bytes,6,rep,name=correlationsWithTarget"` // The top correlations between features, computed per the CorrelationSpec of the parent Dataset // +kubebuilder:validation:Optional TopCorrelations []Correlation `json:"topCorrelations,omitempty" protobuf:"bytes,7,rep,name=topCorrelations"` }
DatasetStatistics contains statistics about the Dataset's overall data, as well as every feature of the data. The data structure is populated with information during the `Profiling` phase of the parent Dataset.
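A minimal sketch of reading the dataset-level statistics once the Profiling phase has populated them; the import path is an assumption, as above.

    package example

    import (
        "fmt"

        data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
    )

    // summarize prints the row/column counts and the number of computed correlations.
    func summarize(stats data.DatasetStatistics) {
        fmt.Printf("rows=%d cols=%d size=%d bytes\n", stats.Rows, stats.Cols, stats.FileSize)
        fmt.Printf("column profiles=%d, correlations with target=%d\n",
            len(stats.Columns), len(stats.CorrelationsWithTarget))
    }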
func (*DatasetStatistics) DeepCopy ¶
func (in *DatasetStatistics) DeepCopy() *DatasetStatistics
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetStatistics.
func (*DatasetStatistics) DeepCopyInto ¶
func (in *DatasetStatistics) DeepCopyInto(out *DatasetStatistics)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetStatistics) Descriptor ¶
func (*DatasetStatistics) Descriptor() ([]byte, []int)
func (*DatasetStatistics) Marshal ¶
func (m *DatasetStatistics) Marshal() (dAtA []byte, err error)
func (*DatasetStatistics) MarshalTo ¶
func (m *DatasetStatistics) MarshalTo(dAtA []byte) (int, error)
func (*DatasetStatistics) MarshalToSizedBuffer ¶
func (m *DatasetStatistics) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DatasetStatistics) ProtoMessage ¶
func (*DatasetStatistics) ProtoMessage()
func (*DatasetStatistics) Reset ¶
func (m *DatasetStatistics) Reset()
func (*DatasetStatistics) Size ¶
func (m *DatasetStatistics) Size() (n int)
func (*DatasetStatistics) String ¶
func (this *DatasetStatistics) String() string
func (*DatasetStatistics) Unmarshal ¶
func (m *DatasetStatistics) Unmarshal(dAtA []byte) error
func (*DatasetStatistics) XXX_DiscardUnknown ¶
func (m *DatasetStatistics) XXX_DiscardUnknown()
func (*DatasetStatistics) XXX_Marshal ¶
func (m *DatasetStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DatasetStatistics) XXX_Merge ¶
func (m *DatasetStatistics) XXX_Merge(src proto.Message)
func (*DatasetStatistics) XXX_Size ¶
func (m *DatasetStatistics) XXX_Size() int
func (*DatasetStatistics) XXX_Unmarshal ¶
func (m *DatasetStatistics) XXX_Unmarshal(b []byte) error
type DatasetStatus ¶
type DatasetStatus struct { // Statistics for each column of the Dataset, which are generated during the profiling phase. // +kubebuilder:validation:Optional Statistics DatasetStatistics `json:"statistics,omitempty" protobuf:"bytes,1,opt,name=statistics"` // The current phase of the Dataset progress // +kubebuilder:default:="Pending" // +kubebuilder:validation:Optional Phase DatasetPhase `json:"phase,omitempty" protobuf:"bytes,2,opt,name=phase"` // Reference to the report object that was generated for the dataset, which exists in the same Data Product namespace // as the object // +kubebuilder:validation:Optional ReportName string `json:"reportName,omitempty" protobuf:"bytes,3,opt,name=reportName"` // The location of the report generated during the reporting phase. This field is intended for internal use // +kubebuilder:validation:Optional ReportURI string `json:"reportURI,omitempty" protobuf:"bytes,4,opt,name=reportURI"` // The location of raw profile data. This field is intended for internal use // +kubebuilder:validation:Optional ProfileURI string `json:"profileURI" protobuf:"bytes,5,opt,name=profileURI"` // Whether or not the data was detected as imbalanced //+kubebuilder:validation:Optional Imbalanced bool `json:"imbalanced,omitempty" protobuf:"varint,6,opt,name=imbalanced"` // The location of the anomaly file. The file contains the list of rows that were marked as anomalies by an isolation forest algorithm // +kubebuilder:validation:Optional AnomaliesURI string `json:"anomaliesURI" protobuf:"bytes,7,opt,name=anomaliesURI"` // ObservedGeneration is the last generation that was acted on //+kubebuilder:validation:Optional ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,8,opt,name=observedGeneration"` // List of validation results which are generated for every validation rule associated with the Dataset's Data Source //+kubebuilder:validation:Optional TestResults catalog.TestSuiteResult `json:"testResults,omitempty" protobuf:"bytes,9,opt,name=testResults"` // Last time the Dataset was used with a Study //+kubebuilder:validation:Optional LastStudyAt *metav1.Time `json:"lastStudyAt,omitempty" protobuf:"bytes,10,opt,name=lastStudyAt"` // In the case of failure, the Dataset resource controller will set this field with a failure reason //+kubebuilder:validation:Optional FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,12,opt,name=failureReason"` // In the case of failure, the Dataset resource controller will set this field with a failure message //+kubebuilder:validation:Optional FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,13,opt,name=failureMessage"` // The current progress of the Dataset, with a maximum of 100, that is associated with the current phase //+kubebuilder:default:=0 // +kubebuilder:validation:Optional Progress int32 `json:"progress,omitempty" protobuf:"varint,14,opt,name=progress"` // Sha256 signature of the raw data. Intended for internal use // +kubebuilder:default:="" // +kubebuilder:validation:Optional Hash string `json:"hash,omitempty" protobuf:"bytes,15,opt,name=hash"` // The log file specification that determines the location of all logs produced by the object Logs catalog.Logs `json:"logs" protobuf:"bytes,16,opt,name=logs"` // If the dataset is derived, the name of the Dataset that the object is derived from // +kubebuilder:validation:Optional DerivedFromDataset *string `json:"derivedFromDataset,omitempty" protobuf:"bytes,17,opt,name=derivedFromDataset"` // The last time the object was updated //+kubebuilder:validation:Optional UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,18,opt,name=updatedAt"` // The Docker images used during the analysis of the Dataset // +kubebuilder:validation:Optional Images catalog.Images `json:"images,omitempty" protobuf:"bytes,19,opt,name=images"` // The time that the Dataset finished processing, either due to completion or failure // +kubebuilder:validation:Optional CompletedAt *metav1.Time `json:"completedAt,omitempty" protobuf:"bytes,21,opt,name=completedAt"` // The generated training feature histogram; empty if no feature histogram was generated // +kubebuilder:validation:Optional FeatureHistogramRef v1.ObjectReference `json:"featureHistogramRef,omitempty" protobuf:"bytes,22,opt,name=featureHistogramRef"` // The location of the dataset index file // +kubebuilder:validation:Optional GroupBy DatasetGroupByStatus `json:"groupby,omitempty" protobuf:"bytes,23,opt,name=groupby"` // +patchMergeKey=type // +patchStrategy=merge // +kubebuilder:validation:Optional Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,24,rep,name=conditions"` }
DatasetStatus defines the observed state of a Dataset object
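A short, hedged sketch of inspecting a DatasetStatus from a client or controller; it only touches fields documented above and assumes the same import path as the earlier sketches.

    package example

    import (
        "fmt"

        data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
    )

    // datasetState renders a one-line summary of the observed state.
    func datasetState(status data.DatasetStatus) string {
        if status.FailureReason != nil {
            msg := ""
            if status.FailureMessage != nil {
                msg = *status.FailureMessage
            }
            return fmt.Sprintf("failed (%v): %s", *status.FailureReason, msg)
        }
        return fmt.Sprintf("phase=%s progress=%d%%", status.Phase, status.Progress)
    }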
func (*DatasetStatus) DeepCopy ¶
func (in *DatasetStatus) DeepCopy() *DatasetStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetStatus.
func (*DatasetStatus) DeepCopyInto ¶
func (in *DatasetStatus) DeepCopyInto(out *DatasetStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetStatus) Descriptor ¶
func (*DatasetStatus) Descriptor() ([]byte, []int)
func (*DatasetStatus) Marshal ¶
func (m *DatasetStatus) Marshal() (dAtA []byte, err error)
func (*DatasetStatus) MarshalToSizedBuffer ¶
func (m *DatasetStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DatasetStatus) ProtoMessage ¶
func (*DatasetStatus) ProtoMessage()
func (*DatasetStatus) Reset ¶
func (m *DatasetStatus) Reset()
func (*DatasetStatus) Size ¶
func (m *DatasetStatus) Size() (n int)
func (*DatasetStatus) String ¶
func (this *DatasetStatus) String() string
func (*DatasetStatus) Unmarshal ¶
func (m *DatasetStatus) Unmarshal(dAtA []byte) error
func (*DatasetStatus) XXX_DiscardUnknown ¶
func (m *DatasetStatus) XXX_DiscardUnknown()
func (*DatasetStatus) XXX_Marshal ¶
func (m *DatasetStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DatasetStatus) XXX_Merge ¶
func (m *DatasetStatus) XXX_Merge(src proto.Message)
func (*DatasetStatus) XXX_Size ¶
func (m *DatasetStatus) XXX_Size() int
func (*DatasetStatus) XXX_Unmarshal ¶
func (m *DatasetStatus) XXX_Unmarshal(b []byte) error
type DatasetTemplate ¶
type DatasetTemplate struct { // Standard object's metadata. // More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata // +kubebuilder:validation:Optional metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Spec DatasetSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"` }
DatasetTemplate is used to generate new datasets
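A sketch of stamping a new Dataset from a template. It assumes the Dataset resource type documented in this package follows the usual CRD layout (ObjectMeta plus Spec); the import path and names are assumptions.

    package example

    import (
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

        data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
    )

    func stampDataset(tpl data.DatasetTemplate, name string) data.Dataset {
        return data.Dataset{
            ObjectMeta: metav1.ObjectMeta{Name: name, Labels: tpl.Labels},
            // Deep-copy the template spec so later edits to the new Dataset do not mutate the template.
            Spec: *tpl.Spec.DeepCopy(),
        }
    }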
func (*DatasetTemplate) DeepCopy ¶
func (in *DatasetTemplate) DeepCopy() *DatasetTemplate
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetTemplate.
func (*DatasetTemplate) DeepCopyInto ¶
func (in *DatasetTemplate) DeepCopyInto(out *DatasetTemplate)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetTemplate) Descriptor ¶
func (*DatasetTemplate) Descriptor() ([]byte, []int)
func (*DatasetTemplate) Marshal ¶
func (m *DatasetTemplate) Marshal() (dAtA []byte, err error)
func (*DatasetTemplate) MarshalToSizedBuffer ¶
func (m *DatasetTemplate) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DatasetTemplate) ProtoMessage ¶
func (*DatasetTemplate) ProtoMessage()
func (*DatasetTemplate) Reset ¶
func (m *DatasetTemplate) Reset()
func (*DatasetTemplate) Size ¶
func (m *DatasetTemplate) Size() (n int)
func (*DatasetTemplate) String ¶
func (this *DatasetTemplate) String() string
func (*DatasetTemplate) Unmarshal ¶
func (m *DatasetTemplate) Unmarshal(dAtA []byte) error
func (*DatasetTemplate) XXX_DiscardUnknown ¶
func (m *DatasetTemplate) XXX_DiscardUnknown()
func (*DatasetTemplate) XXX_Marshal ¶
func (m *DatasetTemplate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DatasetTemplate) XXX_Merge ¶
func (m *DatasetTemplate) XXX_Merge(src proto.Message)
func (*DatasetTemplate) XXX_Size ¶
func (m *DatasetTemplate) XXX_Size() int
func (*DatasetTemplate) XXX_Unmarshal ¶
func (m *DatasetTemplate) XXX_Unmarshal(b []byte) error
type Delimiter ¶
type Delimiter string
Delimiter specifies the character or group of characters used to separate fields +kubebuilder:validation:Enum="crlf";"cr";"lf";"semicolon";"colon";"comma";"tab";"space";"pipe"
const ( DelimiterCRLF Delimiter = "crlf" DelimiterCR Delimiter = "cr" DelimiterLF Delimiter = "lf" DelimiterSemicolon Delimiter = "semicolon" DelimiterColon Delimiter = "colon" DelimiterComma Delimiter = "comma" DelimiterTab Delimiter = "tab" DelimiterSpace Delimiter = "space" DelimiterPipe Delimiter = "pipe" )
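A hypothetical helper (not part of this package) that maps the field-separator Delimiter values above to the rune handed to an encoding/csv reader; the line-ending values (crlf, cr, lf) would be handled separately.

    package example

    import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

    func delimiterRune(d data.Delimiter) rune {
        switch d {
        case data.DelimiterSemicolon:
            return ';'
        case data.DelimiterColon:
            return ':'
        case data.DelimiterTab:
            return '\t'
        case data.DelimiterSpace:
            return ' '
        case data.DelimiterPipe:
            return '|'
        default: // DelimiterComma and any unrecognized value
            return ','
        }
    }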
type DriftThreshold ¶ added in v0.4.1017
type DriftThreshold struct { // The metric type name (e.g. F1 / Accuracy) // +kubebuilder:validation:Required Metric catalog.Metric `json:"metric" protobuf:"bytes,1,opt,name=metric"` // The value of the metric for quantitative observations // +kubebuilder:validation:Required // +required Value float64 `json:"value" protobuf:"bytes,2,opt,name=value"` }
DriftThreshold defines a drift threshold for a single metric.
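A hedged sketch of how such a threshold might be evaluated; the breached helper is hypothetical and assumes that lower observed values indicate degradation for the chosen metric.

    package example

    import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

    // breached reports whether an observed metric value crosses the configured threshold.
    func breached(t data.DriftThreshold, observed float64) bool {
        // Flip the comparison for error-style metrics where higher means worse.
        return observed < t.Value
    }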
func (*DriftThreshold) DeepCopy ¶ added in v0.4.1017
func (in *DriftThreshold) DeepCopy() *DriftThreshold
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DriftThreshold.
func (*DriftThreshold) DeepCopyInto ¶ added in v0.4.1017
func (in *DriftThreshold) DeepCopyInto(out *DriftThreshold)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DriftThreshold) Descriptor ¶ added in v0.4.1017
func (*DriftThreshold) Descriptor() ([]byte, []int)
func (*DriftThreshold) Marshal ¶ added in v0.4.1017
func (m *DriftThreshold) Marshal() (dAtA []byte, err error)
func (*DriftThreshold) MarshalTo ¶ added in v0.4.1017
func (m *DriftThreshold) MarshalTo(dAtA []byte) (int, error)
func (*DriftThreshold) MarshalToSizedBuffer ¶ added in v0.4.1017
func (m *DriftThreshold) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*DriftThreshold) ProtoMessage ¶ added in v0.4.1017
func (*DriftThreshold) ProtoMessage()
func (*DriftThreshold) Reset ¶ added in v0.4.1017
func (m *DriftThreshold) Reset()
func (*DriftThreshold) Size ¶ added in v0.4.1017
func (m *DriftThreshold) Size() (n int)
func (*DriftThreshold) String ¶ added in v0.4.1017
func (this *DriftThreshold) String() string
func (*DriftThreshold) Unmarshal ¶ added in v0.4.1017
func (m *DriftThreshold) Unmarshal(dAtA []byte) error
func (*DriftThreshold) XXX_DiscardUnknown ¶ added in v0.4.1017
func (m *DriftThreshold) XXX_DiscardUnknown()
func (*DriftThreshold) XXX_Marshal ¶ added in v0.4.1017
func (m *DriftThreshold) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*DriftThreshold) XXX_Merge ¶ added in v0.4.1017
func (m *DriftThreshold) XXX_Merge(src proto.Message)
func (*DriftThreshold) XXX_Size ¶ added in v0.4.1017
func (m *DriftThreshold) XXX_Size() int
func (*DriftThreshold) XXX_Unmarshal ¶ added in v0.4.1017
func (m *DriftThreshold) XXX_Unmarshal(b []byte) error
type Entity ¶
type Entity struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Spec EntitySpec `json:"spec" protobuf:"bytes,2,opt,name=spec"` //+optional Status EntityStatus `json:"status" protobuf:"bytes,3,opt,name=status"` }
+kubebuilder:object:root=true +kubebuilder:storageversion +kubebuilder:resource:path=entities,singular=entity,shortName=et,categories={data,modela} +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" Entity represents an entity object
func (*Entity) AddFinalizer ¶
func (entity *Entity) AddFinalizer()
func (*Entity) CreateOrUpdateCond ¶
Merge or update condition
func (*Entity) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Entity.
func (*Entity) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Entity) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Entity) Descriptor ¶
func (*Entity) GetCondIdx ¶
func (Entity) HasFinalizer ¶
func (*Entity) LabelWithCommit ¶
func (*Entity) MarkArchived ¶
func (entity *Entity) MarkArchived()
func (*Entity) MarshalToSizedBuffer ¶
func (*Entity) ProtoMessage ¶
func (*Entity) ProtoMessage()
func (*Entity) RemoveFinalizer ¶
func (entity *Entity) RemoveFinalizer()
func (*Entity) SetChanged ¶
func (entity *Entity) SetChanged()
func (*Entity) SetupWebhookWithManager ¶
Set up the webhook with the manager.
func (Entity) ValidateCreate ¶
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (Entity) ValidateDelete ¶
func (Entity) ValidateUpdate ¶
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*Entity) XXX_DiscardUnknown ¶
func (m *Entity) XXX_DiscardUnknown()
func (*Entity) XXX_Marshal ¶
func (*Entity) XXX_Unmarshal ¶
type EntityCondition ¶
type EntityCondition struct { // Type of entity condition. Type EntityConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=EntityConditionType"` // Status of the condition, one of True, False, Unknown. Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"` // Last time the condition transitioned from one status to another. LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"` // The reason for the condition's last transition. Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"` // A human readable message indicating details about the transition. Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"` }
EntityCondition describes the state of a deployment at a certain point.
func (*EntityCondition) DeepCopy ¶
func (in *EntityCondition) DeepCopy() *EntityCondition
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntityCondition.
func (*EntityCondition) DeepCopyInto ¶
func (in *EntityCondition) DeepCopyInto(out *EntityCondition)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*EntityCondition) Descriptor ¶
func (*EntityCondition) Descriptor() ([]byte, []int)
func (*EntityCondition) Marshal ¶
func (m *EntityCondition) Marshal() (dAtA []byte, err error)
func (*EntityCondition) MarshalToSizedBuffer ¶
func (m *EntityCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*EntityCondition) ProtoMessage ¶
func (*EntityCondition) ProtoMessage()
func (*EntityCondition) Reset ¶
func (m *EntityCondition) Reset()
func (*EntityCondition) Size ¶
func (m *EntityCondition) Size() (n int)
func (*EntityCondition) String ¶
func (this *EntityCondition) String() string
func (*EntityCondition) Unmarshal ¶
func (m *EntityCondition) Unmarshal(dAtA []byte) error
func (*EntityCondition) XXX_DiscardUnknown ¶
func (m *EntityCondition) XXX_DiscardUnknown()
func (*EntityCondition) XXX_Marshal ¶
func (m *EntityCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*EntityCondition) XXX_Merge ¶
func (m *EntityCondition) XXX_Merge(src proto.Message)
func (*EntityCondition) XXX_Size ¶
func (m *EntityCondition) XXX_Size() int
func (*EntityCondition) XXX_Unmarshal ¶
func (m *EntityCondition) XXX_Unmarshal(b []byte) error
type EntityList ¶
type EntityList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Items []Entity `json:"items" protobuf:"bytes,2,rep,name=items"` }
+kubebuilder:object:root=true EntityList contains a list of Entity
func (*EntityList) DeepCopy ¶
func (in *EntityList) DeepCopy() *EntityList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntityList.
func (*EntityList) DeepCopyInto ¶
func (in *EntityList) DeepCopyInto(out *EntityList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*EntityList) DeepCopyObject ¶
func (in *EntityList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*EntityList) Descriptor ¶
func (*EntityList) Descriptor() ([]byte, []int)
func (*EntityList) Marshal ¶
func (m *EntityList) Marshal() (dAtA []byte, err error)
func (*EntityList) MarshalToSizedBuffer ¶
func (m *EntityList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*EntityList) ProtoMessage ¶
func (*EntityList) ProtoMessage()
func (*EntityList) Reset ¶
func (m *EntityList) Reset()
func (*EntityList) Size ¶
func (m *EntityList) Size() (n int)
func (*EntityList) String ¶
func (this *EntityList) String() string
func (*EntityList) Unmarshal ¶
func (m *EntityList) Unmarshal(dAtA []byte) error
func (*EntityList) XXX_DiscardUnknown ¶
func (m *EntityList) XXX_DiscardUnknown()
func (*EntityList) XXX_Marshal ¶
func (m *EntityList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*EntityList) XXX_Merge ¶
func (m *EntityList) XXX_Merge(src proto.Message)
func (*EntityList) XXX_Size ¶
func (m *EntityList) XXX_Size() int
func (*EntityList) XXX_Unmarshal ¶
func (m *EntityList) XXX_Unmarshal(b []byte) error
type EntitySpec ¶
type EntitySpec struct { // The reference to the tenant which the object exists under // +kubebuilder:validation:Optional TenantRef *v1.ObjectReference `json:"tenantRef,omitempty" protobuf:"bytes,1,opt,name=tenantRef"` // The entity version // +kubebuilder:default:="" Version *string `json:"version" protobuf:"bytes,2,opt,name=version"` // Description of the entity // +kubebuilder:default:="" // +kubebuilder:validation:Optional // +kubebuilder:validation:MaxLength=512 Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"` // Join keys are the way to join all the feature groups // Join key can be either UUID or seq. // Entity must have a join key JoinKey string `json:"joinKey,omitempty" protobuf:"bytes,4,rep,name=joinKey"` // Owner of this Entity // +kubebuilder:default:="no-one" // +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*" // +kubebuilder:validation:Optional Owner *string `json:"owner,omitempty" protobuf:"bytes,5,opt,name=owner"` // Tags for this feature groups // +kubebuilder:validation:Optional Tags []string `json:"tags,omitempty" protobuf:"bytes,6,rep,name=tags"` }
EntitySpec contains the desired state of an Entity.
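A minimal sketch of declaring an Entity with its join key; the names and the import path are assumptions.

    package example

    import (
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

        data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
    )

    func newCustomerEntity() data.Entity {
        owner := "data-team" // hypothetical owner
        return data.Entity{
            ObjectMeta: metav1.ObjectMeta{Name: "customer", Namespace: "modela-data"}, // hypothetical namespace
            Spec: data.EntitySpec{
                JoinKey: "customer_id", // every Entity must declare a join key
                Owner:   &owner,
                Tags:    []string{"crm"},
            },
        }
    }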
func (*EntitySpec) DeepCopy ¶
func (in *EntitySpec) DeepCopy() *EntitySpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntitySpec.
func (*EntitySpec) DeepCopyInto ¶
func (in *EntitySpec) DeepCopyInto(out *EntitySpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*EntitySpec) Descriptor ¶
func (*EntitySpec) Descriptor() ([]byte, []int)
func (*EntitySpec) Marshal ¶
func (m *EntitySpec) Marshal() (dAtA []byte, err error)
func (*EntitySpec) MarshalToSizedBuffer ¶
func (m *EntitySpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*EntitySpec) ProtoMessage ¶
func (*EntitySpec) ProtoMessage()
func (*EntitySpec) Reset ¶
func (m *EntitySpec) Reset()
func (*EntitySpec) Size ¶
func (m *EntitySpec) Size() (n int)
func (*EntitySpec) String ¶
func (this *EntitySpec) String() string
func (*EntitySpec) Unmarshal ¶
func (m *EntitySpec) Unmarshal(dAtA []byte) error
func (*EntitySpec) XXX_DiscardUnknown ¶
func (m *EntitySpec) XXX_DiscardUnknown()
func (*EntitySpec) XXX_Marshal ¶
func (m *EntitySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*EntitySpec) XXX_Merge ¶
func (m *EntitySpec) XXX_Merge(src proto.Message)
func (*EntitySpec) XXX_Size ¶
func (m *EntitySpec) XXX_Size() int
func (*EntitySpec) XXX_Unmarshal ¶
func (m *EntitySpec) XXX_Unmarshal(b []byte) error
type EntityStatus ¶
type EntityStatus struct { // ObservedGeneration is the Last generation that was acted on //+kubebuilder:validation:Optional ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"` // Last time the object was updated //+kubebuilder:validation:Optional UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,2,opt,name=updatedAt"` // +patchMergeKey=type // +patchStrategy=merge // +kubebuilder:validation:Optional Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,3,rep,name=conditions"` }
EntityStatus defines the observed state of Entity
func (*EntityStatus) DeepCopy ¶
func (in *EntityStatus) DeepCopy() *EntityStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntityStatus.
func (*EntityStatus) DeepCopyInto ¶
func (in *EntityStatus) DeepCopyInto(out *EntityStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*EntityStatus) Descriptor ¶
func (*EntityStatus) Descriptor() ([]byte, []int)
func (*EntityStatus) Marshal ¶
func (m *EntityStatus) Marshal() (dAtA []byte, err error)
func (*EntityStatus) MarshalToSizedBuffer ¶
func (m *EntityStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*EntityStatus) ProtoMessage ¶
func (*EntityStatus) ProtoMessage()
func (*EntityStatus) Reset ¶
func (m *EntityStatus) Reset()
func (*EntityStatus) Size ¶
func (m *EntityStatus) Size() (n int)
func (*EntityStatus) String ¶
func (this *EntityStatus) String() string
func (*EntityStatus) Unmarshal ¶
func (m *EntityStatus) Unmarshal(dAtA []byte) error
func (*EntityStatus) XXX_DiscardUnknown ¶
func (m *EntityStatus) XXX_DiscardUnknown()
func (*EntityStatus) XXX_Marshal ¶
func (m *EntityStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*EntityStatus) XXX_Merge ¶
func (m *EntityStatus) XXX_Merge(src proto.Message)
func (*EntityStatus) XXX_Size ¶
func (m *EntityStatus) XXX_Size() int
func (*EntityStatus) XXX_Unmarshal ¶
func (m *EntityStatus) XXX_Unmarshal(b []byte) error
type EscapeChar ¶
type EscapeChar string
+kubebuilder:validation:Enum="single-quote";"double-quote";"tilda";"none"
const ( SingleEscapeChar EscapeChar = "single-quote" DoubleEscapeChar EscapeChar = "double-quote" TildaEscapeChar EscapeChar = "tilda" NoneEscapeChar EscapeChar = "none" )
type ExcelNotebookSpec ¶
type ExcelNotebookSpec struct { // Indicates if the excel reader should use the first sheet that contains data // +kubebuilder:default:=false // +kubebuilder:validation:Optional FirstSheetWithData *bool `json:"firstSheetWithData,omitempty" protobuf:"varint,1,opt,name=firstSheetWithData"` // The name of the sheet that exists in the excel file to read data from // +kubebuilder:validation:Optional SheetName *string `json:"sheetName,omitempty" protobuf:"bytes,2,opt,name=sheetName"` // The index of the sheet in the excel file to read data from // +kubebuilder:validation:Optional SheetIndex *int32 `json:"sheetIndex,omitempty" protobuf:"varint,3,opt,name=sheetIndex"` // The position of the row that contains the column names (i.e. the header) // +kubebuilder:validation:Optional ColumnNamesRow *int32 `json:"columnNameRow,omitempty" protobuf:"varint,4,opt,name=columnNameRow"` // The specification for the bounds of the data // +kubebuilder:validation:Optional Data ExcelSheetArea `json:"data,omitempty" protobuf:"bytes,5,opt,name=data"` }
ExcelNotebookSpec specifies the format of an Excel file
func (*ExcelNotebookSpec) DeepCopy ¶
func (in *ExcelNotebookSpec) DeepCopy() *ExcelNotebookSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExcelNotebookSpec.
func (*ExcelNotebookSpec) DeepCopyInto ¶
func (in *ExcelNotebookSpec) DeepCopyInto(out *ExcelNotebookSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ExcelNotebookSpec) Descriptor ¶
func (*ExcelNotebookSpec) Descriptor() ([]byte, []int)
func (*ExcelNotebookSpec) Marshal ¶
func (m *ExcelNotebookSpec) Marshal() (dAtA []byte, err error)
func (*ExcelNotebookSpec) MarshalTo ¶
func (m *ExcelNotebookSpec) MarshalTo(dAtA []byte) (int, error)
func (*ExcelNotebookSpec) MarshalToSizedBuffer ¶
func (m *ExcelNotebookSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ExcelNotebookSpec) ProtoMessage ¶
func (*ExcelNotebookSpec) ProtoMessage()
func (*ExcelNotebookSpec) Reset ¶
func (m *ExcelNotebookSpec) Reset()
func (*ExcelNotebookSpec) Size ¶
func (m *ExcelNotebookSpec) Size() (n int)
func (*ExcelNotebookSpec) String ¶
func (this *ExcelNotebookSpec) String() string
func (*ExcelNotebookSpec) Unmarshal ¶
func (m *ExcelNotebookSpec) Unmarshal(dAtA []byte) error
func (*ExcelNotebookSpec) XXX_DiscardUnknown ¶
func (m *ExcelNotebookSpec) XXX_DiscardUnknown()
func (*ExcelNotebookSpec) XXX_Marshal ¶
func (m *ExcelNotebookSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ExcelNotebookSpec) XXX_Merge ¶
func (m *ExcelNotebookSpec) XXX_Merge(src proto.Message)
func (*ExcelNotebookSpec) XXX_Size ¶
func (m *ExcelNotebookSpec) XXX_Size() int
func (*ExcelNotebookSpec) XXX_Unmarshal ¶
func (m *ExcelNotebookSpec) XXX_Unmarshal(b []byte) error
type ExcelSheetArea ¶
type ExcelSheetArea struct { // Indicates if the excel reader should read the entire sheet; if false, it will read only within the bounds // specified by the `To` and `From` fields of the ExcelSheetArea // +kubebuilder:default:=false EntireSheet *bool `json:"entireSheet,omitempty" protobuf:"varint,1,opt,name=entireSheet"` // If reading part of the excel sheet, start with the column in this position FromColumn *int32 `json:"fromColumn,omitempty" protobuf:"varint,2,opt,name=fromColumn"` // If reading part of the excel sheet, end with the column in this position ToColumn *int32 `json:"toColumn,omitempty" protobuf:"varint,3,opt,name=toColumn"` // If reading part of the excel sheet, start with the row in this position FromRow *int32 `json:"fromRow,omitempty" protobuf:"varint,4,opt,name=fromRow"` // If reading part of the excel sheet, end with the row in this position ToRow *int32 `json:"toRow,omitempty" protobuf:"varint,5,opt,name=toRow"` }
ExcelSheetArea specifies the bounds of the data within an Excel sheet
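A sketch combining ExcelNotebookSpec and ExcelSheetArea to read a bounded region of a named sheet; the sheet name and bounds are illustrative, and the import path is assumed as above.

    package example

    import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

    func boundedOrdersSheet() data.ExcelNotebookSpec {
        sheet := "orders" // hypothetical sheet name
        headerRow := int32(1)
        entire := false
        fromCol, toCol := int32(1), int32(8)
        return data.ExcelNotebookSpec{
            SheetName:      &sheet,
            ColumnNamesRow: &headerRow, // the row holding the column names
            Data: data.ExcelSheetArea{
                EntireSheet: &entire,
                FromColumn:  &fromCol,
                ToColumn:    &toCol, // read columns 1 through 8 only
            },
        }
    }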
func (*ExcelSheetArea) DeepCopy ¶
func (in *ExcelSheetArea) DeepCopy() *ExcelSheetArea
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExcelSheetArea.
func (*ExcelSheetArea) DeepCopyInto ¶
func (in *ExcelSheetArea) DeepCopyInto(out *ExcelSheetArea)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ExcelSheetArea) Descriptor ¶
func (*ExcelSheetArea) Descriptor() ([]byte, []int)
func (*ExcelSheetArea) Marshal ¶
func (m *ExcelSheetArea) Marshal() (dAtA []byte, err error)
func (*ExcelSheetArea) MarshalToSizedBuffer ¶
func (m *ExcelSheetArea) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ExcelSheetArea) ProtoMessage ¶
func (*ExcelSheetArea) ProtoMessage()
func (*ExcelSheetArea) Reset ¶
func (m *ExcelSheetArea) Reset()
func (*ExcelSheetArea) Size ¶
func (m *ExcelSheetArea) Size() (n int)
func (*ExcelSheetArea) String ¶
func (this *ExcelSheetArea) String() string
func (*ExcelSheetArea) Unmarshal ¶
func (m *ExcelSheetArea) Unmarshal(dAtA []byte) error
func (*ExcelSheetArea) XXX_DiscardUnknown ¶
func (m *ExcelSheetArea) XXX_DiscardUnknown()
func (*ExcelSheetArea) XXX_Marshal ¶
func (m *ExcelSheetArea) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ExcelSheetArea) XXX_Merge ¶
func (m *ExcelSheetArea) XXX_Merge(src proto.Message)
func (*ExcelSheetArea) XXX_Size ¶
func (m *ExcelSheetArea) XXX_Size() int
func (*ExcelSheetArea) XXX_Unmarshal ¶
func (m *ExcelSheetArea) XXX_Unmarshal(b []byte) error
type FeatureGroup ¶ added in v0.5.282
type FeatureGroup struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Spec FeatureGroupSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"` //+optional Status FeatureGroupStatus `json:"status" protobuf:"bytes,3,opt,name=status"` }
+kubebuilder:object:root=true +kubebuilder:storageversion +kubebuilder:resource:path=featuregroups,singular=featuregroup,shortName="fg",categories={data,modela} +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Entity",type="string",JSONPath=".spec.entityName" +kubebuilder:printcolumn:name="Schedule",type="string",JSONPath=".spec.schedule",description="" +kubebuilder:printcolumn:name="Last Ingest",type="date",JSONPath=".status.ingestSchedule.lastRun",description="" +kubebuilder:printcolumn:name="Last Sync",type="date",JSONPath=".status.syncScedule.lastRun",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" FeatureGroup represents a group of features
func ParseFeatureGroupYaml ¶ added in v0.5.282
func ParseFeatureGroupYaml(content []byte) (*FeatureGroup, error)
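A small usage sketch for ParseFeatureGroupYaml; the manifest path is hypothetical and the import path is an assumption.

    package main

    import (
        "fmt"
        "log"
        "os"

        data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
    )

    func main() {
        content, err := os.ReadFile("featuregroup.yaml") // hypothetical manifest path
        if err != nil {
            log.Fatal(err)
        }
        fg, err := data.ParseFeatureGroupYaml(content)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(fg.Name, fg.Spec.EntityName)
    }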
func (*FeatureGroup) AddConfiditions ¶ added in v0.5.282
func (fg *FeatureGroup) AddConfiditions()
func (*FeatureGroup) AddFinalizer ¶ added in v0.5.282
func (fg *FeatureGroup) AddFinalizer()
func (FeatureGroup) Archived ¶ added in v0.5.282
func (fg FeatureGroup) Archived() bool
func (*FeatureGroup) CreateOrUpdateCond ¶ added in v0.5.282
func (fg *FeatureGroup) CreateOrUpdateCond(cond metav1.Condition)
Merge or update condition
func (*FeatureGroup) DeepCopy ¶ added in v0.5.282
func (in *FeatureGroup) DeepCopy() *FeatureGroup
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureGroup.
func (*FeatureGroup) DeepCopyInto ¶ added in v0.5.282
func (in *FeatureGroup) DeepCopyInto(out *FeatureGroup)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*FeatureGroup) DeepCopyObject ¶ added in v0.5.282
func (in *FeatureGroup) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*FeatureGroup) Default ¶ added in v0.5.282
func (fg *FeatureGroup) Default()
No defaults in this current release
func (*FeatureGroup) Descriptor ¶ added in v0.5.282
func (*FeatureGroup) Descriptor() ([]byte, []int)
func (FeatureGroup) ErrorAlert ¶ added in v0.5.309
func (fh FeatureGroup) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert
func (FeatureGroup) GetCond ¶ added in v0.5.282
func (fg FeatureGroup) GetCond(t string) metav1.Condition
func (FeatureGroup) GetCondIdx ¶ added in v0.5.282
func (fg FeatureGroup) GetCondIdx(t string) int
func (*FeatureGroup) HasFinalizer ¶ added in v0.5.282
func (fg *FeatureGroup) HasFinalizer() bool
func (FeatureGroup) IsDeleted ¶ added in v0.5.315
func (fg FeatureGroup) IsDeleted() bool
func (*FeatureGroup) IsGitObj ¶ added in v0.5.282
func (fg *FeatureGroup) IsGitObj() bool
func (*FeatureGroup) IsIngesting ¶ added in v0.5.373
func (fg *FeatureGroup) IsIngesting() bool
func (FeatureGroup) IsReady ¶ added in v0.5.282
func (fg FeatureGroup) IsReady() bool
func (*FeatureGroup) IsSynced ¶ added in v0.5.315
func (fg *FeatureGroup) IsSynced() bool
func (*FeatureGroup) IsSynching ¶ added in v0.5.373
func (fg *FeatureGroup) IsSynching() bool
func (FeatureGroup) Key ¶ added in v0.5.282
func (fg FeatureGroup) Key() string
func (*FeatureGroup) LabelWithCommit ¶ added in v0.5.282
func (fg *FeatureGroup) LabelWithCommit(commit string, uname string, branch string)
func (*FeatureGroup) MarkArchived ¶ added in v0.5.282
func (fg *FeatureGroup) MarkArchived()
func (*FeatureGroup) MarkIngestFailed ¶ added in v0.5.327
func (fg *FeatureGroup) MarkIngestFailed(msg string)
func (*FeatureGroup) MarkIngested ¶ added in v0.5.327
func (fg *FeatureGroup) MarkIngested()
func (*FeatureGroup) MarkIngesting ¶ added in v0.5.327
func (fg *FeatureGroup) MarkIngesting()
////////////////////////////////////////////// Ingest //////////////////////////////////////////////
func (*FeatureGroup) MarkReady ¶ added in v0.5.282
func (fg *FeatureGroup) MarkReady()
func (*FeatureGroup) MarkSyncFailed ¶ added in v0.5.308
func (fg *FeatureGroup) MarkSyncFailed(msg string)
func (*FeatureGroup) MarkSynced ¶ added in v0.5.306
func (fg *FeatureGroup) MarkSynced()
func (*FeatureGroup) MarkSyncing ¶ added in v0.5.315
func (fg *FeatureGroup) MarkSyncing()
///////////////////////////////////////////// Sync ////////////////////////////////////////////
func (*FeatureGroup) Marshal ¶ added in v0.5.282
func (m *FeatureGroup) Marshal() (dAtA []byte, err error)
func (*FeatureGroup) MarshalTo ¶ added in v0.5.282
func (m *FeatureGroup) MarshalTo(dAtA []byte) (int, error)
func (*FeatureGroup) MarshalToSizedBuffer ¶ added in v0.5.282
func (m *FeatureGroup) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*FeatureGroup) PrefixLiveURI ¶ added in v0.5.472
func (fg *FeatureGroup) PrefixLiveURI(path string) string
func (*FeatureGroup) ProtoMessage ¶ added in v0.5.282
func (*FeatureGroup) ProtoMessage()
func (*FeatureGroup) RemoveFinalizer ¶ added in v0.5.282
func (fg *FeatureGroup) RemoveFinalizer()
func (*FeatureGroup) RepEntry ¶ added in v0.5.282
func (fg *FeatureGroup) RepEntry() (string, error)
func (*FeatureGroup) RepPath ¶ added in v0.5.282
func (fg *FeatureGroup) RepPath(root string) (string, error)
Return the on disk rep location
func (*FeatureGroup) Reset ¶ added in v0.5.282
func (m *FeatureGroup) Reset()
func (*FeatureGroup) SetChanged ¶ added in v0.5.282
func (fg *FeatureGroup) SetChanged()
func (*FeatureGroup) SetupWebhookWithManager ¶ added in v0.5.282
func (fg *FeatureGroup) SetupWebhookWithManager(mgr ctrl.Manager) error
func (*FeatureGroup) Size ¶ added in v0.5.282
func (m *FeatureGroup) Size() (n int)
func (*FeatureGroup) String ¶ added in v0.5.282
func (this *FeatureGroup) String() string
func (FeatureGroup) TenantName ¶ added in v0.5.386
func (fg FeatureGroup) TenantName() string
func (*FeatureGroup) Unmarshal ¶ added in v0.5.282
func (m *FeatureGroup) Unmarshal(dAtA []byte) error
func (FeatureGroup) ValidateCreate ¶ added in v0.5.282
func (fg FeatureGroup) ValidateCreate() error
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (FeatureGroup) ValidateDelete ¶ added in v0.5.282
func (fg FeatureGroup) ValidateDelete() error
func (FeatureGroup) ValidateUpdate ¶ added in v0.5.282
func (fg FeatureGroup) ValidateUpdate(old runtime.Object) error
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*FeatureGroup) XXX_DiscardUnknown ¶ added in v0.5.282
func (m *FeatureGroup) XXX_DiscardUnknown()
func (*FeatureGroup) XXX_Marshal ¶ added in v0.5.282
func (m *FeatureGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*FeatureGroup) XXX_Merge ¶ added in v0.5.282
func (m *FeatureGroup) XXX_Merge(src proto.Message)
func (*FeatureGroup) XXX_Size ¶ added in v0.5.282
func (m *FeatureGroup) XXX_Size() int
func (*FeatureGroup) XXX_Unmarshal ¶ added in v0.5.282
func (m *FeatureGroup) XXX_Unmarshal(b []byte) error
type FeatureGroupConditionType ¶ added in v0.5.282
type FeatureGroupConditionType string
FeatureGroupConditionType
type FeatureGroupList ¶ added in v0.5.282
type FeatureGroupList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Items []FeatureGroup `json:"items" protobuf:"bytes,2,rep,name=items"` }
+kubebuilder:object:root=true FeatureGroupList contains a list of FeatureGroup objects
func (*FeatureGroupList) DeepCopy ¶ added in v0.5.282
func (in *FeatureGroupList) DeepCopy() *FeatureGroupList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureGroupList.
func (*FeatureGroupList) DeepCopyInto ¶ added in v0.5.282
func (in *FeatureGroupList) DeepCopyInto(out *FeatureGroupList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*FeatureGroupList) DeepCopyObject ¶ added in v0.5.282
func (in *FeatureGroupList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*FeatureGroupList) Descriptor ¶ added in v0.5.282
func (*FeatureGroupList) Descriptor() ([]byte, []int)
func (*FeatureGroupList) Marshal ¶ added in v0.5.282
func (m *FeatureGroupList) Marshal() (dAtA []byte, err error)
func (*FeatureGroupList) MarshalTo ¶ added in v0.5.282
func (m *FeatureGroupList) MarshalTo(dAtA []byte) (int, error)
func (*FeatureGroupList) MarshalToSizedBuffer ¶ added in v0.5.282
func (m *FeatureGroupList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*FeatureGroupList) ProtoMessage ¶ added in v0.5.282
func (*FeatureGroupList) ProtoMessage()
func (*FeatureGroupList) Reset ¶ added in v0.5.282
func (m *FeatureGroupList) Reset()
func (*FeatureGroupList) Size ¶ added in v0.5.282
func (m *FeatureGroupList) Size() (n int)
func (*FeatureGroupList) String ¶ added in v0.5.282
func (this *FeatureGroupList) String() string
func (*FeatureGroupList) Unmarshal ¶ added in v0.5.282
func (m *FeatureGroupList) Unmarshal(dAtA []byte) error
func (*FeatureGroupList) XXX_DiscardUnknown ¶ added in v0.5.282
func (m *FeatureGroupList) XXX_DiscardUnknown()
func (*FeatureGroupList) XXX_Marshal ¶ added in v0.5.282
func (m *FeatureGroupList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*FeatureGroupList) XXX_Merge ¶ added in v0.5.282
func (m *FeatureGroupList) XXX_Merge(src proto.Message)
func (*FeatureGroupList) XXX_Size ¶ added in v0.5.282
func (m *FeatureGroupList) XXX_Size() int
func (*FeatureGroupList) XXX_Unmarshal ¶ added in v0.5.282
func (m *FeatureGroupList) XXX_Unmarshal(b []byte) error
type FeatureGroupPhase ¶ added in v0.5.306
type FeatureGroupPhase string
const ( FeatureGroupPhaseSyncing FeatureGroupPhase = "Syncing" FeatureGroupPhaseIngesting FeatureGroupPhase = "Ingesting" FeatureGroupPhaseFailed FeatureGroupPhase = "Failed" FeatureGroupPhaseReady FeatureGroupPhase = "Ready" )
type FeatureGroupSpec ¶ added in v0.5.282
type FeatureGroupSpec struct { // The reference to the tenant which the object exists under // +kubebuilder:validation:Optional TenantRef *v1.ObjectReference `json:"tenantRef,omitempty" protobuf:"bytes,1,opt,name=tenantRef"` // Owner is the owner of the feature group. // +kubebuilder:default:="no-one" // +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*" // +kubebuilder:validation:Optional Owner *string `json:"owner,omitempty" protobuf:"bytes,2,opt,name=owner"` // Version name is the product version for the feature group. // +kubebuilder:default:="" // +kubebuilder:validation:Optional Version *string `json:"version,omitempty" protobuf:"bytes,3,opt,name=version"` // Description of the feature group. // +kubebuilder:default:="" // +kubebuilder:validation:Optional // +kubebuilder:validation:MaxLength=512 Description string `json:"description,omitempty" protobuf:"bytes,4,opt,name=description"` // How this group is ingested // +kubebuilder:default:="batch" // +kubebuilder:validation:Optional IngestType *catalog.FeatureStoreIngestType `json:"ingestType,omitempty" protobuf:"bytes,5,opt,name=ingestType"` // A feature group must be part of an entity. EntityName string `json:"entityName,omitempty" protobuf:"bytes,6,opt,name=entityName"` // Tags for this feature group // +kubebuilder:validation:Optional Tags []string `json:"tags,omitempty" protobuf:"bytes,7,rep,name=tags"` // Schedule for ingesting the data of the feature group. // On virtual feature groups (e.g. where the data already resides in a table), // the ingest will just perform feature profiling and run the feature group unit tests. // +kubebuilder:validation:Optional IngestSchedule catalog.RunSchedule `json:"ingestSchedule,omitempty" protobuf:"bytes,8,opt,name=ingestSchedule"` // Schedule for running the sync of the feature group data. // +kubebuilder:validation:Optional SyncSchedule catalog.RunSchedule `json:"syncSchedule,omitempty" protobuf:"bytes,9,opt,name=syncSchedule"` // In the case where the feature group data is stored as a flat file, the flat file format // defines how to read the file. // +kubebuilder:validation:Optional FlatFile *FlatFileFormatSpec `json:"flatfile,omitempty" protobuf:"bytes,10,opt,name=flatfile"` // The name of the data source which contains the schema for this entity // +kubebuilder:validation:Optional Schema Schema `json:"schema,omitempty" protobuf:"bytes,11,opt,name=schema"` // Unit tests to run on data from this feature group upon ingest. // +kubebuilder:validation:Optional Tests catalog.TestSuite `json:"tests,omitempty" protobuf:"bytes,12,opt,name=tests"` // Specifies the data location for this feature group. // This can be a table, a view or a file on S3. // +kubebuilder:validation:Optional Location DataLocation `json:"location,omitempty" protobuf:"bytes,13,opt,name=location"` // The time column. Might be null if the feature group does not have a time column. // +kubebuilder:validation:Optional TimeColumn *string `json:"timeColumn,omitempty" protobuf:"bytes,14,opt,name=timeColumn"` // The time column format // +kubebuilder:validation:Optional TimeColumnFormat *string `json:"timeColumnFormat,omitempty" protobuf:"bytes,15,opt,name=timeColumnFormat"` // The feature group primary key. // This is usually the key that is used to join the feature group // to other feature groups in the entity KeyColumn *string `json:"keyColumn,omitempty" protobuf:"bytes,16,opt,name=keyColumn"` // Materialization // +kubebuilder:validation:Optional Materialization MaterializationSpec `json:"materialization,omitempty" protobuf:"bytes,17,opt,name=materialization"` // Resources used for ingest and sync //+kubebuilder:validation:Optional Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,18,opt,name=resources"` }
FeatureGroupSpec contains the desired state of a FeatureGroup
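A minimal sketch of a FeatureGroupSpec that attaches a group of time-stamped features to an entity; the column and entity names are illustrative and the import path is assumed.

    package example

    import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

    func salesFeatureGroupSpec() data.FeatureGroupSpec {
        timeCol := "event_ts"   // hypothetical time column
        keyCol := "customer_id" // hypothetical primary/join key column
        return data.FeatureGroupSpec{
            EntityName:  "customer", // the parent Entity
            Description: "daily sales aggregates per customer",
            TimeColumn:  &timeCol,
            KeyColumn:   &keyCol,
            Tags:        []string{"sales"},
        }
    }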
func (*FeatureGroupSpec) DeepCopy ¶ added in v0.5.282
func (in *FeatureGroupSpec) DeepCopy() *FeatureGroupSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureGroupSpec.
func (*FeatureGroupSpec) DeepCopyInto ¶ added in v0.5.282
func (in *FeatureGroupSpec) DeepCopyInto(out *FeatureGroupSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*FeatureGroupSpec) Descriptor ¶ added in v0.5.282
func (*FeatureGroupSpec) Descriptor() ([]byte, []int)
func (*FeatureGroupSpec) Marshal ¶ added in v0.5.282
func (m *FeatureGroupSpec) Marshal() (dAtA []byte, err error)
func (*FeatureGroupSpec) MarshalTo ¶ added in v0.5.282
func (m *FeatureGroupSpec) MarshalTo(dAtA []byte) (int, error)
func (*FeatureGroupSpec) MarshalToSizedBuffer ¶ added in v0.5.282
func (m *FeatureGroupSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*FeatureGroupSpec) ProtoMessage ¶ added in v0.5.282
func (*FeatureGroupSpec) ProtoMessage()
func (*FeatureGroupSpec) Reset ¶ added in v0.5.282
func (m *FeatureGroupSpec) Reset()
func (*FeatureGroupSpec) Size ¶ added in v0.5.282
func (m *FeatureGroupSpec) Size() (n int)
func (*FeatureGroupSpec) String ¶ added in v0.5.282
func (this *FeatureGroupSpec) String() string
func (*FeatureGroupSpec) Unmarshal ¶ added in v0.5.282
func (m *FeatureGroupSpec) Unmarshal(dAtA []byte) error
func (*FeatureGroupSpec) XXX_DiscardUnknown ¶ added in v0.5.282
func (m *FeatureGroupSpec) XXX_DiscardUnknown()
func (*FeatureGroupSpec) XXX_Marshal ¶ added in v0.5.282
func (m *FeatureGroupSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*FeatureGroupSpec) XXX_Merge ¶ added in v0.5.282
func (m *FeatureGroupSpec) XXX_Merge(src proto.Message)
func (*FeatureGroupSpec) XXX_Size ¶ added in v0.5.282
func (m *FeatureGroupSpec) XXX_Size() int
func (*FeatureGroupSpec) XXX_Unmarshal ¶ added in v0.5.282
func (m *FeatureGroupSpec) XXX_Unmarshal(b []byte) error
type FeatureGroupStatus ¶ added in v0.5.282
type FeatureGroupStatus struct { Phase FeatureGroupPhase `json:"phase,omitempty" protobuf:"bytes,2,opt,name=phase"` // ObservedGeneration is the Last generation that was acted on //+kubebuilder:validation:Optional ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,3,opt,name=observedGeneration"` // Last time the object was updated //+kubebuilder:validation:Optional UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,4,opt,name=updatedAt"` // The current number of rows in the feature group. //+kubebuilder:validation:Optional Rows int32 `json:"rows,omitempty" protobuf:"varint,5,opt,name=rows"` // the ingest schedule status. During ingest we test and profile the current feature group content. //+kubebuilder:validation:Optional IngestSchedule catalog.RunScheduleStatus `json:"ingestSchedule,omitempty" protobuf:"bytes,7,opt,name=ingestSchedule"` // The status of the sync schedule //+kubebuilder:validation:Optional SyncSchedule catalog.RunScheduleStatus `json:"syncSchedule,omitempty" protobuf:"bytes,8,opt,name=syncSchedule"` // Holds the last online table location. set the null when running the sync process //+kubebuilder:validation:Optional OnlineTable DataLocation `json:"onlineTable,omitempty" protobuf:"bytes,9,opt,name=onelineTable"` // The last time an online table was created //+kubebuilder:validation:Optional OnlineTableCreated *metav1.Time `json:"onlineTableCreated,omitempty" protobuf:"bytes,10,opt,name=onelineTableCreated"` // The last monitor dataset name //+kubebuilder:validation:Optional IngestDatasetName string `json:"ingestDatasetName,omitempty" protobuf:"bytes,11,opt,name=ingestDatasetName"` // In the case of failure, the Study resource controller will set this field with a failure reason //+kubebuilder:validation:Optional FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,12,opt,name=failureReason"` // In the case of failure, the Study resource controller will set this field with a failure message //+kubebuilder:validation:Optional FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,13,opt,name=failureMessage"` // +patchMergeKey=type // +patchStrategy=merge // +kubebuilder:validation:Optional Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,14,rep,name=conditions"` }
FeatureGroupStatus defines the observed state of a FeatureGroup
func (*FeatureGroupStatus) DeepCopy ¶ added in v0.5.282
func (in *FeatureGroupStatus) DeepCopy() *FeatureGroupStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureGroupStatus.
func (*FeatureGroupStatus) DeepCopyInto ¶ added in v0.5.282
func (in *FeatureGroupStatus) DeepCopyInto(out *FeatureGroupStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*FeatureGroupStatus) Descriptor ¶ added in v0.5.282
func (*FeatureGroupStatus) Descriptor() ([]byte, []int)
func (*FeatureGroupStatus) Marshal ¶ added in v0.5.282
func (m *FeatureGroupStatus) Marshal() (dAtA []byte, err error)
func (*FeatureGroupStatus) MarshalTo ¶ added in v0.5.282
func (m *FeatureGroupStatus) MarshalTo(dAtA []byte) (int, error)
func (*FeatureGroupStatus) MarshalToSizedBuffer ¶ added in v0.5.282
func (m *FeatureGroupStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*FeatureGroupStatus) ProtoMessage ¶ added in v0.5.282
func (*FeatureGroupStatus) ProtoMessage()
func (*FeatureGroupStatus) Reset ¶ added in v0.5.282
func (m *FeatureGroupStatus) Reset()
func (*FeatureGroupStatus) Size ¶ added in v0.5.282
func (m *FeatureGroupStatus) Size() (n int)
func (*FeatureGroupStatus) String ¶ added in v0.5.282
func (this *FeatureGroupStatus) String() string
func (*FeatureGroupStatus) Unmarshal ¶ added in v0.5.282
func (m *FeatureGroupStatus) Unmarshal(dAtA []byte) error
func (*FeatureGroupStatus) XXX_DiscardUnknown ¶ added in v0.5.282
func (m *FeatureGroupStatus) XXX_DiscardUnknown()
func (*FeatureGroupStatus) XXX_Marshal ¶ added in v0.5.282
func (m *FeatureGroupStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*FeatureGroupStatus) XXX_Merge ¶ added in v0.5.282
func (m *FeatureGroupStatus) XXX_Merge(src proto.Message)
func (*FeatureGroupStatus) XXX_Size ¶ added in v0.5.282
func (m *FeatureGroupStatus) XXX_Size() int
func (*FeatureGroupStatus) XXX_Unmarshal ¶ added in v0.5.282
func (m *FeatureGroupStatus) XXX_Unmarshal(b []byte) error
type FeatureHistogram ¶
type FeatureHistogram struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Spec FeatureHistogramSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"` //+optional Status FeatureHistogramStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"` }
+kubebuilder:object:root=true +kubebuilder:resource:path=featurehistograms,shortName=fh,singular=featurehistogram,categories={data,modela} +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Live",type="boolean",JSONPath=".spec.live" +kubebuilder:printcolumn:name="Start",type="date",JSONPath=".spec.start" +kubebuilder:printcolumn:name="End",type="date",JSONPath=".spec.end" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" FeatureHistogram represents a single feature in the feature store.
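A hedged sketch that classifies a FeatureHistogram using only the predicate methods documented below (Drifted, Expired, Live, IsReady); the import path is assumed as in the earlier sketches.

    package example

    import data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path

    func histogramState(fh data.FeatureHistogram) string {
        switch {
        case fh.Drifted():
            return "drift detected"
        case fh.Expired():
            return "expired"
        case fh.Live():
            return "live"
        case fh.IsReady():
            return "ready"
        default:
            return "pending"
        }
    }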
func ParseFeatureHistogramYaml ¶
func ParseFeatureHistogramYaml(content []byte) (*FeatureHistogram, error)
func (*FeatureHistogram) AddConditions ¶ added in v0.4.514
func (fh *FeatureHistogram) AddConditions()
func (*FeatureHistogram) AddFinalizer ¶
func (fh *FeatureHistogram) AddFinalizer()
func (FeatureHistogram) Archived ¶
func (fh FeatureHistogram) Archived() bool
func (*FeatureHistogram) CreateOrUpdateCond ¶
func (fh *FeatureHistogram) CreateOrUpdateCond(cond metav1.Condition)
Merge or update condition
func (*FeatureHistogram) DeepCopy ¶
func (in *FeatureHistogram) DeepCopy() *FeatureHistogram
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogram.
func (*FeatureHistogram) DeepCopyInto ¶
func (in *FeatureHistogram) DeepCopyInto(out *FeatureHistogram)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*FeatureHistogram) DeepCopyObject ¶
func (in *FeatureHistogram) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*FeatureHistogram) Default ¶
func (fh *FeatureHistogram) Default()
No defaults in this current release
func (*FeatureHistogram) DefaultDriftThreshold ¶ added in v0.5.59
func (fh *FeatureHistogram) DefaultDriftThreshold(metric catalog.Metric) float64
Used during drift unit test generation
func (*FeatureHistogram) Descriptor ¶
func (*FeatureHistogram) Descriptor() ([]byte, []int)
func (FeatureHistogram) DriftAlert ¶ added in v0.4.976
func (fh FeatureHistogram) DriftAlert(tenantRef *v1.ObjectReference, notifierName *string, columns []string) *infra.Alert
func (*FeatureHistogram) Drifted ¶ added in v0.5.74
func (fh *FeatureHistogram) Drifted() bool
func (FeatureHistogram) ErrorAlert ¶ added in v0.4.976
func (fh FeatureHistogram) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert
func (*FeatureHistogram) Expired ¶ added in v0.5.74
func (fh *FeatureHistogram) Expired() bool
Return true if the feature histogram is expired
func (FeatureHistogram) GetCondIdx ¶
func (fh FeatureHistogram) GetCondIdx(t string) int
func (*FeatureHistogram) HasFinalizer ¶
func (fh *FeatureHistogram) HasFinalizer() bool
func (FeatureHistogram) IsArchived ¶ added in v0.4.1023
func (fh FeatureHistogram) IsArchived() bool
func (*FeatureHistogram) IsGitObj ¶
func (fh *FeatureHistogram) IsGitObj() bool
func (FeatureHistogram) IsReady ¶
func (fh FeatureHistogram) IsReady() bool
func (FeatureHistogram) Key ¶
func (fh FeatureHistogram) Key() string
func (*FeatureHistogram) LabelWithCommit ¶
func (fh *FeatureHistogram) LabelWithCommit(commit string, uname string, branch string)
func (FeatureHistogram) Live ¶ added in v0.5.74
func (fh FeatureHistogram) Live() bool
func (*FeatureHistogram) MarkArchived ¶
func (fh *FeatureHistogram) MarkArchived()
func (*FeatureHistogram) MarkDrift ¶ added in v0.5.74
func (fh *FeatureHistogram) MarkDrift()
func (*FeatureHistogram) MarkExpired ¶ added in v0.5.74
func (fh *FeatureHistogram) MarkExpired()
Mark Expired
func (*FeatureHistogram) MarkFailed ¶ added in v0.4.975
func (fh *FeatureHistogram) MarkFailed(msg string)
func (*FeatureHistogram) MarkGenTest ¶ added in v0.5.74
func (fh *FeatureHistogram) MarkGenTest()
MarkGenTest
func (*FeatureHistogram) MarkLive ¶ added in v0.5.74
func (fh *FeatureHistogram) MarkLive()
MarkLive
func (*FeatureHistogram) MarkReady ¶
func (fh *FeatureHistogram) MarkReady()
func (*FeatureHistogram) MarkReadyToTest ¶ added in v0.5.74
func (fh *FeatureHistogram) MarkReadyToTest()
MarkReadyToTest
func (*FeatureHistogram) MarkUnitTestFailed ¶ added in v0.5.44
func (fh *FeatureHistogram) MarkUnitTestFailed(msg string, stop bool)
func (*FeatureHistogram) MarkUnitTested ¶ added in v0.5.44
func (fh *FeatureHistogram) MarkUnitTested()
func (*FeatureHistogram) MarkUnitTesting ¶ added in v0.5.44
func (fh *FeatureHistogram) MarkUnitTesting()
MarkUnitTesting
func (*FeatureHistogram) Marshal ¶
func (m *FeatureHistogram) Marshal() (dAtA []byte, err error)
func (*FeatureHistogram) MarshalToSizedBuffer ¶
func (m *FeatureHistogram) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*FeatureHistogram) ProtoMessage ¶
func (*FeatureHistogram) ProtoMessage()
func (*FeatureHistogram) RemoveFinalizer ¶
func (fh *FeatureHistogram) RemoveFinalizer()
func (*FeatureHistogram) RepEntry ¶
func (fh *FeatureHistogram) RepEntry() (string, error)
func (*FeatureHistogram) RepPath ¶
func (fh *FeatureHistogram) RepPath(root string) (string, error)
Returns the on-disk repository path of the object under the given root
func (*FeatureHistogram) Reset ¶
func (m *FeatureHistogram) Reset()
func (*FeatureHistogram) SetChanged ¶
func (fh *FeatureHistogram) SetChanged()
func (*FeatureHistogram) SetupWebhookWithManager ¶
func (fh *FeatureHistogram) SetupWebhookWithManager(mgr ctrl.Manager) error
func (*FeatureHistogram) ShouldDetectDriftForColumn ¶ added in v0.5.59
func (fh *FeatureHistogram) ShouldDetectDriftForColumn(column string) bool
Returns true if drift should be detected for the given column
func (*FeatureHistogram) ShouldExpire ¶ added in v0.5.77
func (fh *FeatureHistogram) ShouldExpire(maxPredictions int32) bool
Checks whether the feature histogram should expire, given the maximum number of predictions
func (*FeatureHistogram) ShouldGenerateUnitTest ¶ added in v0.5.57
func (fh *FeatureHistogram) ShouldGenerateUnitTest() bool
Checks whether unit tests should be generated for this feature histogram
func (*FeatureHistogram) ShouldUnitTest ¶ added in v0.5.45
func (fh *FeatureHistogram) ShouldUnitTest() bool
Checks whether drift should be computed for this feature histogram; drift is computed when no drift parameters are set and the histogram is available
func (*FeatureHistogram) Size ¶
func (m *FeatureHistogram) Size() (n int)
func (*FeatureHistogram) String ¶
func (this *FeatureHistogram) String() string
func (FeatureHistogram) UnitTested ¶ added in v0.5.44
func (fh FeatureHistogram) UnitTested() bool
func (*FeatureHistogram) Unmarshal ¶
func (m *FeatureHistogram) Unmarshal(dAtA []byte) error
func (FeatureHistogram) ValidateCreate ¶
func (fh FeatureHistogram) ValidateCreate() error
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (FeatureHistogram) ValidateDelete ¶
func (fh FeatureHistogram) ValidateDelete() error
func (FeatureHistogram) ValidateUpdate ¶
func (fh FeatureHistogram) ValidateUpdate(old runtime.Object) error
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*FeatureHistogram) XXX_DiscardUnknown ¶
func (m *FeatureHistogram) XXX_DiscardUnknown()
func (*FeatureHistogram) XXX_Marshal ¶
func (m *FeatureHistogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*FeatureHistogram) XXX_Merge ¶
func (m *FeatureHistogram) XXX_Merge(src proto.Message)
func (*FeatureHistogram) XXX_Size ¶
func (m *FeatureHistogram) XXX_Size() int
func (*FeatureHistogram) XXX_Unmarshal ¶
func (m *FeatureHistogram) XXX_Unmarshal(b []byte) error
type FeatureHistogramConditionType ¶
type FeatureHistogramConditionType string
FeatureHistogramConditionType is the type of a condition on a FeatureHistogram
type FeatureHistogramList ¶
type FeatureHistogramList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"` Items []FeatureHistogram `json:"items" protobuf:"bytes,2,rep,name=items"` }
+kubebuilder:object:root=true FeatureHistogramList contains a list of FeatureHistogram objects
func (*FeatureHistogramList) DeepCopy ¶
func (in *FeatureHistogramList) DeepCopy() *FeatureHistogramList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramList.
func (*FeatureHistogramList) DeepCopyInto ¶
func (in *FeatureHistogramList) DeepCopyInto(out *FeatureHistogramList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*FeatureHistogramList) DeepCopyObject ¶
func (in *FeatureHistogramList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*FeatureHistogramList) Descriptor ¶
func (*FeatureHistogramList) Descriptor() ([]byte, []int)
func (*FeatureHistogramList) Marshal ¶
func (m *FeatureHistogramList) Marshal() (dAtA []byte, err error)
func (*FeatureHistogramList) MarshalTo ¶
func (m *FeatureHistogramList) MarshalTo(dAtA []byte) (int, error)
func (*FeatureHistogramList) MarshalToSizedBuffer ¶
func (m *FeatureHistogramList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*FeatureHistogramList) ProtoMessage ¶
func (*FeatureHistogramList) ProtoMessage()
func (*FeatureHistogramList) Reset ¶
func (m *FeatureHistogramList) Reset()
func (*FeatureHistogramList) Size ¶
func (m *FeatureHistogramList) Size() (n int)
func (*FeatureHistogramList) String ¶
func (this *FeatureHistogramList) String() string
func (*FeatureHistogramList) Unmarshal ¶
func (m *FeatureHistogramList) Unmarshal(dAtA []byte) error
func (*FeatureHistogramList) XXX_DiscardUnknown ¶
func (m *FeatureHistogramList) XXX_DiscardUnknown()
func (*FeatureHistogramList) XXX_Marshal ¶
func (m *FeatureHistogramList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*FeatureHistogramList) XXX_Merge ¶
func (m *FeatureHistogramList) XXX_Merge(src proto.Message)
func (*FeatureHistogramList) XXX_Size ¶
func (m *FeatureHistogramList) XXX_Size() int
func (*FeatureHistogramList) XXX_Unmarshal ¶
func (m *FeatureHistogramList) XXX_Unmarshal(b []byte) error
type FeatureHistogramPhase ¶ added in v0.4.975
type FeatureHistogramPhase string
const (
	FeatureHistogramPhasePending     FeatureHistogramPhase = "Pending"        // while the feature histogram is being generated
	FeatureHistogramPhaseLive        FeatureHistogramPhase = "Live"           // the feature histogram is live, i.e. updated by predictions
	FeatureHistogramPhaseExpired     FeatureHistogramPhase = "Expired"        // the feature histogram has expired, i.e. no longer updated by predictions
	FeatureHistogramPhaseGenTest     FeatureHistogramPhase = "GeneratingTest" // the unit tests are being generated
	FeatureHistogramPhaseReadyToTest FeatureHistogramPhase = "ReadyToTest"    // the unit tests were generated or the histogram already has tests
	FeatureHistogramPhaseUnitTesting FeatureHistogramPhase = "UnitTesting"    // the unit tests are running
	FeatureHistogramPhaseDrift       FeatureHistogramPhase = "Drift"          // one or more columns drifted
	FeatureHistogramPhaseReady       FeatureHistogramPhase = "Ready"          // ready and not drifted
	FeatureHistogramPhaseFailed      FeatureHistogramPhase = "Failed"         // processing failed
)
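The phase drives the FeatureHistogram lifecycle. As a hedged illustration only, a consumer of the API might branch on the phase as in the sketch below; the helper is hypothetical and not part of the generated package.
// phaseSummary is a hypothetical helper, written as if inside this package,
// that turns a FeatureHistogramPhase into a short human-readable summary.
func phaseSummary(phase FeatureHistogramPhase) string {
	switch phase {
	case FeatureHistogramPhaseLive:
		return "live: the histogram is being updated by predictions"
	case FeatureHistogramPhaseDrift:
		return "drift: one or more columns drifted from the base histogram"
	case FeatureHistogramPhaseFailed:
		return "failed: see the failure reason and message in the status"
	default:
		return string(phase)
	}
}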
type FeatureHistogramSpec ¶
type FeatureHistogramSpec struct { // The feature owner // +kubebuilder:validation:Optional // +kubebuilder:default:="no-one" Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"` // The product version for the feature. // +kubebuilder:default:="" // +kubebuilder:validation:Optional VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"` // A description of the feature // +kubebuilder:validation:Optional // +kubebuilder:default:="" // +kubebuilder:validation:MaxLength=512 Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"` // The list of columns to generate histograms for. // +kubebuilder:validation:Optional Columns []string `json:"columns,omitempty" protobuf:"bytes,5,rep,name=columns"` // A reference to the dataset or predictor that contains the columns for this histogram // +kubebuilder:validation:Optional SourceRef *v1.ObjectReference `json:"sourceRef,omitempty" protobuf:"bytes,6,opt,name=sourceRef"` // If true, this is a training dataset feature histogram. If false, the histogram was generated during serving. // +kubebuilder:default:=false // +kubebuilder:validation:Optional Training *bool `json:"training,omitempty" protobuf:"varint,7,opt,name=training"` // If true, this is an active feature histogram that is being live-updated by the predictorlet // +kubebuilder:default:=false // +kubebuilder:validation:Optional Live *bool `json:"live,omitempty" protobuf:"varint,9,opt,name=live"` // The start time of this feature histogram. For a training dataset histogram this is set to the creation time of the dataset // +kubebuilder:validation:Optional Start *metav1.Time `json:"start,omitempty" protobuf:"bytes,10,opt,name=start"` // The end time of the feature histogram. If reached, the predictor will start a new feature histogram // +kubebuilder:validation:Optional End *metav1.Time `json:"end,omitempty" protobuf:"bytes,11,opt,name=end"` // The histogram to compare against for the data drift calculation // +kubebuilder:validation:Optional BaseRef v1.ObjectReference `json:"baseRef,omitempty" protobuf:"bytes,12,opt,name=baseRef"` // The drift thresholds. These are usually assigned from the predictor. // +kubebuilder:validation:Optional DriftThresholds []DriftThreshold `json:"driftThresholds" protobuf:"bytes,13,rep,name=driftThresholds"` // The interval, in seconds, at which the in-memory histograms are synced to etcd. Default is one minute. // +kubebuilder:default:=60 // +kubebuilder:validation:Optional SyncIntervalSec *int32 `json:"syncIntervalSec,omitempty" protobuf:"varint,14,opt,name=syncIntervalSec"` // The test suite for this histogram. // +kubebuilder:validation:Optional UnitTests catalog.TestSuite `json:"unitTests,omitempty" protobuf:"bytes,15,opt,name=unitTests"` // If true, generate the unit tests // +kubebuilder:default:=false // +kubebuilder:validation:Optional GenUnitTests *bool `json:"genUnitTests,omitempty" protobuf:"varint,16,opt,name=genUnitTests"` // The feature filter for this unit test. // +kubebuilder:validation:Optional FeatureFilter catalog.FeatureFilterType `json:"featureFilter,omitempty" protobuf:"bytes,17,opt,name=featureFilter"` // The reference data type for this unit test // +kubebuilder:validation:Optional ReferenceType catalog.ReferenceDataType `json:"referenceType,omitempty" protobuf:"bytes,18,opt,name=referenceType"` }
FeatureHistogramSpec contains the desired state of a FeatureHistogram
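As a minimal sketch only, written as if inside this package (all field values, including the owner account and predictor name, are assumptions; v1 refers to k8s.io/api/core/v1), a serving-time spec could be populated like this:
func exampleFeatureHistogramSpec() FeatureHistogramSpec {
	owner := "data-team"                   // assumed owner account
	training, live, genTests := false, true, true
	syncSec := int32(60)                   // sync the in-memory histograms every minute
	return FeatureHistogramSpec{
		Owner:           &owner,
		Columns:         []string{"age", "income"}, // columns to build histograms for
		Training:        &training,                 // serving-time histogram, not a training-set histogram
		Live:            &live,                     // live-updated by the predictor
		SyncIntervalSec: &syncSec,
		GenUnitTests:    &genTests,                 // ask the controller to generate drift unit tests
		SourceRef: &v1.ObjectReference{             // hypothetical predictor reference
			Kind: "Predictor",
			Name: "churn-predictor",
		},
	}
}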
func (*FeatureHistogramSpec) DeepCopy ¶
func (in *FeatureHistogramSpec) DeepCopy() *FeatureHistogramSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramSpec.
func (*FeatureHistogramSpec) DeepCopyInto ¶
func (in *FeatureHistogramSpec) DeepCopyInto(out *FeatureHistogramSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*FeatureHistogramSpec) Descriptor ¶
func (*FeatureHistogramSpec) Descriptor() ([]byte, []int)
func (*FeatureHistogramSpec) Marshal ¶
func (m *FeatureHistogramSpec) Marshal() (dAtA []byte, err error)
func (*FeatureHistogramSpec) MarshalTo ¶
func (m *FeatureHistogramSpec) MarshalTo(dAtA []byte) (int, error)
func (*FeatureHistogramSpec) MarshalToSizedBuffer ¶
func (m *FeatureHistogramSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*FeatureHistogramSpec) ProtoMessage ¶
func (*FeatureHistogramSpec) ProtoMessage()
func (*FeatureHistogramSpec) Reset ¶
func (m *FeatureHistogramSpec) Reset()
func (*FeatureHistogramSpec) Size ¶
func (m *FeatureHistogramSpec) Size() (n int)
func (*FeatureHistogramSpec) String ¶
func (this *FeatureHistogramSpec) String() string
func (*FeatureHistogramSpec) Unmarshal ¶
func (m *FeatureHistogramSpec) Unmarshal(dAtA []byte) error
func (*FeatureHistogramSpec) XXX_DiscardUnknown ¶
func (m *FeatureHistogramSpec) XXX_DiscardUnknown()
func (*FeatureHistogramSpec) XXX_Marshal ¶
func (m *FeatureHistogramSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*FeatureHistogramSpec) XXX_Merge ¶
func (m *FeatureHistogramSpec) XXX_Merge(src proto.Message)
func (*FeatureHistogramSpec) XXX_Size ¶
func (m *FeatureHistogramSpec) XXX_Size() int
func (*FeatureHistogramSpec) XXX_Unmarshal ¶
func (m *FeatureHistogramSpec) XXX_Unmarshal(b []byte) error
type FeatureHistogramStatus ¶
type FeatureHistogramStatus struct { // ObservedGeneration is the last generation that was acted on //+kubebuilder:validation:Optional ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"` // The histogram values, a map from column name to a histogram // +kubebuilder:validation:Optional Columns []ColumnHistogram `json:"columns,omitempty" protobuf:"bytes,2,rep,name=columns"` // Last time the object was updated //+kubebuilder:validation:Optional UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,3,opt,name=updatedAt"` // The log file specification that determines the location of all logs produced by the object // +kubebuilder:validation:Optional Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,5,opt,name=logs"` // The phase of the feature histogram // +kubebuilder:validation:Optional Phase FeatureHistogramPhase `json:"phase,omitempty" protobuf:"bytes,6,opt,name=phase"` // In the case of failure, the controller will set this field with a failure reason //+kubebuilder:validation:Optional FailureReason catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,7,opt,name=failureReason"` // In the case of failure, the controller will set this field with a failure message //+kubebuilder:validation:Optional FailureMessage string `json:"failureMessage,omitempty" protobuf:"bytes,8,opt,name=failureMessage"` // The test suite result for this histogram //+kubebuilder:validation:Optional UnitTestsResult catalog.TestSuiteResult `json:"unitTestsResult,omitempty" protobuf:"bytes,9,opt,name=unitTestsResult"` // Total predictions recorded by this feature histogram //+kubebuilder:validation:Optional Total int32 `json:"total,omitempty" protobuf:"varint,10,opt,name=total"` // The number of prediction errors //+kubebuilder:validation:Optional Errors int32 `json:"errors,omitempty" protobuf:"varint,11,opt,name=errors"` // +patchMergeKey=type // +patchStrategy=merge // +kubebuilder:validation:Optional Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,12,rep,name=conditions"` }
FeatureHistogramStatus defines the observed state of FeatureHistogram
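A small hedged sketch of inspecting the observed state, written as if inside this package; the field names come from FeatureHistogramStatus above, while the reporting logic itself is illustrative and assumes the standard library "fmt" package is imported.
// summarizeStatus is a hypothetical helper that summarizes a FeatureHistogramStatus.
func summarizeStatus(status FeatureHistogramStatus) string {
	if status.Phase == FeatureHistogramPhaseDrift {
		return fmt.Sprintf("drift detected; %d column histograms recorded", len(status.Columns))
	}
	return fmt.Sprintf("phase=%s total=%d errors=%d", status.Phase, status.Total, status.Errors)
}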
func (*FeatureHistogramStatus) DeepCopy ¶
func (in *FeatureHistogramStatus) DeepCopy() *FeatureHistogramStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramStatus.
func (*FeatureHistogramStatus) DeepCopyInto ¶
func (in *FeatureHistogramStatus) DeepCopyInto(out *FeatureHistogramStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*FeatureHistogramStatus) Descriptor ¶
func (*FeatureHistogramStatus) Descriptor() ([]byte, []int)
func (*FeatureHistogramStatus) Marshal ¶
func (m *FeatureHistogramStatus) Marshal() (dAtA []byte, err error)
func (*FeatureHistogramStatus) MarshalTo ¶
func (m *FeatureHistogramStatus) MarshalTo(dAtA []byte) (int, error)
func (*FeatureHistogramStatus) MarshalToSizedBuffer ¶
func (m *FeatureHistogramStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*FeatureHistogramStatus) ProtoMessage ¶
func (*FeatureHistogramStatus) ProtoMessage()
func (*FeatureHistogramStatus) Reset ¶
func (m *FeatureHistogramStatus) Reset()
func (*FeatureHistogramStatus) Size ¶
func (m *FeatureHistogramStatus) Size() (n int)
func (*FeatureHistogramStatus) String ¶
func (this *FeatureHistogramStatus) String() string
func (*FeatureHistogramStatus) Unmarshal ¶
func (m *FeatureHistogramStatus) Unmarshal(dAtA []byte) error
func (*FeatureHistogramStatus) XXX_DiscardUnknown ¶
func (m *FeatureHistogramStatus) XXX_DiscardUnknown()
func (*FeatureHistogramStatus) XXX_Marshal ¶
func (m *FeatureHistogramStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*FeatureHistogramStatus) XXX_Merge ¶
func (m *FeatureHistogramStatus) XXX_Merge(src proto.Message)
func (*FeatureHistogramStatus) XXX_Size ¶
func (m *FeatureHistogramStatus) XXX_Size() int
func (*FeatureHistogramStatus) XXX_Unmarshal ¶
func (m *FeatureHistogramStatus) XXX_Unmarshal(b []byte) error
type FlatFileFormatSpec ¶ added in v0.4.861
type FlatFileFormatSpec struct { // The file type of incoming data which uses the DataSource (by default, a CSV file) // +kubebuilder:default:="csv" // +kubebuilder:validation:Optional FileType *FlatFileType `json:"fileType,omitempty" protobuf:"bytes,1,opt,name=fileType"` // The file format for CSV files, if applicable // +kubebuilder:validation:Optional Csv CsvFileSpec `json:"csv,omitempty" protobuf:"bytes,2,opt,name=csv"` // The file format for Excel files, if applicable // +kubebuilder:validation:Optional Excel ExcelNotebookSpec `json:"excel,omitempty" protobuf:"bytes,3,opt,name=excel"` // The file format for Parquet files, if applicable // +kubebuilder:validation:Optional Parquet ParquetFileSpec `json:"parquet,omitempty" protobuf:"bytes,4,opt,name=parquet"` }
FlatFileFormatSpec defines the format for incoming flat-files to be parsed
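A hedged sketch of declaring the flat-file format for a CSV data source, written as if inside this package; the nested CsvFileSpec is left at its zero value because its CSV-specific options are documented with that type.
func exampleFlatFileFormat() FlatFileFormatSpec {
	fileType := FlatFileTypeCsv // constant from the FlatFileType enumeration below
	return FlatFileFormatSpec{
		FileType: &fileType,
		// Csv: CsvFileSpec{...} would carry the CSV-specific options when FileType is csv.
	}
}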
func (*FlatFileFormatSpec) DeepCopy ¶ added in v0.4.861
func (in *FlatFileFormatSpec) DeepCopy() *FlatFileFormatSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FlatFileFormatSpec.
func (*FlatFileFormatSpec) DeepCopyInto ¶ added in v0.4.861
func (in *FlatFileFormatSpec) DeepCopyInto(out *FlatFileFormatSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*FlatFileFormatSpec) Descriptor ¶ added in v0.4.861
func (*FlatFileFormatSpec) Descriptor() ([]byte, []int)
func (*FlatFileFormatSpec) Marshal ¶ added in v0.4.861
func (m *FlatFileFormatSpec) Marshal() (dAtA []byte, err error)
func (*FlatFileFormatSpec) MarshalTo ¶ added in v0.4.861
func (m *FlatFileFormatSpec) MarshalTo(dAtA []byte) (int, error)
func (*FlatFileFormatSpec) MarshalToSizedBuffer ¶ added in v0.4.861
func (m *FlatFileFormatSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*FlatFileFormatSpec) ProtoMessage ¶ added in v0.4.861
func (*FlatFileFormatSpec) ProtoMessage()
func (*FlatFileFormatSpec) Reset ¶ added in v0.4.861
func (m *FlatFileFormatSpec) Reset()
func (*FlatFileFormatSpec) Size ¶ added in v0.4.861
func (m *FlatFileFormatSpec) Size() (n int)
func (*FlatFileFormatSpec) String ¶ added in v0.4.861
func (this *FlatFileFormatSpec) String() string
func (*FlatFileFormatSpec) Unmarshal ¶ added in v0.4.861
func (m *FlatFileFormatSpec) Unmarshal(dAtA []byte) error
func (*FlatFileFormatSpec) XXX_DiscardUnknown ¶ added in v0.4.861
func (m *FlatFileFormatSpec) XXX_DiscardUnknown()
func (*FlatFileFormatSpec) XXX_Marshal ¶ added in v0.4.861
func (m *FlatFileFormatSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*FlatFileFormatSpec) XXX_Merge ¶ added in v0.4.861
func (m *FlatFileFormatSpec) XXX_Merge(src proto.Message)
func (*FlatFileFormatSpec) XXX_Size ¶ added in v0.4.861
func (m *FlatFileFormatSpec) XXX_Size() int
func (*FlatFileFormatSpec) XXX_Unmarshal ¶ added in v0.4.861
func (m *FlatFileFormatSpec) XXX_Unmarshal(b []byte) error
type FlatFileType ¶
type FlatFileType string
FlatFileType represents the type of a flat file +kubebuilder:validation:Enum="csv";"tsv";"excel";"fwf";"hdf";"html";"json";"pickle";"sas";"stata";"feather";"parquet";
const ( FlatFileTypeCsv FlatFileType = "csv" FlatFileTypeTable FlatFileType = "tsv" FlatFileTypeExcel FlatFileType = "excel" FlatFileTypeFwf FlatFileType = "fwf" FlatFileTypeHdf5 FlatFileType = "hdf" FlatFileTypeHtml FlatFileType = "html" FlatFileTypeJson FlatFileType = "json" FlatFileTypePickle FlatFileType = "pickle" FlatFileTypeSas FlatFileType = "sas" FlatFileTypeStata FlatFileType = "stata" FlatFileTypeFeather FlatFileType = "feather" FlatFileTypeParquet FlatFileType = "parquet" )
type GaugeSpec ¶
type GaugeSpec struct { // Dataset is the name of the dataset // +kubebuilder:validation:Optional DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"` // Column is the name of the column // +kubebuilder:validation:Optional Column *string `json:"column,omitempty" protobuf:"bytes,2,opt,name=column"` // Row is the row number // +kubebuilder:validation:Optional Row *int32 `json:"row,omitempty" protobuf:"varint,3,opt,name=row"` // Scalar is a constant value // +kubebuilder:validation:Optional Scalar *string `json:"scalar,omitempty" protobuf:"bytes,4,opt,name=scalar"` }
func (*GaugeSpec) DeepCopy ¶
func (in *GaugeSpec) DeepCopy() *GaugeSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GaugeSpec.
func (*GaugeSpec) DeepCopyInto ¶
func (in *GaugeSpec) DeepCopyInto(out *GaugeSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*GaugeSpec) Descriptor ¶
func (*GaugeSpec) Descriptor() ([]byte, []int)
func (*GaugeSpec) MarshalToSizedBuffer ¶
func (m *GaugeSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*GaugeSpec) ProtoMessage ¶
func (*GaugeSpec) ProtoMessage()
func (*GaugeSpec) XXX_DiscardUnknown ¶
func (m *GaugeSpec) XXX_DiscardUnknown()
func (*GaugeSpec) XXX_Marshal ¶
func (m *GaugeSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*GaugeSpec) XXX_Unmarshal ¶
func (m *GaugeSpec) XXX_Unmarshal(b []byte) error
type GitLocation ¶
type GitLocation struct { // The Git Connection resource which exists in the same tenant as the parent DataProduct // +kubebuilder:default:="" // +kubebuilder:validation:Optional GitConnectionName *string `json:"gitConnectionName,omitempty" protobuf:"bytes,1,opt,name=gitConnectionName"` // The URL to the destination Git repository // +kubebuilder:default:="" // +kubebuilder:validation:MaxLength=256 URL *string `json:"url,omitempty" protobuf:"bytes,2,opt,name=url"` // The branch inside the Git repository // +kubebuilder:default:="" // +kubebuilder:validation:MaxLength=256 Branch *string `json:"branch,omitempty" protobuf:"bytes,3,opt,name=branch"` // Indicates if the repository is private // +kubebuilder:default:=true Private *bool `json:"private,omitempty" protobuf:"varint,4,opt,name=private"` }
GitLocation specifies the Git location where Modela will track resources as YAML
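A minimal sketch of a GitLocation value, written as if inside this package; the Connection name, repository URL, and branch are illustrative assumptions.
func exampleGitLocation() GitLocation {
	conn := "github-connection" // assumed Git Connection resource name
	url := "https://github.com/example/modela-resources"
	branch := "main"
	private := true
	return GitLocation{
		GitConnectionName: &conn,
		URL:               &url,
		Branch:            &branch,
		Private:           &private,
	}
}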
func (*GitLocation) DeepCopy ¶
func (in *GitLocation) DeepCopy() *GitLocation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GitLocation.
func (*GitLocation) DeepCopyInto ¶
func (in *GitLocation) DeepCopyInto(out *GitLocation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*GitLocation) Descriptor ¶
func (*GitLocation) Descriptor() ([]byte, []int)
func (*GitLocation) Marshal ¶
func (m *GitLocation) Marshal() (dAtA []byte, err error)
func (*GitLocation) MarshalToSizedBuffer ¶
func (m *GitLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*GitLocation) ProtoMessage ¶
func (*GitLocation) ProtoMessage()
func (*GitLocation) Reset ¶
func (m *GitLocation) Reset()
func (*GitLocation) Size ¶
func (m *GitLocation) Size() (n int)
func (*GitLocation) String ¶
func (this *GitLocation) String() string
func (*GitLocation) Unmarshal ¶
func (m *GitLocation) Unmarshal(dAtA []byte) error
func (*GitLocation) XXX_DiscardUnknown ¶
func (m *GitLocation) XXX_DiscardUnknown()
func (*GitLocation) XXX_Marshal ¶
func (m *GitLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*GitLocation) XXX_Merge ¶
func (m *GitLocation) XXX_Merge(src proto.Message)
func (*GitLocation) XXX_Size ¶
func (m *GitLocation) XXX_Size() int
func (*GitLocation) XXX_Unmarshal ¶
func (m *GitLocation) XXX_Unmarshal(b []byte) error
type GovernanceReviewStatus ¶
type GovernanceReviewStatus struct { // The approval status, which can be approved or rejected Result ApprovalType `json:"result,omitempty" protobuf:"bytes,1,opt,name=result"` // The date of the approval ApprovedAt *metav1.Time `json:"approvedAt,omitempty" protobuf:"bytes,2,opt,name=approvedAt"` // Notes taken during the review Notes string `json:"notes,omitempty" protobuf:"bytes,3,opt,name=notes"` }
func (*GovernanceReviewStatus) DeepCopy ¶
func (in *GovernanceReviewStatus) DeepCopy() *GovernanceReviewStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GovernanceReviewStatus.
func (*GovernanceReviewStatus) DeepCopyInto ¶
func (in *GovernanceReviewStatus) DeepCopyInto(out *GovernanceReviewStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*GovernanceReviewStatus) Descriptor ¶
func (*GovernanceReviewStatus) Descriptor() ([]byte, []int)
func (*GovernanceReviewStatus) Marshal ¶
func (m *GovernanceReviewStatus) Marshal() (dAtA []byte, err error)
func (*GovernanceReviewStatus) MarshalTo ¶
func (m *GovernanceReviewStatus) MarshalTo(dAtA []byte) (int, error)
func (*GovernanceReviewStatus) MarshalToSizedBuffer ¶
func (m *GovernanceReviewStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*GovernanceReviewStatus) ProtoMessage ¶
func (*GovernanceReviewStatus) ProtoMessage()
func (*GovernanceReviewStatus) Reset ¶
func (m *GovernanceReviewStatus) Reset()
func (*GovernanceReviewStatus) Size ¶
func (m *GovernanceReviewStatus) Size() (n int)
func (*GovernanceReviewStatus) String ¶
func (this *GovernanceReviewStatus) String() string
func (*GovernanceReviewStatus) Unmarshal ¶
func (m *GovernanceReviewStatus) Unmarshal(dAtA []byte) error
func (*GovernanceReviewStatus) XXX_DiscardUnknown ¶
func (m *GovernanceReviewStatus) XXX_DiscardUnknown()
func (*GovernanceReviewStatus) XXX_Marshal ¶
func (m *GovernanceReviewStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*GovernanceReviewStatus) XXX_Merge ¶
func (m *GovernanceReviewStatus) XXX_Merge(src proto.Message)
func (*GovernanceReviewStatus) XXX_Size ¶
func (m *GovernanceReviewStatus) XXX_Size() int
func (*GovernanceReviewStatus) XXX_Unmarshal ¶
func (m *GovernanceReviewStatus) XXX_Unmarshal(b []byte) error
type GovernanceSpec ¶
type GovernanceSpec struct { // Indicates if governance is enabled // +kubebuilder:default:=false // +kubebuilder:validation:Optional Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"` // The country whose regulations are under consideration // +kubebuilder:validation:Optional Country *string `json:"country,omitempty" protobuf:"bytes,2,opt,name=country"` // The account name of the IT reviewer // +kubebuilder:validation:Optional ITReviewer *string `json:"itReviewer,omitempty" protobuf:"bytes,3,opt,name=itReviewer"` // The account name of the compliance reviewer // +kubebuilder:validation:Optional ComplianceReviewer *string `json:"complianceReviewer,omitempty" protobuf:"bytes,4,opt,name=complianceReviewer"` // The account name of the business reviewer // +kubebuilder:validation:Optional BusinessReviewer *string `json:"businessReviewer,omitempty" protobuf:"bytes,5,opt,name=businessReviewer"` // The account names of the team members that govern this data product. // +kubebuilder:validation:Optional Members []string `json:"members,omitempty" protobuf:"bytes,6,rep,name=members"` }
GovernanceSpec describes the governance requirements for models produced under a DataProduct
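A hedged sketch of enabling governance, written as if inside this package; all account names are placeholders, not values from the documentation.
func exampleGovernanceSpec() GovernanceSpec {
	enabled := true
	country := "US" // assumed jurisdiction
	it, compliance, business := "it-reviewer", "compliance-reviewer", "business-reviewer" // assumed accounts
	return GovernanceSpec{
		Enabled:            &enabled,
		Country:            &country,
		ITReviewer:         &it,
		ComplianceReviewer: &compliance,
		BusinessReviewer:   &business,
		Members:            []string{"alice", "bob"}, // assumed team member accounts
	}
}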
func (*GovernanceSpec) DeepCopy ¶
func (in *GovernanceSpec) DeepCopy() *GovernanceSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GovernanceSpec.
func (*GovernanceSpec) DeepCopyInto ¶
func (in *GovernanceSpec) DeepCopyInto(out *GovernanceSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*GovernanceSpec) Descriptor ¶
func (*GovernanceSpec) Descriptor() ([]byte, []int)
func (*GovernanceSpec) Marshal ¶
func (m *GovernanceSpec) Marshal() (dAtA []byte, err error)
func (*GovernanceSpec) MarshalToSizedBuffer ¶
func (m *GovernanceSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*GovernanceSpec) ProtoMessage ¶
func (*GovernanceSpec) ProtoMessage()
func (*GovernanceSpec) Reset ¶
func (m *GovernanceSpec) Reset()
func (*GovernanceSpec) Size ¶
func (m *GovernanceSpec) Size() (n int)
func (*GovernanceSpec) String ¶
func (this *GovernanceSpec) String() string
func (*GovernanceSpec) Unmarshal ¶
func (m *GovernanceSpec) Unmarshal(dAtA []byte) error
func (*GovernanceSpec) XXX_DiscardUnknown ¶
func (m *GovernanceSpec) XXX_DiscardUnknown()
func (*GovernanceSpec) XXX_Marshal ¶
func (m *GovernanceSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*GovernanceSpec) XXX_Merge ¶
func (m *GovernanceSpec) XXX_Merge(src proto.Message)
func (*GovernanceSpec) XXX_Size ¶
func (m *GovernanceSpec) XXX_Size() int
func (*GovernanceSpec) XXX_Unmarshal ¶
func (m *GovernanceSpec) XXX_Unmarshal(b []byte) error
type GovernanceStatus ¶
type GovernanceStatus struct { // The review status for the IT department // +kubebuilder:validation:Optional ITReviewStatus GovernanceReviewStatus `json:"ITReviewStatus,omitempty" protobuf:"bytes,1,opt,name=ITReviewStatus"` // The review status for the compliance department // +kubebuilder:validation:Optional ComplianceReviewStatus GovernanceReviewStatus `json:"complianceReviewStatus,omitempty" protobuf:"bytes,2,opt,name=complianceReviewStatus"` // The review status for the business department // +kubebuilder:validation:Optional BusinessReviewStatus GovernanceReviewStatus `json:"businessReviewStatus,omitempty" protobuf:"bytes,3,opt,name=businessReviewStatus"` }
GovernanceStatus describes the current state of a governance review for a model
func (*GovernanceStatus) DeepCopy ¶
func (in *GovernanceStatus) DeepCopy() *GovernanceStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GovernanceStatus.
func (*GovernanceStatus) DeepCopyInto ¶
func (in *GovernanceStatus) DeepCopyInto(out *GovernanceStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*GovernanceStatus) Descriptor ¶
func (*GovernanceStatus) Descriptor() ([]byte, []int)
func (*GovernanceStatus) Marshal ¶
func (m *GovernanceStatus) Marshal() (dAtA []byte, err error)
func (*GovernanceStatus) MarshalToSizedBuffer ¶
func (m *GovernanceStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*GovernanceStatus) ProtoMessage ¶
func (*GovernanceStatus) ProtoMessage()
func (*GovernanceStatus) Reset ¶
func (m *GovernanceStatus) Reset()
func (*GovernanceStatus) Size ¶
func (m *GovernanceStatus) Size() (n int)
func (*GovernanceStatus) String ¶
func (this *GovernanceStatus) String() string
func (*GovernanceStatus) Unmarshal ¶
func (m *GovernanceStatus) Unmarshal(dAtA []byte) error
func (*GovernanceStatus) XXX_DiscardUnknown ¶
func (m *GovernanceStatus) XXX_DiscardUnknown()
func (*GovernanceStatus) XXX_Marshal ¶
func (m *GovernanceStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*GovernanceStatus) XXX_Merge ¶
func (m *GovernanceStatus) XXX_Merge(src proto.Message)
func (*GovernanceStatus) XXX_Size ¶
func (m *GovernanceStatus) XXX_Size() int
func (*GovernanceStatus) XXX_Unmarshal ¶
func (m *GovernanceStatus) XXX_Unmarshal(b []byte) error
type GroupBySpec ¶ added in v0.5.174
type GroupBySpec struct { // Enables the group-by; if not enabled, the dataset is treated as defined in the data source. // +kubebuilder:validation:Optional Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"` // For group forecasting, this is the key of the group. // If not specified, this will be the key from the data source. // +kubebuilder:validation:Optional GroupBy []string `json:"groupby,omitempty" protobuf:"bytes,2,rep,name=groupby"` // The time-series frequency; if not specified, the frequency will be the base frequency from the data source. // +kubebuilder:default:="day" // +kubebuilder:validation:Optional Freq *catalog.Freq `json:"freq,omitempty" protobuf:"bytes,3,opt,name=freq"` // The interval to forecast at this level. If not specified, the interval will be the base interval of the data source. // +kubebuilder:default:=1 // +kubebuilder:validation:Optional Interval *int32 `json:"interval,omitempty" protobuf:"varint,4,opt,name=interval"` // The aggregation function, defining how to aggregate. // By default this is the aggregation function from the data source. // +kubebuilder:default:="sum" // +kubebuilder:validation:Optional Aggr catalog.Aggregate `json:"aggr,omitempty" protobuf:"bytes,5,opt,name=aggr"` }
GroupBySpec defines how to group the data before processing.
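A hedged sketch of a group-by configuration for group forecasting, written as if inside this package; the group keys are assumptions, and the catalog-typed Freq and Aggr fields are left at their documented defaults ("day" and "sum").
func exampleGroupBySpec() GroupBySpec {
	enabled := true
	interval := int32(1)
	return GroupBySpec{
		Enabled:  &enabled,
		GroupBy:  []string{"store", "sku"}, // assumed group keys
		Interval: &interval,
		// Freq and Aggr are left unset here and fall back to the defaults above.
	}
}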
func (*GroupBySpec) DeepCopy ¶ added in v0.5.174
func (in *GroupBySpec) DeepCopy() *GroupBySpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupBySpec.
func (*GroupBySpec) DeepCopyInto ¶ added in v0.5.174
func (in *GroupBySpec) DeepCopyInto(out *GroupBySpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*GroupBySpec) Descriptor ¶ added in v0.5.174
func (*GroupBySpec) Descriptor() ([]byte, []int)
func (*GroupBySpec) Marshal ¶ added in v0.5.174
func (m *GroupBySpec) Marshal() (dAtA []byte, err error)
func (*GroupBySpec) MarshalTo ¶ added in v0.5.174
func (m *GroupBySpec) MarshalTo(dAtA []byte) (int, error)
func (*GroupBySpec) MarshalToSizedBuffer ¶ added in v0.5.174
func (m *GroupBySpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*GroupBySpec) ProtoMessage ¶ added in v0.5.174
func (*GroupBySpec) ProtoMessage()
func (*GroupBySpec) Reset ¶ added in v0.5.174
func (m *GroupBySpec) Reset()
func (*GroupBySpec) Size ¶ added in v0.5.174
func (m *GroupBySpec) Size() (n int)
func (*GroupBySpec) String ¶ added in v0.5.174
func (this *GroupBySpec) String() string
func (*GroupBySpec) Unmarshal ¶ added in v0.5.174
func (m *GroupBySpec) Unmarshal(dAtA []byte) error
func (GroupBySpec) Validate ¶ added in v0.5.515
func (gb GroupBySpec) Validate(field string) ([]metav1.StatusCause, bool)
func (*GroupBySpec) XXX_DiscardUnknown ¶ added in v0.5.174
func (m *GroupBySpec) XXX_DiscardUnknown()
func (*GroupBySpec) XXX_Marshal ¶ added in v0.5.174
func (m *GroupBySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*GroupBySpec) XXX_Merge ¶ added in v0.5.174
func (m *GroupBySpec) XXX_Merge(src proto.Message)
func (*GroupBySpec) XXX_Size ¶ added in v0.5.174
func (m *GroupBySpec) XXX_Size() int
func (*GroupBySpec) XXX_Unmarshal ¶ added in v0.5.174
func (m *GroupBySpec) XXX_Unmarshal(b []byte) error
type GroupDatasetLocationsSpec ¶ added in v0.5.210
type GroupDatasetLocationsSpec struct { // The root folder of all the groups GroupsRoot *string `json:"groupsRoot,omitempty" protobuf:"bytes,1,opt,name=groupsRoot"` // The root of the group folders and files // +kubebuilder:validation:Optional GroupRoot *string `json:"groupRoot,omitempty" protobuf:"bytes,2,opt,name=groupRoot"` // The folder of the group data // +kubebuilder:validation:Optional GroupDataFolder *string `json:"groupDataFolder,omitempty" protobuf:"bytes,3,opt,name=groupDataFolder"` // The path of the group data file // +kubebuilder:validation:Optional GroupDataFile *string `json:"groupDataFile,omitempty" protobuf:"bytes,4,opt,name=groupDataFile"` // The folder of the group data profile // +kubebuilder:validation:Optional GroupProfileFolder *string `json:"groupProfileFolder,omitempty" protobuf:"bytes,5,opt,name=groupProfileFolder"` // The path of the group report // +kubebuilder:validation:Optional GroupReportFile *string `json:"groupReportFile,omitempty" protobuf:"bytes,6,opt,name=groupReportFile"` // The path of the group features file // +kubebuilder:validation:Optional GroupFeaturesFile *string `json:"groupFeaturesFile,omitempty" protobuf:"bytes,7,opt,name=groupFeaturesFile"` }
If this is a group dataset, the group spec contains the locations of the different artifacts
func (*GroupDatasetLocationsSpec) DeepCopy ¶ added in v0.5.249
func (in *GroupDatasetLocationsSpec) DeepCopy() *GroupDatasetLocationsSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GroupDatasetLocationsSpec.
func (*GroupDatasetLocationsSpec) DeepCopyInto ¶ added in v0.5.249
func (in *GroupDatasetLocationsSpec) DeepCopyInto(out *GroupDatasetLocationsSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*GroupDatasetLocationsSpec) Descriptor ¶ added in v0.5.210
func (*GroupDatasetLocationsSpec) Descriptor() ([]byte, []int)
func (*GroupDatasetLocationsSpec) Marshal ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) Marshal() (dAtA []byte, err error)
func (*GroupDatasetLocationsSpec) MarshalTo ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) MarshalTo(dAtA []byte) (int, error)
func (*GroupDatasetLocationsSpec) MarshalToSizedBuffer ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*GroupDatasetLocationsSpec) ProtoMessage ¶ added in v0.5.210
func (*GroupDatasetLocationsSpec) ProtoMessage()
func (*GroupDatasetLocationsSpec) Reset ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) Reset()
func (*GroupDatasetLocationsSpec) Size ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) Size() (n int)
func (*GroupDatasetLocationsSpec) String ¶ added in v0.5.210
func (this *GroupDatasetLocationsSpec) String() string
func (*GroupDatasetLocationsSpec) Unmarshal ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) Unmarshal(dAtA []byte) error
func (*GroupDatasetLocationsSpec) XXX_DiscardUnknown ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) XXX_DiscardUnknown()
func (*GroupDatasetLocationsSpec) XXX_Marshal ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*GroupDatasetLocationsSpec) XXX_Merge ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) XXX_Merge(src proto.Message)
func (*GroupDatasetLocationsSpec) XXX_Size ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) XXX_Size() int
func (*GroupDatasetLocationsSpec) XXX_Unmarshal ¶ added in v0.5.210
func (m *GroupDatasetLocationsSpec) XXX_Unmarshal(b []byte) error
type HistogramSpec ¶
type HistogramSpec struct { // Dataset is the name of the dataset // +kubebuilder:validation:Optional DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"` // The name of the X column // +kubebuilder:validation:Optional X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"` // The number of histogram bins // +kubebuilder:validation:Optional Bins *int32 `json:"bins,omitempty" protobuf:"varint,3,opt,name=bins"` }
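A minimal sketch of a histogram chart definition, written as if inside this package; the dataset and column names are placeholders.
func exampleHistogramSpec() HistogramSpec {
	dataset, x := "sales-dataset", "revenue" // assumed dataset and column names
	bins := int32(20)
	return HistogramSpec{DatasetName: &dataset, X: &x, Bins: &bins}
}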
func (*HistogramSpec) DeepCopy ¶
func (in *HistogramSpec) DeepCopy() *HistogramSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HistogramSpec.
func (*HistogramSpec) DeepCopyInto ¶
func (in *HistogramSpec) DeepCopyInto(out *HistogramSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*HistogramSpec) Descriptor ¶
func (*HistogramSpec) Descriptor() ([]byte, []int)
func (*HistogramSpec) Marshal ¶
func (m *HistogramSpec) Marshal() (dAtA []byte, err error)
func (*HistogramSpec) MarshalToSizedBuffer ¶
func (m *HistogramSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*HistogramSpec) ProtoMessage ¶
func (*HistogramSpec) ProtoMessage()
func (*HistogramSpec) Reset ¶
func (m *HistogramSpec) Reset()
func (*HistogramSpec) Size ¶
func (m *HistogramSpec) Size() (n int)
func (*HistogramSpec) String ¶
func (this *HistogramSpec) String() string
func (*HistogramSpec) Unmarshal ¶
func (m *HistogramSpec) Unmarshal(dAtA []byte) error
func (*HistogramSpec) XXX_DiscardUnknown ¶
func (m *HistogramSpec) XXX_DiscardUnknown()
func (*HistogramSpec) XXX_Marshal ¶
func (m *HistogramSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*HistogramSpec) XXX_Merge ¶
func (m *HistogramSpec) XXX_Merge(src proto.Message)
func (*HistogramSpec) XXX_Size ¶
func (m *HistogramSpec) XXX_Size() int
func (*HistogramSpec) XXX_Unmarshal ¶
func (m *HistogramSpec) XXX_Unmarshal(b []byte) error
type ImageLocation ¶
type ImageLocation struct { // The canonical name of the image repository. If not set, it will default to docker/{dataproduct_name} // +kubebuilder:default:="" // +kubebuilder:validation:MaxLength=256 Name *string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"` // The image repository Connection resource which exists in the same tenant as the parent DataProduct. If the field // is not set, Modela will ignore the image location and not push images // +kubebuilder:default:="" RegistryConnectionName *string `json:"registryConnectionName,omitempty" protobuf:"bytes,2,opt,name=registryConnectionName"` }
ImageLocation specifies the destination for all model images produced under a DataProduct
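A hedged sketch of an ImageLocation value, written as if inside this package; the repository name and registry Connection are assumptions.
func exampleImageLocation() ImageLocation {
	name := "docker/churn-product"  // assumed image repository name
	conn := "dockerhub-connection"  // assumed image registry Connection resource
	return ImageLocation{Name: &name, RegistryConnectionName: &conn}
}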
func (*ImageLocation) DeepCopy ¶
func (in *ImageLocation) DeepCopy() *ImageLocation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ImageLocation.
func (*ImageLocation) DeepCopyInto ¶
func (in *ImageLocation) DeepCopyInto(out *ImageLocation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ImageLocation) Descriptor ¶
func (*ImageLocation) Descriptor() ([]byte, []int)
func (*ImageLocation) Marshal ¶
func (m *ImageLocation) Marshal() (dAtA []byte, err error)
func (*ImageLocation) MarshalToSizedBuffer ¶
func (m *ImageLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ImageLocation) ProtoMessage ¶
func (*ImageLocation) ProtoMessage()
func (*ImageLocation) Reset ¶
func (m *ImageLocation) Reset()
func (*ImageLocation) Size ¶
func (m *ImageLocation) Size() (n int)
func (*ImageLocation) String ¶
func (this *ImageLocation) String() string
func (*ImageLocation) Unmarshal ¶
func (m *ImageLocation) Unmarshal(dAtA []byte) error
func (*ImageLocation) XXX_DiscardUnknown ¶
func (m *ImageLocation) XXX_DiscardUnknown()
func (*ImageLocation) XXX_Marshal ¶
func (m *ImageLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ImageLocation) XXX_Merge ¶
func (m *ImageLocation) XXX_Merge(src proto.Message)
func (*ImageLocation) XXX_Size ¶
func (m *ImageLocation) XXX_Size() int
func (*ImageLocation) XXX_Unmarshal ¶
func (m *ImageLocation) XXX_Unmarshal(b []byte) error
type KPI ¶
type KPI struct { // The name of the KPI // +kubebuilder:default:="" // +kubebuilder:validation:Optional Name *string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"` // The value of the KPI Value *float64 `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"` }
KPI specifies a key performance indicator for a DataProduct. Currently not implemented.
func (*KPI) DeepCopy ¶
func (in *KPI) DeepCopy() *KPI
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KPI.
func (*KPI) DeepCopyInto ¶
func (in *KPI) DeepCopyInto(out *KPI)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*KPI) Descriptor ¶
func (*KPI) Descriptor() ([]byte, []int)
func (*KPI) ProtoMessage ¶
func (*KPI) ProtoMessage()
func (*KPI) XXX_DiscardUnknown ¶
func (m *KPI) XXX_DiscardUnknown()
func (*KPI) XXX_Unmarshal ¶
func (m *KPI) XXX_Unmarshal(b []byte) error
type LabelingRule ¶ added in v0.4.821
type LabelingRule struct { //+kubebuilder:validation:Optional Column string `json:"column,omitempty" protobuf:"bytes,1,opt,name=column"` //+kubebuilder:validation:Optional Operator catalog.Op `json:"operator,omitempty" protobuf:"bytes,2,opt,name=operator"` //+kubebuilder:validation:Optional Value string `json:"value,omitempty" protobuf:"bytes,3,opt,name=value"` }
LabelingRule defines a column expression
func (*LabelingRule) DeepCopy ¶ added in v0.4.821
func (in *LabelingRule) DeepCopy() *LabelingRule
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingRule.
func (*LabelingRule) DeepCopyInto ¶ added in v0.4.821
func (in *LabelingRule) DeepCopyInto(out *LabelingRule)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*LabelingRule) Descriptor ¶ added in v0.4.821
func (*LabelingRule) Descriptor() ([]byte, []int)
func (*LabelingRule) Marshal ¶ added in v0.4.821
func (m *LabelingRule) Marshal() (dAtA []byte, err error)
func (*LabelingRule) MarshalTo ¶ added in v0.4.821
func (m *LabelingRule) MarshalTo(dAtA []byte) (int, error)
func (*LabelingRule) MarshalToSizedBuffer ¶ added in v0.4.821
func (m *LabelingRule) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*LabelingRule) ProtoMessage ¶ added in v0.4.821
func (*LabelingRule) ProtoMessage()
func (*LabelingRule) Reset ¶ added in v0.4.821
func (m *LabelingRule) Reset()
func (*LabelingRule) Size ¶ added in v0.4.821
func (m *LabelingRule) Size() (n int)
func (*LabelingRule) String ¶ added in v0.4.821
func (this *LabelingRule) String() string
func (*LabelingRule) Unmarshal ¶ added in v0.4.821
func (m *LabelingRule) Unmarshal(dAtA []byte) error
func (*LabelingRule) XXX_DiscardUnknown ¶ added in v0.4.821
func (m *LabelingRule) XXX_DiscardUnknown()
func (*LabelingRule) XXX_Marshal ¶ added in v0.4.821
func (m *LabelingRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*LabelingRule) XXX_Merge ¶ added in v0.4.821
func (m *LabelingRule) XXX_Merge(src proto.Message)
func (*LabelingRule) XXX_Size ¶ added in v0.4.821
func (m *LabelingRule) XXX_Size() int
func (*LabelingRule) XXX_Unmarshal ¶ added in v0.4.821
func (m *LabelingRule) XXX_Unmarshal(b []byte) error
type LabelingSpec ¶ added in v0.4.821
type LabelingSpec struct { // If true, enable labeling. Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"` // The name of the column that will hold the result. ResultColumn string `json:"resultColumn,omitempty" protobuf:"bytes,2,opt,name=resultColumn"` // The list of positive rules. Positive []LabelingRule `json:"positive,omitempty" protobuf:"bytes,3,rep,name=positive"` // The list of negative rules. Negative []LabelingRule `json:"negative,omitempty" protobuf:"bytes,4,rep,name=negative"` }
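A hedged sketch of a labeling configuration combining positive and negative LabelingRules, written as if inside this package; the column names and the catalog.Op literals are illustrative assumptions (see the catalog package for the valid operator values).
func exampleLabelingSpec() LabelingSpec {
	enabled := true
	return LabelingSpec{
		Enabled:      &enabled,
		ResultColumn: "label", // assumed result column name
		Positive: []LabelingRule{
			{Column: "amount", Operator: catalog.Op("gt"), Value: "1000"}, // operator literal is an assumption
		},
		Negative: []LabelingRule{
			{Column: "status", Operator: catalog.Op("eq"), Value: "refunded"},
		},
	}
}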
func (*LabelingSpec) DeepCopy ¶ added in v0.4.821
func (in *LabelingSpec) DeepCopy() *LabelingSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingSpec.
func (*LabelingSpec) DeepCopyInto ¶ added in v0.4.821
func (in *LabelingSpec) DeepCopyInto(out *LabelingSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*LabelingSpec) Descriptor ¶ added in v0.4.821
func (*LabelingSpec) Descriptor() ([]byte, []int)
func (*LabelingSpec) Marshal ¶ added in v0.4.821
func (m *LabelingSpec) Marshal() (dAtA []byte, err error)
func (*LabelingSpec) MarshalTo ¶ added in v0.4.821
func (m *LabelingSpec) MarshalTo(dAtA []byte) (int, error)
func (*LabelingSpec) MarshalToSizedBuffer ¶ added in v0.4.821
func (m *LabelingSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*LabelingSpec) ProtoMessage ¶ added in v0.4.821
func (*LabelingSpec) ProtoMessage()
func (*LabelingSpec) Reset ¶ added in v0.4.821
func (m *LabelingSpec) Reset()
func (*LabelingSpec) Size ¶ added in v0.4.821
func (m *LabelingSpec) Size() (n int)
func (*LabelingSpec) String ¶ added in v0.4.821
func (this *LabelingSpec) String() string
func (*LabelingSpec) Unmarshal ¶ added in v0.4.821
func (m *LabelingSpec) Unmarshal(dAtA []byte) error
func (*LabelingSpec) XXX_DiscardUnknown ¶ added in v0.4.821
func (m *LabelingSpec) XXX_DiscardUnknown()
func (*LabelingSpec) XXX_Marshal ¶ added in v0.4.821
func (m *LabelingSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*LabelingSpec) XXX_Merge ¶ added in v0.4.821
func (m *LabelingSpec) XXX_Merge(src proto.Message)
func (*LabelingSpec) XXX_Size ¶ added in v0.4.821
func (m *LabelingSpec) XXX_Size() int
func (*LabelingSpec) XXX_Unmarshal ¶ added in v0.4.821
func (m *LabelingSpec) XXX_Unmarshal(b []byte) error
type LineChartSpec ¶
type LineChartSpec struct { // Dataset is the name of the dataset DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"` // The name of the X column // +kubebuilder:validation:Optional X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"` // The name of the Y column // +kubebuilder:validation:Optional Y *string `json:"y,omitempty" protobuf:"bytes,3,opt,name=y"` // Show the legend // +kubebuilder:validation:Optional Legend *bool `json:"legend,omitempty" protobuf:"varint,4,opt,name=legend"` }
func (*LineChartSpec) DeepCopy ¶
func (in *LineChartSpec) DeepCopy() *LineChartSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LineChartSpec.
func (*LineChartSpec) DeepCopyInto ¶
func (in *LineChartSpec) DeepCopyInto(out *LineChartSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*LineChartSpec) Descriptor ¶
func (*LineChartSpec) Descriptor() ([]byte, []int)
func (*LineChartSpec) Marshal ¶
func (m *LineChartSpec) Marshal() (dAtA []byte, err error)
func (*LineChartSpec) MarshalToSizedBuffer ¶
func (m *LineChartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*LineChartSpec) ProtoMessage ¶
func (*LineChartSpec) ProtoMessage()
func (*LineChartSpec) Reset ¶
func (m *LineChartSpec) Reset()
func (*LineChartSpec) Size ¶
func (m *LineChartSpec) Size() (n int)
func (*LineChartSpec) String ¶
func (this *LineChartSpec) String() string
func (*LineChartSpec) Unmarshal ¶
func (m *LineChartSpec) Unmarshal(dAtA []byte) error
func (*LineChartSpec) XXX_DiscardUnknown ¶
func (m *LineChartSpec) XXX_DiscardUnknown()
func (*LineChartSpec) XXX_Marshal ¶
func (m *LineChartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*LineChartSpec) XXX_Merge ¶
func (m *LineChartSpec) XXX_Merge(src proto.Message)
func (*LineChartSpec) XXX_Size ¶
func (m *LineChartSpec) XXX_Size() int
func (*LineChartSpec) XXX_Unmarshal ¶
func (m *LineChartSpec) XXX_Unmarshal(b []byte) error
type MaterializationSpec ¶
type MaterializationSpec struct { // If true, update the online store // +kubebuilder:default:=false // +kubebuilder:validation:Optional Online *bool `json:"online,omitempty" protobuf:"varint,1,opt,name=online"` // If true update the offline store. // +kubebuilder:default:=false // +kubebuilder:validation:Optional Offline *bool `json:"offline,omitempty" protobuf:"varint,2,opt,name=offline"` // +kubebuilder:validation:Optional StartDate *metav1.Time `json:"startDate,omitempty" protobuf:"bytes,3,opt,name=startDate"` // +kubebuilder:validation:Optional // +kubebuilder:validation:Minimum=0 // +kubebuilder:default:=0 OfflineTTL *int32 `json:"offlineTTL,omitempty" protobuf:"varint,4,opt,name=offlineTTL"` // +kubebuilder:validation:Optional // +kubebuilder:validation:Minimum=0 // +kubebuilder:default:=0 OnlineTTL *int32 `json:"onlineTTL,omitempty" protobuf:"varint,5,opt,name=onlineTTL"` // Number of days to store information from the past in the feature store. // +kubebuilder:validation:Optional // +kubebuilder:default:=21 // +kubebuilder:validation:Minimum=0 Backfill *int32 `json:"backfill,omitempty" protobuf:"varint,6,opt,name=backfill"` // Set the sync schedule between offline store and online store. //+kubebuilder:validation:Optional Schedule catalog.RunSchedule `json:"schedule,omitempty" protobuf:"bytes,7,opt,name=schedule"` }
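A hedged sketch of a materialization configuration for a feature store, written as if inside this package; the values simply mirror the documented defaults and are not prescriptive.
func exampleMaterializationSpec() MaterializationSpec {
	online, offline := true, true
	backfill := int32(21) // days of history to backfill, matching the documented default
	return MaterializationSpec{
		Online:   &online,
		Offline:  &offline,
		Backfill: &backfill,
		// Schedule (a catalog.RunSchedule) controls the offline-to-online sync; left at its zero value here.
	}
}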
func (*MaterializationSpec) DeepCopy ¶
func (in *MaterializationSpec) DeepCopy() *MaterializationSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializationSpec.
func (*MaterializationSpec) DeepCopyInto ¶
func (in *MaterializationSpec) DeepCopyInto(out *MaterializationSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*MaterializationSpec) Descriptor ¶
func (*MaterializationSpec) Descriptor() ([]byte, []int)
func (*MaterializationSpec) Marshal ¶
func (m *MaterializationSpec) Marshal() (dAtA []byte, err error)
func (*MaterializationSpec) MarshalTo ¶
func (m *MaterializationSpec) MarshalTo(dAtA []byte) (int, error)
func (*MaterializationSpec) MarshalToSizedBuffer ¶
func (m *MaterializationSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*MaterializationSpec) ProtoMessage ¶
func (*MaterializationSpec) ProtoMessage()
func (*MaterializationSpec) Reset ¶
func (m *MaterializationSpec) Reset()
func (*MaterializationSpec) Size ¶
func (m *MaterializationSpec) Size() (n int)
func (*MaterializationSpec) String ¶
func (this *MaterializationSpec) String() string
func (*MaterializationSpec) Unmarshal ¶
func (m *MaterializationSpec) Unmarshal(dAtA []byte) error
func (*MaterializationSpec) XXX_DiscardUnknown ¶
func (m *MaterializationSpec) XXX_DiscardUnknown()
func (*MaterializationSpec) XXX_Marshal ¶
func (m *MaterializationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*MaterializationSpec) XXX_Merge ¶
func (m *MaterializationSpec) XXX_Merge(src proto.Message)
func (*MaterializationSpec) XXX_Size ¶
func (m *MaterializationSpec) XXX_Size() int
func (*MaterializationSpec) XXX_Unmarshal ¶
func (m *MaterializationSpec) XXX_Unmarshal(b []byte) error
type MetricSpec ¶
type MetricSpec struct { // Dataset is the name of the dataset // +kubebuilder:validation:Optional DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"` // Column is the name of the column // +kubebuilder:validation:Optional Column *string `json:"column,omitempty" protobuf:"bytes,2,opt,name=column"` // Row is the row number // +kubebuilder:validation:Optional Row *int32 `json:"row,omitempty" protobuf:"varint,3,opt,name=row"` // Scalar is a constant value // +kubebuilder:validation:Optional Scalar *string `json:"scalar,omitempty" protobuf:"bytes,4,opt,name=scalar"` }
func (*MetricSpec) DeepCopy ¶
func (in *MetricSpec) DeepCopy() *MetricSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricSpec.
func (*MetricSpec) DeepCopyInto ¶
func (in *MetricSpec) DeepCopyInto(out *MetricSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*MetricSpec) Descriptor ¶
func (*MetricSpec) Descriptor() ([]byte, []int)
func (*MetricSpec) Marshal ¶
func (m *MetricSpec) Marshal() (dAtA []byte, err error)
func (*MetricSpec) MarshalToSizedBuffer ¶
func (m *MetricSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*MetricSpec) ProtoMessage ¶
func (*MetricSpec) ProtoMessage()
func (*MetricSpec) Reset ¶
func (m *MetricSpec) Reset()
func (*MetricSpec) Size ¶
func (m *MetricSpec) Size() (n int)
func (*MetricSpec) String ¶
func (this *MetricSpec) String() string
func (*MetricSpec) Unmarshal ¶
func (m *MetricSpec) Unmarshal(dAtA []byte) error
func (*MetricSpec) XXX_DiscardUnknown ¶
func (m *MetricSpec) XXX_DiscardUnknown()
func (*MetricSpec) XXX_Marshal ¶
func (m *MetricSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*MetricSpec) XXX_Merge ¶
func (m *MetricSpec) XXX_Merge(src proto.Message)
func (*MetricSpec) XXX_Size ¶
func (m *MetricSpec) XXX_Size() int
func (*MetricSpec) XXX_Unmarshal ¶
func (m *MetricSpec) XXX_Unmarshal(b []byte) error
type OutlierStat ¶ added in v0.4.858
type OutlierStat struct { // The number of outliers below baseline Lower int32 `json:"lower,omitempty" protobuf:"varint,1,opt,name=lower"` // The number of outliers above baseline Upper int32 `json:"upper,omitempty" protobuf:"varint,2,opt,name=upper"` // Percentage of rows detected as outliers Percent float32 `json:"percent,omitempty" protobuf:"bytes,3,opt,name=percent"` }
func (*OutlierStat) DeepCopy ¶ added in v0.4.858
func (in *OutlierStat) DeepCopy() *OutlierStat
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OutlierStat.
func (*OutlierStat) DeepCopyInto ¶ added in v0.4.858
func (in *OutlierStat) DeepCopyInto(out *OutlierStat)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*OutlierStat) Descriptor ¶ added in v0.4.858
func (*OutlierStat) Descriptor() ([]byte, []int)
func (*OutlierStat) Marshal ¶ added in v0.4.858
func (m *OutlierStat) Marshal() (dAtA []byte, err error)
func (*OutlierStat) MarshalTo ¶ added in v0.4.858
func (m *OutlierStat) MarshalTo(dAtA []byte) (int, error)
func (*OutlierStat) MarshalToSizedBuffer ¶ added in v0.4.858
func (m *OutlierStat) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*OutlierStat) ProtoMessage ¶ added in v0.4.858
func (*OutlierStat) ProtoMessage()
func (*OutlierStat) Reset ¶ added in v0.4.858
func (m *OutlierStat) Reset()
func (*OutlierStat) Size ¶ added in v0.4.858
func (m *OutlierStat) Size() (n int)
func (*OutlierStat) String ¶ added in v0.4.858
func (this *OutlierStat) String() string
func (*OutlierStat) Unmarshal ¶ added in v0.4.858
func (m *OutlierStat) Unmarshal(dAtA []byte) error
func (*OutlierStat) XXX_DiscardUnknown ¶ added in v0.4.858
func (m *OutlierStat) XXX_DiscardUnknown()
func (*OutlierStat) XXX_Marshal ¶ added in v0.4.858
func (m *OutlierStat) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*OutlierStat) XXX_Merge ¶ added in v0.4.858
func (m *OutlierStat) XXX_Merge(src proto.Message)
func (*OutlierStat) XXX_Size ¶ added in v0.4.858
func (m *OutlierStat) XXX_Size() int
func (*OutlierStat) XXX_Unmarshal ¶ added in v0.4.858
func (m *OutlierStat) XXX_Unmarshal(b []byte) error
type PageSpec ¶
type PageSpec struct {
Rows []RowSpec `json:"rows,omitempty" protobuf:"bytes,1,rep,name=rows"`
}
func (*PageSpec) DeepCopy ¶
func (in *PageSpec) DeepCopy() *PageSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PageSpec.
func (*PageSpec) DeepCopyInto ¶
func (in *PageSpec) DeepCopyInto(out *PageSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*PageSpec) Descriptor ¶
func (*PageSpec) Descriptor() ([]byte, []int)
func (*PageSpec) MarshalToSizedBuffer ¶
func (m *PageSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*PageSpec) ProtoMessage ¶
func (*PageSpec) ProtoMessage()
func (*PageSpec) XXX_DiscardUnknown ¶
func (m *PageSpec) XXX_DiscardUnknown()
func (*PageSpec) XXX_Marshal ¶
func (m *PageSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*PageSpec) XXX_Unmarshal ¶
func (m *PageSpec) XXX_Unmarshal(b []byte) error
type ParquetFileSpec ¶ added in v0.4.861
type ParquetFileSpec struct {
	// The Parquet engine to use (by default, auto)
	// +kubebuilder:default:="auto"
	// +kubebuilder:validation:Optional
	Engine *string `json:"engine,omitempty" protobuf:"bytes,1,opt,name=engine"`
}

ParquetFileSpec specifies the format of a Parquet file
func (*ParquetFileSpec) DeepCopy ¶ added in v0.4.861
func (in *ParquetFileSpec) DeepCopy() *ParquetFileSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParquetFileSpec.
func (*ParquetFileSpec) DeepCopyInto ¶ added in v0.4.861
func (in *ParquetFileSpec) DeepCopyInto(out *ParquetFileSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ParquetFileSpec) Descriptor ¶ added in v0.4.861
func (*ParquetFileSpec) Descriptor() ([]byte, []int)
func (*ParquetFileSpec) Marshal ¶ added in v0.4.861
func (m *ParquetFileSpec) Marshal() (dAtA []byte, err error)
func (*ParquetFileSpec) MarshalTo ¶ added in v0.4.861
func (m *ParquetFileSpec) MarshalTo(dAtA []byte) (int, error)
func (*ParquetFileSpec) MarshalToSizedBuffer ¶ added in v0.4.861
func (m *ParquetFileSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ParquetFileSpec) ProtoMessage ¶ added in v0.4.861
func (*ParquetFileSpec) ProtoMessage()
func (*ParquetFileSpec) Reset ¶ added in v0.4.861
func (m *ParquetFileSpec) Reset()
func (*ParquetFileSpec) Size ¶ added in v0.4.861
func (m *ParquetFileSpec) Size() (n int)
func (*ParquetFileSpec) String ¶ added in v0.4.861
func (this *ParquetFileSpec) String() string
func (*ParquetFileSpec) Unmarshal ¶ added in v0.4.861
func (m *ParquetFileSpec) Unmarshal(dAtA []byte) error
func (*ParquetFileSpec) XXX_DiscardUnknown ¶ added in v0.4.861
func (m *ParquetFileSpec) XXX_DiscardUnknown()
func (*ParquetFileSpec) XXX_Marshal ¶ added in v0.4.861
func (m *ParquetFileSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ParquetFileSpec) XXX_Merge ¶ added in v0.4.861
func (m *ParquetFileSpec) XXX_Merge(src proto.Message)
func (*ParquetFileSpec) XXX_Size ¶ added in v0.4.861
func (m *ParquetFileSpec) XXX_Size() int
func (*ParquetFileSpec) XXX_Unmarshal ¶ added in v0.4.861
func (m *ParquetFileSpec) XXX_Unmarshal(b []byte) error
type Recipe ¶
type Recipe struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              RecipeSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status RecipeStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
+kubebuilder:object:root=true
+kubebuilder:resource:path=recipes,shortName=rc,singular=recipe,categories={data,modela,all}
+kubebuilder:storageversion
+kubebuilder:subresource:status
+kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status"
+kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner"
+kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName"
+kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description"
+kubebuilder:printcolumn:name="Last Run",type="date",JSONPath=".status.lastRun.at",description=""
+kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description=""

Recipe represents a single batch of data
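As a rough illustration of how the Recipe types compose, the sketch below builds a Recipe in Go with a dataset input, two cleaning steps, and an output dataset. The import path, resource names, and the "column" step parameter are assumptions made for this example; only the field names and step operation constants come from the definitions in this package.

package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func strPtr(s string) *string { return &s }
func boolPtr(b bool) *bool    { return &b }

func main() {
	recipe := &data.Recipe{
		ObjectMeta: metav1.ObjectMeta{Name: "clean-customers", Namespace: "default"},
		Spec: data.RecipeSpec{
			VersionName: strPtr("v1"),
			Description: strPtr("Drop duplicate rows and fill missing ages"),
			Input: data.RecipeInputSpec{
				// Read from an existing dataset rather than a raw data location.
				DatasetName: strPtr("customers"),
			},
			Steps: []data.RecipeStep{
				// Each step names an operation and its parameters.
				{Op: data.RemoveDuplicatesStepOp},
				{
					Op: data.FillWithMedianStepOp,
					Parameters: []*data.RecipeStepParam{
						{Name: "column", Value: "age"}, // parameter name is illustrative
					},
				},
			},
			Output: data.RecipeOutputSpec{
				// Materialize the result as a new dataset.
				CreateDataset: boolPtr(true),
				DatasetName:   strPtr("customers-clean"),
			},
		},
	}
	fmt.Println(recipe.Name, len(recipe.Spec.Steps))
}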
func (*Recipe) AddFinalizer ¶
func (recipe *Recipe) AddFinalizer()
func (*Recipe) CreateOrUpdateCond ¶
Merge or update condition
func (*Recipe) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Recipe.
func (*Recipe) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Recipe) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Recipe) Descriptor ¶
func (*Recipe) GetCondIdx ¶
func (Recipe) HasFinalizer ¶
func (*Recipe) MarshalToSizedBuffer ¶
func (Recipe) PrintConditions ¶
func (recipe Recipe) PrintConditions()
func (*Recipe) ProtoMessage ¶
func (*Recipe) ProtoMessage()
func (*Recipe) RemoveFinalizer ¶
func (recipe *Recipe) RemoveFinalizer()
func (*Recipe) SetupWebhookWithManager ¶
func (*Recipe) UpdateRunStatus ¶ added in v0.4.612
func (Recipe) ValidateCreate ¶
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (Recipe) ValidateDelete ¶
func (Recipe) ValidateUpdate ¶
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*Recipe) XXX_DiscardUnknown ¶
func (m *Recipe) XXX_DiscardUnknown()
func (*Recipe) XXX_Marshal ¶
func (*Recipe) XXX_Unmarshal ¶
type RecipeConditionType ¶
type RecipeConditionType string
RecipeConditionType is the condition on the Recipe
type RecipeInputSpec ¶
type RecipeInputSpec struct {
	// DatasetName is the name of the dataset
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// Location is the folder where the actual data resides, if not using a dataset
	// +required
	Location *DataLocation `json:"location,omitempty" protobuf:"bytes,2,opt,name=location"`
	// Format is the dataset format
	// +kubebuilder:default:=csv
	Format *FlatFileType `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"`
}

RecipeInputSpec specifies the input for a recipe
func (*RecipeInputSpec) DeepCopy ¶
func (in *RecipeInputSpec) DeepCopy() *RecipeInputSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeInputSpec.
func (*RecipeInputSpec) DeepCopyInto ¶
func (in *RecipeInputSpec) DeepCopyInto(out *RecipeInputSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeInputSpec) Descriptor ¶
func (*RecipeInputSpec) Descriptor() ([]byte, []int)
func (*RecipeInputSpec) Marshal ¶
func (m *RecipeInputSpec) Marshal() (dAtA []byte, err error)
func (*RecipeInputSpec) MarshalToSizedBuffer ¶
func (m *RecipeInputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeInputSpec) ProtoMessage ¶
func (*RecipeInputSpec) ProtoMessage()
func (*RecipeInputSpec) Reset ¶
func (m *RecipeInputSpec) Reset()
func (*RecipeInputSpec) Size ¶
func (m *RecipeInputSpec) Size() (n int)
func (*RecipeInputSpec) String ¶
func (this *RecipeInputSpec) String() string
func (*RecipeInputSpec) Unmarshal ¶
func (m *RecipeInputSpec) Unmarshal(dAtA []byte) error
func (*RecipeInputSpec) XXX_DiscardUnknown ¶
func (m *RecipeInputSpec) XXX_DiscardUnknown()
func (*RecipeInputSpec) XXX_Marshal ¶
func (m *RecipeInputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeInputSpec) XXX_Merge ¶
func (m *RecipeInputSpec) XXX_Merge(src proto.Message)
func (*RecipeInputSpec) XXX_Size ¶
func (m *RecipeInputSpec) XXX_Size() int
func (*RecipeInputSpec) XXX_Unmarshal ¶
func (m *RecipeInputSpec) XXX_Unmarshal(b []byte) error
type RecipeList ¶
type RecipeList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []Recipe `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true

RecipeList contains a list of Recipes
func (*RecipeList) DeepCopy ¶
func (in *RecipeList) DeepCopy() *RecipeList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeList.
func (*RecipeList) DeepCopyInto ¶
func (in *RecipeList) DeepCopyInto(out *RecipeList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeList) DeepCopyObject ¶
func (in *RecipeList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*RecipeList) Descriptor ¶
func (*RecipeList) Descriptor() ([]byte, []int)
func (*RecipeList) Marshal ¶
func (m *RecipeList) Marshal() (dAtA []byte, err error)
func (*RecipeList) MarshalToSizedBuffer ¶
func (m *RecipeList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeList) ProtoMessage ¶
func (*RecipeList) ProtoMessage()
func (*RecipeList) Reset ¶
func (m *RecipeList) Reset()
func (*RecipeList) Size ¶
func (m *RecipeList) Size() (n int)
func (*RecipeList) String ¶
func (this *RecipeList) String() string
func (*RecipeList) Unmarshal ¶
func (m *RecipeList) Unmarshal(dAtA []byte) error
func (*RecipeList) XXX_DiscardUnknown ¶
func (m *RecipeList) XXX_DiscardUnknown()
func (*RecipeList) XXX_Marshal ¶
func (m *RecipeList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeList) XXX_Merge ¶
func (m *RecipeList) XXX_Merge(src proto.Message)
func (*RecipeList) XXX_Size ¶
func (m *RecipeList) XXX_Size() int
func (*RecipeList) XXX_Unmarshal ¶
func (m *RecipeList) XXX_Unmarshal(b []byte) error
type RecipeOutputSpec ¶
type RecipeOutputSpec struct {
	// CreateDataset, if true, creates a new dataset when the recipe is done.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	CreateDataset *bool `json:"createDataset,omitempty" protobuf:"varint,1,opt,name=createDataset"`
	// DatasetName is the name of the dataset output by the recipe
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,2,opt,name=datasetName"`
	// Location is the data location folder where the actual data resides.
	// +kubebuilder:validation:Required
	// +required
	Location *DataLocation `json:"location,omitempty" protobuf:"bytes,3,opt,name=location"`
}

RecipeOutputSpec specifies the output of a recipe
func (*RecipeOutputSpec) DeepCopy ¶
func (in *RecipeOutputSpec) DeepCopy() *RecipeOutputSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeOutputSpec.
func (*RecipeOutputSpec) DeepCopyInto ¶
func (in *RecipeOutputSpec) DeepCopyInto(out *RecipeOutputSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeOutputSpec) Descriptor ¶
func (*RecipeOutputSpec) Descriptor() ([]byte, []int)
func (*RecipeOutputSpec) Marshal ¶
func (m *RecipeOutputSpec) Marshal() (dAtA []byte, err error)
func (*RecipeOutputSpec) MarshalToSizedBuffer ¶
func (m *RecipeOutputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeOutputSpec) ProtoMessage ¶
func (*RecipeOutputSpec) ProtoMessage()
func (*RecipeOutputSpec) Reset ¶
func (m *RecipeOutputSpec) Reset()
func (*RecipeOutputSpec) Size ¶
func (m *RecipeOutputSpec) Size() (n int)
func (*RecipeOutputSpec) String ¶
func (this *RecipeOutputSpec) String() string
func (*RecipeOutputSpec) Unmarshal ¶
func (m *RecipeOutputSpec) Unmarshal(dAtA []byte) error
func (*RecipeOutputSpec) XXX_DiscardUnknown ¶
func (m *RecipeOutputSpec) XXX_DiscardUnknown()
func (*RecipeOutputSpec) XXX_Marshal ¶
func (m *RecipeOutputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeOutputSpec) XXX_Merge ¶
func (m *RecipeOutputSpec) XXX_Merge(src proto.Message)
func (*RecipeOutputSpec) XXX_Size ¶
func (m *RecipeOutputSpec) XXX_Size() int
func (*RecipeOutputSpec) XXX_Unmarshal ¶
func (m *RecipeOutputSpec) XXX_Unmarshal(b []byte) error
type RecipePartSpec ¶
type RecipePartSpec struct {
	// RecipeName is the name of the recipe to run
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	RecipeName *string `json:"recipeName,omitempty" protobuf:"bytes,1,opt,name=recipeName"`
	// Dependents is the list of recipes that need to run after this recipe.
	Dependents []string `json:"dependents,omitempty" protobuf:"bytes,2,rep,name=dependents"`
}
func (*RecipePartSpec) DeepCopy ¶
func (in *RecipePartSpec) DeepCopy() *RecipePartSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipePartSpec.
func (*RecipePartSpec) DeepCopyInto ¶
func (in *RecipePartSpec) DeepCopyInto(out *RecipePartSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipePartSpec) Descriptor ¶
func (*RecipePartSpec) Descriptor() ([]byte, []int)
func (*RecipePartSpec) Marshal ¶
func (m *RecipePartSpec) Marshal() (dAtA []byte, err error)
func (*RecipePartSpec) MarshalToSizedBuffer ¶
func (m *RecipePartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipePartSpec) ProtoMessage ¶
func (*RecipePartSpec) ProtoMessage()
func (*RecipePartSpec) Reset ¶
func (m *RecipePartSpec) Reset()
func (*RecipePartSpec) Size ¶
func (m *RecipePartSpec) Size() (n int)
func (*RecipePartSpec) String ¶
func (this *RecipePartSpec) String() string
func (*RecipePartSpec) Unmarshal ¶
func (m *RecipePartSpec) Unmarshal(dAtA []byte) error
func (*RecipePartSpec) XXX_DiscardUnknown ¶
func (m *RecipePartSpec) XXX_DiscardUnknown()
func (*RecipePartSpec) XXX_Marshal ¶
func (m *RecipePartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipePartSpec) XXX_Merge ¶
func (m *RecipePartSpec) XXX_Merge(src proto.Message)
func (*RecipePartSpec) XXX_Size ¶
func (m *RecipePartSpec) XXX_Size() int
func (*RecipePartSpec) XXX_Unmarshal ¶
func (m *RecipePartSpec) XXX_Unmarshal(b []byte) error
type RecipeRun ¶
type RecipeRun struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              RecipeRunSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status RecipeRunStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
+kubebuilder:subresource:status
+kubebuilder:resource:path=reciperuns,shortName=rcr,singular=reciperun,categories={data,modela,all}
+kubebuilder:object:root=true
+kubebuilder:storageversion
+kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase"
+kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner"
+kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName"
+kubebuilder:printcolumn:name="Recipe",type="string",JSONPath=".spec.recipeName"
+kubebuilder:printcolumn:name="StartedAt",type="date",JSONPath=".status.startTime",priority=1
+kubebuilder:printcolumn:name="CompletedAt",type="date",JSONPath=".status.completionTime",priority=1

RecipeRun represents one execution of a recipe. The execution is performed by creating a Kubernetes Job.
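A minimal sketch of submitting a run for an existing recipe. The import path and object names are assumptions; the required output Location is omitted because the DataLocation fields are not shown in this section.

package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	recipeName := "clean-customers"
	run := &data.RecipeRun{
		ObjectMeta: metav1.ObjectMeta{Name: "clean-customers-run-1", Namespace: "default"},
		Spec: data.RecipeRunSpec{
			// The recipe whose steps this run executes.
			RecipeName: &recipeName,
			// Spec.Location (the output DataLocation) is required in practice,
			// but its fields are not listed in this section, so it is left out here.
		},
	}
	// The controller normally drives Status.Phase through the RecipeRunPhase values.
	fmt.Println(run.Name, run.Status.Phase)
}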
func (*RecipeRun) AddFinalizer ¶
func (reciperun *RecipeRun) AddFinalizer()
func (RecipeRun) CompletionAlert ¶ added in v0.4.601
func (reciperun RecipeRun) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert
Generate a recipe run completion alert
func (*RecipeRun) CreateOrUpdateCond ¶
Merge or update condition
func (*RecipeRun) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRun.
func (*RecipeRun) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeRun) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*RecipeRun) Descriptor ¶
func (RecipeRun) ErrorAlert ¶ added in v0.4.601
func (RecipeRun) GetCondIdx ¶
func (RecipeRun) HasFinalizer ¶
func (RecipeRun) ManifestURI ¶ added in v0.5.472
func (*RecipeRun) MarkCompleted ¶
func (reciperun *RecipeRun) MarkCompleted()
func (*RecipeRun) MarkFailed ¶
func (*RecipeRun) MarkRunning ¶
func (reciperun *RecipeRun) MarkRunning()
func (*RecipeRun) MarshalToSizedBuffer ¶
func (RecipeRun) PrintConditions ¶
func (reciperun RecipeRun) PrintConditions()
func (*RecipeRun) ProtoMessage ¶
func (*RecipeRun) ProtoMessage()
func (*RecipeRun) RemoveFinalizer ¶
func (reciperun *RecipeRun) RemoveFinalizer()
func (RecipeRun) ReportName ¶
func (RecipeRun) RunStatus ¶ added in v0.4.614
func (reciperun RecipeRun) RunStatus() *catalog.LastRunStatus
Return the state of the run as RunStatus
func (*RecipeRun) SetupWebhookWithManager ¶
func (RecipeRun) StatusString ¶
func (*RecipeRun) ValidateCreate ¶
ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (*RecipeRun) ValidateDelete ¶
func (*RecipeRun) ValidateUpdate ¶
ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (*RecipeRun) XXX_DiscardUnknown ¶
func (m *RecipeRun) XXX_DiscardUnknown()
func (*RecipeRun) XXX_Marshal ¶
func (*RecipeRun) XXX_Unmarshal ¶
type RecipeRunList ¶
type RecipeRunList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []RecipeRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}
+kubebuilder:object:root=true

RecipeRunList contains a list of RecipeRuns
func (*RecipeRunList) DeepCopy ¶
func (in *RecipeRunList) DeepCopy() *RecipeRunList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunList.
func (*RecipeRunList) DeepCopyInto ¶
func (in *RecipeRunList) DeepCopyInto(out *RecipeRunList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeRunList) DeepCopyObject ¶
func (in *RecipeRunList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*RecipeRunList) Descriptor ¶
func (*RecipeRunList) Descriptor() ([]byte, []int)
func (*RecipeRunList) Marshal ¶
func (m *RecipeRunList) Marshal() (dAtA []byte, err error)
func (*RecipeRunList) MarshalToSizedBuffer ¶
func (m *RecipeRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeRunList) ProtoMessage ¶
func (*RecipeRunList) ProtoMessage()
func (*RecipeRunList) Reset ¶
func (m *RecipeRunList) Reset()
func (*RecipeRunList) Size ¶
func (m *RecipeRunList) Size() (n int)
func (*RecipeRunList) String ¶
func (this *RecipeRunList) String() string
func (*RecipeRunList) Unmarshal ¶
func (m *RecipeRunList) Unmarshal(dAtA []byte) error
func (*RecipeRunList) XXX_DiscardUnknown ¶
func (m *RecipeRunList) XXX_DiscardUnknown()
func (*RecipeRunList) XXX_Marshal ¶
func (m *RecipeRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeRunList) XXX_Merge ¶
func (m *RecipeRunList) XXX_Merge(src proto.Message)
func (*RecipeRunList) XXX_Size ¶
func (m *RecipeRunList) XXX_Size() int
func (*RecipeRunList) XXX_Unmarshal ¶
func (m *RecipeRunList) XXX_Unmarshal(b []byte) error
type RecipeRunPhase ¶
type RecipeRunPhase string
const (
	RecipeRunPhasePending     RecipeRunPhase = "Pending"
	RecipeRunPhaseAborted     RecipeRunPhase = "Aborted"
	RecipeRunPhaseRunning     RecipeRunPhase = "Running"
	RecipeRunPhaseUnitTesting RecipeRunPhase = "UnitTesting"
	RecipeRunPhaseSucceed     RecipeRunPhase = "Completed"
	RecipeRunPhaseFailed      RecipeRunPhase = "Failed"
)
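A small sketch of how a client might interpret these phase values when polling a run; the package alias and import path are assumptions.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// isTerminal reports whether a recipe run has stopped making progress.
func isTerminal(phase data.RecipeRunPhase) bool {
	switch phase {
	case data.RecipeRunPhaseSucceed, data.RecipeRunPhaseFailed, data.RecipeRunPhaseAborted:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isTerminal(data.RecipeRunPhaseRunning)) // false
	fmt.Println(isTerminal(data.RecipeRunPhaseFailed))  // true
}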
type RecipeRunSpec ¶
type RecipeRunSpec struct {
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
	// The name of the recipe that executes this run
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	RecipeName *string `json:"recipeName,omitempty" protobuf:"bytes,2,opt,name=recipeName"`
	// The execution environment (lab) of this recipe run
	// +kubebuilder:validation:Optional
	LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,3,opt,name=labRef"`
	// The location of the data output.
	// +required
	Location DataLocation `json:"location,omitempty" protobuf:"bytes,4,opt,name=location"`
	// Resources are the hardware resource requirements for a recipe run.
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,5,opt,name=resources"`
	// TTL.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,6,opt,name=ttl"`
	// The model class for this data pipeline run, if the dataset was created by a model class
	// +kubebuilder:validation:Optional
	ModelClassName *string `json:"modelClassName,omitempty" protobuf:"bytes,7,opt,name=modelClassName"`
	// If this report was created by a data pipeline run, this is the run name
	// +kubebuilder:validation:Optional
	ModelClassRunName *string `json:"modelClassRunName,omitempty" protobuf:"bytes,8,opt,name=modelClassRunName"`
}

RecipeRunSpec defines the desired state of a RecipeRun
func (*RecipeRunSpec) DeepCopy ¶
func (in *RecipeRunSpec) DeepCopy() *RecipeRunSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunSpec.
func (*RecipeRunSpec) DeepCopyInto ¶
func (in *RecipeRunSpec) DeepCopyInto(out *RecipeRunSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeRunSpec) Descriptor ¶
func (*RecipeRunSpec) Descriptor() ([]byte, []int)
func (*RecipeRunSpec) Marshal ¶
func (m *RecipeRunSpec) Marshal() (dAtA []byte, err error)
func (*RecipeRunSpec) MarshalToSizedBuffer ¶
func (m *RecipeRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeRunSpec) ProtoMessage ¶
func (*RecipeRunSpec) ProtoMessage()
func (*RecipeRunSpec) Reset ¶
func (m *RecipeRunSpec) Reset()
func (*RecipeRunSpec) Size ¶
func (m *RecipeRunSpec) Size() (n int)
func (*RecipeRunSpec) String ¶
func (this *RecipeRunSpec) String() string
func (*RecipeRunSpec) Unmarshal ¶
func (m *RecipeRunSpec) Unmarshal(dAtA []byte) error
func (*RecipeRunSpec) XXX_DiscardUnknown ¶
func (m *RecipeRunSpec) XXX_DiscardUnknown()
func (*RecipeRunSpec) XXX_Marshal ¶
func (m *RecipeRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeRunSpec) XXX_Merge ¶
func (m *RecipeRunSpec) XXX_Merge(src proto.Message)
func (*RecipeRunSpec) XXX_Size ¶
func (m *RecipeRunSpec) XXX_Size() int
func (*RecipeRunSpec) XXX_Unmarshal ¶
func (m *RecipeRunSpec) XXX_Unmarshal(b []byte) error
type RecipeRunStatus ¶
type RecipeRunStatus struct {
	// Represents the end time
	// +kubebuilder:validation:Optional
	CompletedAt *metav1.Time `json:"completedAt,omitempty" protobuf:"bytes,1,opt,name=completedAt"`
	// The phase of the recipe run
	// +kubebuilder:default:="Pending"
	// +kubebuilder:validation:Optional
	Phase RecipeRunPhase `json:"phase,omitempty" protobuf:"bytes,2,opt,name=phase"`
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,3,opt,name=observedGeneration"`
	// The failure reason in case of terminal failure
	// Borrowed from cluster api controller
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,4,opt,name=failureReason"`
	// The failure message in case of terminal failure
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,5,opt,name=failureMessage"`
	// What triggered the run
	//+kubebuilder:validation:Optional
	TriggeredBy catalog.TriggerType `json:"triggeredBy,omitempty" protobuf:"bytes,6,opt,name=triggeredBy"`
	// Holds the location of log paths
	//+kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,7,opt,name=logs"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,9,opt,name=updatedAt"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,11,rep,name=conditions"`
}

RecipeRunStatus defines the observed state of a RecipeRun
func (*RecipeRunStatus) DeepCopy ¶
func (in *RecipeRunStatus) DeepCopy() *RecipeRunStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunStatus.
func (*RecipeRunStatus) DeepCopyInto ¶
func (in *RecipeRunStatus) DeepCopyInto(out *RecipeRunStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeRunStatus) Descriptor ¶
func (*RecipeRunStatus) Descriptor() ([]byte, []int)
func (*RecipeRunStatus) Marshal ¶
func (m *RecipeRunStatus) Marshal() (dAtA []byte, err error)
func (*RecipeRunStatus) MarshalToSizedBuffer ¶
func (m *RecipeRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeRunStatus) ProtoMessage ¶
func (*RecipeRunStatus) ProtoMessage()
func (*RecipeRunStatus) Reset ¶
func (m *RecipeRunStatus) Reset()
func (*RecipeRunStatus) Size ¶
func (m *RecipeRunStatus) Size() (n int)
func (*RecipeRunStatus) String ¶
func (this *RecipeRunStatus) String() string
func (*RecipeRunStatus) Unmarshal ¶
func (m *RecipeRunStatus) Unmarshal(dAtA []byte) error
func (*RecipeRunStatus) XXX_DiscardUnknown ¶
func (m *RecipeRunStatus) XXX_DiscardUnknown()
func (*RecipeRunStatus) XXX_Marshal ¶
func (m *RecipeRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeRunStatus) XXX_Merge ¶
func (m *RecipeRunStatus) XXX_Merge(src proto.Message)
func (*RecipeRunStatus) XXX_Size ¶
func (m *RecipeRunStatus) XXX_Size() int
func (*RecipeRunStatus) XXX_Unmarshal ¶
func (m *RecipeRunStatus) XXX_Unmarshal(b []byte) error
type RecipeSpec ¶
type RecipeSpec struct {
	// Owner is the owner of the recipe
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// VersionName is the data product version of the recipe
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=63
	// +required
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// Description is the user-provided description
	// +kubebuilder:validation:MaxLength=512
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// Input is the input recipe spec
	Input RecipeInputSpec `json:"input,omitempty" protobuf:"bytes,4,opt,name=input"`
	// Steps are the list of recipe steps
	Steps []RecipeStep `json:"steps,omitempty" protobuf:"bytes,5,rep,name=steps"`
	// Output is the desired output
	Output RecipeOutputSpec `json:"output,omitempty" protobuf:"bytes,6,opt,name=output"`
	// Sample specifies the sampling parameters used when viewing the recipe
	// +kubebuilder:validation:Optional
	Sample SampleSpec `json:"sample,omitempty" protobuf:"bytes,7,opt,name=sample"`
	// Resources define the resource requirements to run the recipe
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,8,opt,name=resources"`
	// Timeout is the deadline set on jobs for this recipe.
	// +kubebuilder:default:=600
	// +kubebuilder:validation:Optional
	Timeout *int64 `json:"timeout,omitempty" protobuf:"varint,10,opt,name=timeout"`
	// TTL.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,11,opt,name=ttl"`
	// Unit test templates operating on the recipe run.
	// +kubebuilder:validation:Optional
	UnitTestsTemplate catalog.TestSuite `json:"unitTestsTemplate,omitempty" protobuf:"bytes,12,opt,name=unitTestsTemplate"`
}

RecipeSpec defines the desired state of a Recipe
func (*RecipeSpec) DeepCopy ¶
func (in *RecipeSpec) DeepCopy() *RecipeSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeSpec.
func (*RecipeSpec) DeepCopyInto ¶
func (in *RecipeSpec) DeepCopyInto(out *RecipeSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeSpec) Descriptor ¶
func (*RecipeSpec) Descriptor() ([]byte, []int)
func (*RecipeSpec) Marshal ¶
func (m *RecipeSpec) Marshal() (dAtA []byte, err error)
func (*RecipeSpec) MarshalToSizedBuffer ¶
func (m *RecipeSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeSpec) ProtoMessage ¶
func (*RecipeSpec) ProtoMessage()
func (*RecipeSpec) Reset ¶
func (m *RecipeSpec) Reset()
func (*RecipeSpec) Size ¶
func (m *RecipeSpec) Size() (n int)
func (*RecipeSpec) String ¶
func (this *RecipeSpec) String() string
func (*RecipeSpec) Unmarshal ¶
func (m *RecipeSpec) Unmarshal(dAtA []byte) error
func (*RecipeSpec) XXX_DiscardUnknown ¶
func (m *RecipeSpec) XXX_DiscardUnknown()
func (*RecipeSpec) XXX_Marshal ¶
func (m *RecipeSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeSpec) XXX_Merge ¶
func (m *RecipeSpec) XXX_Merge(src proto.Message)
func (*RecipeSpec) XXX_Size ¶
func (m *RecipeSpec) XXX_Size() int
func (*RecipeSpec) XXX_Unmarshal ¶
func (m *RecipeSpec) XXX_Unmarshal(b []byte) error
type RecipeStatus ¶
type RecipeStatus struct {
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// LastRun is the last time a data pipeline run was created
	//+kubebuilder:validation:Optional
	LastRun catalog.LastRunStatus `json:"lastRun,omitempty" protobuf:"bytes,2,opt,name=lastRun"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	UpdatedAt *metav1.Time `json:"updatedAt,omitempty" protobuf:"bytes,4,opt,name=updatedAt"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []metav1.Condition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,7,rep,name=conditions"`
}
RecipeStatus defines the observed state of Recipe
func (*RecipeStatus) DeepCopy ¶
func (in *RecipeStatus) DeepCopy() *RecipeStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeStatus.
func (*RecipeStatus) DeepCopyInto ¶
func (in *RecipeStatus) DeepCopyInto(out *RecipeStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeStatus) Descriptor ¶
func (*RecipeStatus) Descriptor() ([]byte, []int)
func (*RecipeStatus) Marshal ¶
func (m *RecipeStatus) Marshal() (dAtA []byte, err error)
func (*RecipeStatus) MarshalToSizedBuffer ¶
func (m *RecipeStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeStatus) ProtoMessage ¶
func (*RecipeStatus) ProtoMessage()
func (*RecipeStatus) Reset ¶
func (m *RecipeStatus) Reset()
func (*RecipeStatus) Size ¶
func (m *RecipeStatus) Size() (n int)
func (*RecipeStatus) String ¶
func (this *RecipeStatus) String() string
func (*RecipeStatus) Unmarshal ¶
func (m *RecipeStatus) Unmarshal(dAtA []byte) error
func (*RecipeStatus) XXX_DiscardUnknown ¶
func (m *RecipeStatus) XXX_DiscardUnknown()
func (*RecipeStatus) XXX_Marshal ¶
func (m *RecipeStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeStatus) XXX_Merge ¶
func (m *RecipeStatus) XXX_Merge(src proto.Message)
func (*RecipeStatus) XXX_Size ¶
func (m *RecipeStatus) XXX_Size() int
func (*RecipeStatus) XXX_Unmarshal ¶
func (m *RecipeStatus) XXX_Unmarshal(b []byte) error
type RecipeStep ¶
type RecipeStep struct {
	Op         RecipeStepOperation `json:"op,omitempty" protobuf:"bytes,1,opt,name=op"`
	Parameters []*RecipeStepParam  `json:"parameters,omitempty" protobuf:"bytes,2,rep,name=parameters"`
}
RecipeStep defines one step in the recipe
func (*RecipeStep) DeepCopy ¶
func (in *RecipeStep) DeepCopy() *RecipeStep
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeStep.
func (*RecipeStep) DeepCopyInto ¶
func (in *RecipeStep) DeepCopyInto(out *RecipeStep)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeStep) Descriptor ¶
func (*RecipeStep) Descriptor() ([]byte, []int)
func (*RecipeStep) Marshal ¶
func (m *RecipeStep) Marshal() (dAtA []byte, err error)
func (*RecipeStep) MarshalToSizedBuffer ¶
func (m *RecipeStep) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeStep) ProtoMessage ¶
func (*RecipeStep) ProtoMessage()
func (*RecipeStep) Reset ¶
func (m *RecipeStep) Reset()
func (*RecipeStep) Size ¶
func (m *RecipeStep) Size() (n int)
func (*RecipeStep) String ¶
func (this *RecipeStep) String() string
func (*RecipeStep) Unmarshal ¶
func (m *RecipeStep) Unmarshal(dAtA []byte) error
func (*RecipeStep) XXX_DiscardUnknown ¶
func (m *RecipeStep) XXX_DiscardUnknown()
func (*RecipeStep) XXX_Marshal ¶
func (m *RecipeStep) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeStep) XXX_Merge ¶
func (m *RecipeStep) XXX_Merge(src proto.Message)
func (*RecipeStep) XXX_Size ¶
func (m *RecipeStep) XXX_Size() int
func (*RecipeStep) XXX_Unmarshal ¶
func (m *RecipeStep) XXX_Unmarshal(b []byte) error
type RecipeStepOperation ¶
type RecipeStepOperation string
RecipeStepOperation is the operation name of one step in the recipe
const (
	// Aggregate function
	AnyStepOp RecipeStepOperation = "any" AnyIfStepOp RecipeStepOperation = "any-if" ApproximateMedianStepOp RecipeStepOperation = "approximate-median" ApproximatePercentileStepOp RecipeStepOperation = "approximate-percentile" ApproximateQuartileStepOp RecipeStepOperation = "approximate-quartile" AverageStepOp RecipeStepOperation = "average" AverageIfStepOp RecipeStepOperation = "average-if" CorrelStepOp RecipeStepOperation = "correl" CountAStepOp RecipeStepOperation = "counta" CountAIfStepOp RecipeStepOperation = "counta-if" CountDistinctStepOp RecipeStepOperation = "counta-distinct" CountDistinctIfStepOp RecipeStepOperation = "counta-distinct-if" CountStepOp RecipeStepOperation = "count" CountIfStepOp RecipeStepOperation = "count-if" CovarStepOp RecipeStepOperation = "covar" KthLargestStepOp RecipeStepOperation = "kth-largest" KthLargestIfStepOp RecipeStepOperation = "kth-largest-if" KthLargestUniqueStepOp RecipeStepOperation = "kth-largest-unique" KthLargestUniqueIfStepOp RecipeStepOperation = "kth-largest-unique-if" ListStepOp RecipeStepOperation = "list" ListIfStepOp RecipeStepOperation = "list-if" MaxStepOp RecipeStepOperation = "max" MaxIfStepOp RecipeStepOperation = "max-if" MedianStepOp RecipeStepOperation = "median" MinStepOp RecipeStepOperation = "min" MinIfStepOp RecipeStepOperation = "min-if" ModeStepOp RecipeStepOperation = "mode" ModeIfStepOp RecipeStepOperation = "mode-if" PercentileStepOp RecipeStepOperation = "percentile" QuartileStepOp RecipeStepOperation = "quartile" StdDevStepOp RecipeStepOperation = "stddev" StdDevIfStepOp RecipeStepOperation = "stddev-if" SumStepOp RecipeStepOperation = "sum" SumIfStepOp RecipeStepOperation = "sum-if" UniqueStepOp RecipeStepOperation = "unique" VarStepOp RecipeStepOperation = "var" VarIfStepOp RecipeStepOperation = "var-if"
	// Logical functions
	OrStepOp RecipeStepOperation = "or" AndStepOp RecipeStepOperation = "and" NotStepOp RecipeStepOperation = "not"
	// Comparison
	ComparisonStepOp RecipeStepOperation = "comparison" IsEvenStepOp RecipeStepOperation = "is-even" IsOddStepOp RecipeStepOperation = "is-odd" InStepOp RecipeStepOperation = "in" MatchesStepOp RecipeStepOperation = "matches" EqualStepOp RecipeStepOperation = "equal" NotEqualStepOp RecipeStepOperation = "not-equal" GreaterThanStepOp RecipeStepOperation = "greater-than" GreaterThanEqualStepOp RecipeStepOperation = "greater-than-equal" LessThanStepOp RecipeStepOperation = "less-than" LessThanEqualOp RecipeStepOperation = "less-than-equal"
	// Math
	AddStepOp RecipeStepOperation = "add" SubstractStepOp RecipeStepOperation = "subtract" MultiplyStepOp RecipeStepOperation = "multiply" DivideOp RecipeStepOperation = "divide" ModStepOp RecipeStepOperation = "mod" NegateStepOp RecipeStepOperation = "negate" SignStepOp RecipeStepOperation = "sign" LcmStepOp RecipeStepOperation = "lcm" AbsoluteStepOp RecipeStepOperation = "absolute" ExponentStepOp RecipeStepOperation = "exponent" LogStepOp RecipeStepOperation = "log" LnStepOp RecipeStepOperation = "ln" PowerStepOp RecipeStepOperation = "power" SquareRootStepOp RecipeStepOperation = "sqr" CeilingStepOp RecipeStepOperation = "ceiling" FloorStepOp RecipeStepOperation = "floor" RoundStepOp RecipeStepOperation = "round" TruncStepOp RecipeStepOperation = "trunc" PIStepOp RecipeStepOperation = "pi" RandomStepOp RecipeStepOperation = "random" RandomBetweenStepOp RecipeStepOperation = "random-between"
	// Trig
	SinStepOp RecipeStepOperation = "sin" CosStepOp RecipeStepOperation = "cos" TanStepOp RecipeStepOperation = "tan" ASinStepOp RecipeStepOperation = "asin" ACosStepOp RecipeStepOperation = "acos" ATanStepOp RecipeStepOperation = "atan" SinHStepOp RecipeStepOperation = "sinh" CosHStepOp RecipeStepOperation = "cosh" TanHStepOp RecipeStepOperation = "tanh" ASinHStepOp RecipeStepOperation = "asinh" ACosHStepOp RecipeStepOperation = "acosh" ATanHStepOp RecipeStepOperation = "atanh" DegreesOp RecipeStepOperation = "degrees" RadiansStepOp RecipeStepOperation = "radians"
	// Date and time functions
	DateStepOp RecipeStepOperation = "date" TimeStepOp RecipeStepOperation = "time" DateTimeStepOp RecipeStepOperation = "date-time" DateAddStepOp RecipeStepOperation = "date-add" DateDiffStepOp RecipeStepOperation = "date-diff" DateFormatStepOp RecipeStepOperation = "date-format" UnixTimeFormatStepOp RecipeStepOperation = "unix-time-format" MonthStepOp RecipeStepOperation = "month" MonthNameStepOp RecipeStepOperation = "month-name" YearStepOp RecipeStepOperation = "year" DayStepOp RecipeStepOperation = "day" WeekNumberStepOp RecipeStepOperation = "week-number" WeekDayStepOp RecipeStepOperation = "week-day" HourStepOp RecipeStepOperation = "hour" MinuteStepOp RecipeStepOperation = "minute" SecondStepOp RecipeStepOperation = "second" UnixTimeStepOp RecipeStepOperation = "unix-time" NowStepOp RecipeStepOperation = "now" TodayStepOp RecipeStepOperation = "today" ParseDateStepOp RecipeStepOperation = "parse-date" NetworkDaysStepOp RecipeStepOperation = "network-days" NetworkDaysIntlStepOp RecipeStepOperation = "network-days-intl" MinDateStepOp RecipeStepOperation = "min-date" MaxDateStepOp RecipeStepOperation = "max-date" ModeDateStepOp RecipeStepOperation = "mode-date" WorkdayStepOp RecipeStepOperation = "workday" WorkDayIntlStepOp RecipeStepOperation = "workday-intl" ConvertFromUtcStepOp RecipeStepOperation = "convert-from-utc" ConvertToUtcStepOp RecipeStepOperation = "convert-to-utc" ConvertTimeZoneStepOp RecipeStepOperation = "convert-time-zone" MinDateIfStepOp RecipeStepOperation = "min-date-if" MaxDateIfStepOp RecipeStepOperation = "max-date-if" ModeDateIfStepOp RecipeStepOperation = "model-date-if" KthLargestDateStepOp RecipeStepOperation = "kth-largest-date" KthLargestUniqueDateStepOp RecipeStepOperation = "kth-largest-unique-date" KthLargestUniqueDateIfStepOp RecipeStepOperation = "kth-largest-unique-date-step" KthLargestDateIfStepOp RecipeStepOperation = "kth-largest-date-if" WeekDayNameStepOp RecipeStepOperation = "week-day-name"
	// String
	CharStepOp RecipeStepOperation = "char" UnicodeStepOp RecipeStepOperation = "unicode" UpperStepOp RecipeStepOperation = "upper" LowerStepOp RecipeStepOperation = "lower" ProperStepOp RecipeStepOperation = "proper" TrimStepOp RecipeStepOperation = "trim" RemoveWhitespaceStepOp RecipeStepOperation = "remove-white-spaces" RemoveSymbolsStepOp RecipeStepOperation = "remove-symbols" LenStepOp RecipeStepOperation = "len" FindStepOp RecipeStepOperation = "find" RightFindStepOp RecipeStepOperation = "right-find" SubstringStepOp RecipeStepOperation = "substring" SubstitueStepOp RecipeStepOperation = "substitute" LeftStepOp RecipeStepOperation = "left" RightStepOp RecipeStepOperation = "right" PadStepOp RecipeStepOperation = "pad" MergeStringStepOp RecipeStepOperation = "merge-string" StartsWithStepOp RecipeStepOperation = "starts-with" EndsWithStepOp RecipeStepOperation = "ends-with" RepeatStepOp RecipeStepOperation = "repeat" ExactStepOp RecipeStepOperation = "exact" StringGreaterThanStepOp RecipeStepOperation = "string-greater-than" StringGreaterThanEqualStepOp RecipeStepOperation = "string-greater-equal" StringLessThanStepOp RecipeStepOperation = "string-less-than" StringLessThanEqualStepOp RecipeStepOperation = "string-less-than-equal" DoubleMetaphoneStepOp RecipeStepOperation = "double-metaphone" DoubleMetaphoneEqualsStepOp RecipeStepOperation = "double-metaphone-equals" TransliterateStepOp RecipeStepOperation = "transliterate" TrimQuotesStepOp RecipeStepOperation = "trim-quotes" Base64EncodeStepOp RecipeStepOperation = "base64-encode" Base64DecodeStepOp RecipeStepOperation = "base64-decode"
	// Type functions
	IfMissingStepOp RecipeStepOperation = "if-missing" IsMissingStepOp RecipeStepOperation = "is-missing" ParseIntStepOp RecipeStepOperation = "parse-int" ParseBoolStepOp RecipeStepOperation = "parse-bool" ParseFloatStepOp RecipeStepOperation = "parse-float"
	// windows functions
	PrevStepOp RecipeStepOperation = "prev" NextStepOp RecipeStepOperation = "next" FillStepOp RecipeStepOperation = "fill" RankStepOp RecipeStepOperation = "rank" DenseRankStepOp RecipeStepOperation = "dense-rank" RollingAvgStepOp RecipeStepOperation = "rolling-avg" RollingModeStepOp RecipeStepOperation = "rolling-mode" RollingMaxStepOp RecipeStepOperation = "rolling-max" RollingMinStepOp RecipeStepOperation = "rolling-min" RollingSumStepOp RecipeStepOperation = "rolling-sum" RollingStdDevStepOp RecipeStepOperation = "rolling-std-dev" RollingStdDevSampStepOp RecipeStepOperation = "rolling-std-dev-samp" RollingVarianceStepOp RecipeStepOperation = "rolling-variance" RollingVarianceSampStepOp RecipeStepOperation = "rolling-variance-samp" RollingCountAStepOp RecipeStepOperation = "rolling-counta" RollingKthLargestStepOp RecipeStepOperation = "rolling-k-largest" RollingKthLargestUniqueStepOp RecipeStepOperation = "rolling-k-largest-unique" RollingListStepOp RecipeStepOperation = "rolling-list" RowNumberStepOp RecipeStepOperation = "row-number" SessionStepOp RecipeStepOperation = "session"
	// other functions
	IpToIntStepOp RecipeStepOperation = "ip-to-int" IntToIpStepOp RecipeStepOperation = "int-to-ip" UrlParamsStepOp RecipeStepOperation = "url-params" COALESCEStepOp RecipeStepOperation = "coalesce" SourceRowNumberStepOp RecipeStepOperation = "source-row-number" IfStepOp RecipeStepOperation = "if" CaseStepOp RecipeStepOperation = "case" RangeStepOp RecipeStepOperation = "range" HostStepOp RecipeStepOperation = "host" DomainStepOp RecipeStepOperation = "domain" SubDomainStepOp RecipeStepOperation = "subdomain"
	// Basic cleaning
	DeleteStep RecipeStepOperation = "delete" DuplicateStep RecipeStepOperation = "duplicate" MoveAfterStep RecipeStepOperation = "move-after" MoveBeforeStep RecipeStepOperation = "move-before" MoveToEndStep RecipeStepOperation = "move-to-end" MoveToIndexStep RecipeStepOperation = "move-to-index" MoveToStartStep RecipeStepOperation = "move-to-start" RenameStep RecipeStepOperation = "rename" ToBoolColumnStep RecipeStepOperation = "to-boolean-column" ToDoubleColumnStep RecipeStepOperation = "to-double-column" ToNumberColumnStep RecipeStepOperation = "to-number-column" ToStringColumnStep RecipeStepOperation = "to-string-column"
	// Location cleaning
	CapitalCaseStepOp RecipeStepOperation = "capital-case" FormatDateStepOp RecipeStepOperation = "format-date" AddDoubleQuotesStepOp RecipeStepOperation = "add-double-quotes" AddPrefixStepOp RecipeStepOperation = "add-prefix" AddSingleQuotesStepOp RecipeStepOperation = "add-single-quotes" AddSuffixStepOp RecipeStepOperation = "add-suffix" ExtractBetweenDelimitersStepOp RecipeStepOperation = "extract-between-delimiters" ExtractBetweenPositionsStepOp RecipeStepOperation = "extract-between-position" ExtractPatternStepOp RecipeStepOperation = "extract-pattern" RemoveCombinedStepOp RecipeStepOperation = "remove-combined" ReplaceBetweenDelimitersStepOp RecipeStepOperation = "replace-between-delimiters" ReplaceBetweenPositionsStepOp RecipeStepOperation = "replace-between-positions" ReplaceTextStepOp RecipeStepOperation = "replace-text"
	// Location quality
	FillWithAvgStepOp RecipeStepOperation = "fill-with-average" FillWithCustomStepOp RecipeStepOperation = "fill-with-custom" FillWithEmptyStepOp RecipeStepOperation = "fill-with-empty" FillWithLastValidStepOp RecipeStepOperation = "fill-with-last-valid" FillWithMedianStepOp RecipeStepOperation = "fill-with-median" FillWithModeStepOp RecipeStepOperation = "fill-with-mode" FillWithMostFreqStepOp RecipeStepOperation = "fill-with-most-freq" FillWithNullStepOp RecipeStepOperation = "fill-with-null" FillWithSumStepOp RecipeStepOperation = "fill-with-sum" RemoveDuplicatesStepOp RecipeStepOperation = "remove-duplicates" RemoveMissingStepOp RecipeStepOperation = "remove-missing" FlagColumnFromNullStepOp RecipeStepOperation = "flag-column-from-null" FlagColumnFromPatternStepOp RecipeStepOperation = "flag-column-from-pattern" MergeStepOp RecipeStepOperation = "merge" SplitColumnsBetweenDelimiterStepOp RecipeStepOperation = "split-columns-between-delimiter" SplitColumnsBetweenPositionsStepOp RecipeStepOperation = "split-columns-between-positions" SplitColumnsFromEndStepOp RecipeStepOperation = "split-columns-from-end" SplitColumnsFromStartStepOp RecipeStepOperation = "split-columns-from-start" SplitColumnMultipleDelimiterStepOp RecipeStepOperation = "split-column-multiple-delimiter" SplitColumnSingleDelimiterStepOp RecipeStepOperation = "split-column-single-delimiter" SplitColumnWithIntervalsStepOp RecipeStepOperation = "split-column-with-intervals"
	// Location structure
	CatMappingStepOp RecipeStepOperation = "category-mapping" GroupByStepOp RecipeStepOperation = "group-by" JoinStepOp RecipeStepOperation = "join" OneHotEncodeStepOp RecipeStepOperation = "one-hot-encode" PivotStepOp RecipeStepOperation = "pivot" TokenizeStepOp RecipeStepOperation = "tokenize" UnionStepOp RecipeStepOperation = "union" UnpivotStepOp RecipeStepOperation = "unpivot"
)
type RecipeStepParam ¶
type RecipeStepParam struct {
	Name  string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	Value string `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"`
}
RecipeStepParam is a key value parameter of the recipe
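Since step parameters are plain name/value pairs, looking one up is a simple scan. The sketch below is illustrative only; the "column" parameter name and the import path are assumptions.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// paramValue returns the value of the named parameter on a step, or "" if absent.
func paramValue(step data.RecipeStep, name string) string {
	for _, p := range step.Parameters {
		if p != nil && p.Name == name {
			return p.Value
		}
	}
	return ""
}

func main() {
	step := data.RecipeStep{
		Op:         data.FillWithMedianStepOp,
		Parameters: []*data.RecipeStepParam{{Name: "column", Value: "age"}},
	}
	fmt.Println(paramValue(step, "column")) // age
}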
func (*RecipeStepParam) DeepCopy ¶
func (in *RecipeStepParam) DeepCopy() *RecipeStepParam
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeStepParam.
func (*RecipeStepParam) DeepCopyInto ¶
func (in *RecipeStepParam) DeepCopyInto(out *RecipeStepParam)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecipeStepParam) Descriptor ¶
func (*RecipeStepParam) Descriptor() ([]byte, []int)
func (*RecipeStepParam) Marshal ¶
func (m *RecipeStepParam) Marshal() (dAtA []byte, err error)
func (*RecipeStepParam) MarshalToSizedBuffer ¶
func (m *RecipeStepParam) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecipeStepParam) ProtoMessage ¶
func (*RecipeStepParam) ProtoMessage()
func (*RecipeStepParam) Reset ¶
func (m *RecipeStepParam) Reset()
func (*RecipeStepParam) Size ¶
func (m *RecipeStepParam) Size() (n int)
func (*RecipeStepParam) String ¶
func (this *RecipeStepParam) String() string
func (*RecipeStepParam) Unmarshal ¶
func (m *RecipeStepParam) Unmarshal(dAtA []byte) error
func (*RecipeStepParam) XXX_DiscardUnknown ¶
func (m *RecipeStepParam) XXX_DiscardUnknown()
func (*RecipeStepParam) XXX_Marshal ¶
func (m *RecipeStepParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecipeStepParam) XXX_Merge ¶
func (m *RecipeStepParam) XXX_Merge(src proto.Message)
func (*RecipeStepParam) XXX_Size ¶
func (m *RecipeStepParam) XXX_Size() int
func (*RecipeStepParam) XXX_Unmarshal ¶
func (m *RecipeStepParam) XXX_Unmarshal(b []byte) error
type RecommendationSchema ¶
type RecommendationSchema struct {
	// The name of the column that specifies user IDs (i.e. the primary key)
	// +kubebuilder:default:="user_id"
	// +kubebuilder:validation:Optional
	UserIDColumn *string `json:"userIDColumn,omitempty" protobuf:"bytes,1,opt,name=userIDColumn"`
	// The name of the column that specifies item IDs
	// +kubebuilder:default:="item_id"
	// +kubebuilder:validation:Optional
	ItemIDColumn *string `json:"itemIDColumn,omitempty" protobuf:"bytes,2,opt,name=itemIDColumn"`
	// The name of the column that specifies ratings
	// +kubebuilder:default:="rating"
	// +kubebuilder:validation:Optional
	RatingColumn *string `json:"ratingColumn,omitempty" protobuf:"bytes,3,opt,name=ratingColumn"`
}
func (*RecommendationSchema) DeepCopy ¶
func (in *RecommendationSchema) DeepCopy() *RecommendationSchema
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecommendationSchema.
func (*RecommendationSchema) DeepCopyInto ¶
func (in *RecommendationSchema) DeepCopyInto(out *RecommendationSchema)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RecommendationSchema) Descriptor ¶
func (*RecommendationSchema) Descriptor() ([]byte, []int)
func (*RecommendationSchema) Marshal ¶
func (m *RecommendationSchema) Marshal() (dAtA []byte, err error)
func (*RecommendationSchema) MarshalTo ¶
func (m *RecommendationSchema) MarshalTo(dAtA []byte) (int, error)
func (*RecommendationSchema) MarshalToSizedBuffer ¶
func (m *RecommendationSchema) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RecommendationSchema) ProtoMessage ¶
func (*RecommendationSchema) ProtoMessage()
func (*RecommendationSchema) Reset ¶
func (m *RecommendationSchema) Reset()
func (*RecommendationSchema) Size ¶
func (m *RecommendationSchema) Size() (n int)
func (*RecommendationSchema) String ¶
func (this *RecommendationSchema) String() string
func (*RecommendationSchema) Unmarshal ¶
func (m *RecommendationSchema) Unmarshal(dAtA []byte) error
func (*RecommendationSchema) XXX_DiscardUnknown ¶
func (m *RecommendationSchema) XXX_DiscardUnknown()
func (*RecommendationSchema) XXX_Marshal ¶
func (m *RecommendationSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RecommendationSchema) XXX_Merge ¶
func (m *RecommendationSchema) XXX_Merge(src proto.Message)
func (*RecommendationSchema) XXX_Size ¶
func (m *RecommendationSchema) XXX_Size() int
func (*RecommendationSchema) XXX_Unmarshal ¶
func (m *RecommendationSchema) XXX_Unmarshal(b []byte) error
type RelationshipSpec ¶
type RelationshipSpec struct {
	// The name of the relationship
	// +kubebuilder:validation:Required
	// +required
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=type"`
	// The name of the column that holds the foreign key
	Column string `json:"columns,omitempty" protobuf:"bytes,2,opt,name=column"`
	// The relationship arity
	Arity *catalog.RelationshipArity `json:"arity,omitempty" protobuf:"bytes,3,opt,name=arity"`
	// The name of the other DataSource object
	// +kubebuilder:validation:Required
	// +required
	RelatesTo string `json:"relatesTo,omitempty" protobuf:"bytes,4,opt,name=relatesTo"`
}

RelationshipSpec defines a relationship between two DataSource objects
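A minimal sketch of declaring a foreign-key relationship between two DataSource objects. The names are illustrative, the import path is an assumption, and Arity is left nil because the catalog.RelationshipArity values are not listed in this section.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	rel := data.RelationshipSpec{
		Name:      "orders-to-customers", // illustrative relationship name
		Column:    "customer_id",         // the foreign-key column on this data source
		RelatesTo: "customers",           // the name of the other DataSource object
		// Arity is omitted; its catalog.RelationshipArity constants are not shown here.
	}
	fmt.Printf("%+v\n", rel)
}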
func (*RelationshipSpec) DeepCopy ¶
func (in *RelationshipSpec) DeepCopy() *RelationshipSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RelationshipSpec.
func (*RelationshipSpec) DeepCopyInto ¶
func (in *RelationshipSpec) DeepCopyInto(out *RelationshipSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RelationshipSpec) Descriptor ¶
func (*RelationshipSpec) Descriptor() ([]byte, []int)
func (*RelationshipSpec) Marshal ¶
func (m *RelationshipSpec) Marshal() (dAtA []byte, err error)
func (*RelationshipSpec) MarshalToSizedBuffer ¶
func (m *RelationshipSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*RelationshipSpec) ProtoMessage ¶
func (*RelationshipSpec) ProtoMessage()
func (*RelationshipSpec) Reset ¶
func (m *RelationshipSpec) Reset()
func (*RelationshipSpec) Size ¶
func (m *RelationshipSpec) Size() (n int)
func (*RelationshipSpec) String ¶
func (this *RelationshipSpec) String() string
func (*RelationshipSpec) Unmarshal ¶
func (m *RelationshipSpec) Unmarshal(dAtA []byte) error
func (*RelationshipSpec) XXX_DiscardUnknown ¶
func (m *RelationshipSpec) XXX_DiscardUnknown()
func (*RelationshipSpec) XXX_Marshal ¶
func (m *RelationshipSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*RelationshipSpec) XXX_Merge ¶
func (m *RelationshipSpec) XXX_Merge(src proto.Message)
func (*RelationshipSpec) XXX_Size ¶
func (m *RelationshipSpec) XXX_Size() int
func (*RelationshipSpec) XXX_Unmarshal ¶
func (m *RelationshipSpec) XXX_Unmarshal(b []byte) error
type RowSpec ¶
type RowSpec struct {
Cols []ColumnSpec `json:"cols,omitempty" protobuf:"bytes,1,rep,name=cols"`
}
func (*RowSpec) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RowSpec.
func (*RowSpec) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RowSpec) Descriptor ¶
func (*RowSpec) MarshalToSizedBuffer ¶
func (*RowSpec) ProtoMessage ¶
func (*RowSpec) ProtoMessage()
func (*RowSpec) XXX_DiscardUnknown ¶
func (m *RowSpec) XXX_DiscardUnknown()
func (*RowSpec) XXX_Marshal ¶
func (*RowSpec) XXX_Unmarshal ¶
type SampleSpec ¶
type SampleSpec struct {
	// Indicates if sampling is enabled
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"`
	// The type of sampling (random sampling, by default)
	// +kubebuilder:default:="random"
	// +kubebuilder:validation:Optional
	Type catalog.SamplingType `json:"type,omitempty" protobuf:"bytes,2,opt,name=type"`
	// The number of rows to sample (by default, 500)
	// +kubebuilder:default:=500
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	Rows *int32 `json:"rows,omitempty" protobuf:"varint,3,opt,name=rows"`
	// The percentage of rows to sample
	// +kubebuilder:default:=100
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	Pct *int32 `json:"percent,omitempty" protobuf:"varint,4,opt,name=percent"`
	// The filter formula, valid only if the sample type is a filter
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Filter *string `json:"filter,omitempty" protobuf:"bytes,5,opt,name=filter"`
	// The name of the column to be used for stratified sampling
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Column *string `json:"column,omitempty" protobuf:"bytes,6,opt,name=column"`
}
SampleSpec specifies how the contents of a dataset should be sampled
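A minimal sketch of filling in a SampleSpec for a 1000-row sample; Column is set as it would be for stratified sampling, but Type is left at its default because the catalog.SamplingType constants are not listed in this section. The import path is an assumption.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	enabled := true
	rows := int32(1000)
	column := "country" // illustrative stratification column
	sample := data.SampleSpec{
		Enabled: &enabled,
		Rows:    &rows,
		Column:  &column,
		// Type is left at its default (random); the catalog.SamplingType values are not shown here.
	}
	fmt.Printf("%+v\n", sample)
}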
func (*SampleSpec) DeepCopy ¶
func (in *SampleSpec) DeepCopy() *SampleSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SampleSpec.
func (*SampleSpec) DeepCopyInto ¶
func (in *SampleSpec) DeepCopyInto(out *SampleSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*SampleSpec) Descriptor ¶
func (*SampleSpec) Descriptor() ([]byte, []int)
func (*SampleSpec) Marshal ¶
func (m *SampleSpec) Marshal() (dAtA []byte, err error)
func (*SampleSpec) MarshalToSizedBuffer ¶
func (m *SampleSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*SampleSpec) ProtoMessage ¶
func (*SampleSpec) ProtoMessage()
func (*SampleSpec) Reset ¶
func (m *SampleSpec) Reset()
func (*SampleSpec) Size ¶
func (m *SampleSpec) Size() (n int)
func (*SampleSpec) String ¶
func (this *SampleSpec) String() string
func (*SampleSpec) Unmarshal ¶
func (m *SampleSpec) Unmarshal(dAtA []byte) error
func (*SampleSpec) XXX_DiscardUnknown ¶
func (m *SampleSpec) XXX_DiscardUnknown()
func (*SampleSpec) XXX_Marshal ¶
func (m *SampleSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*SampleSpec) XXX_Merge ¶
func (m *SampleSpec) XXX_Merge(src proto.Message)
func (*SampleSpec) XXX_Size ¶
func (m *SampleSpec) XXX_Size() int
func (*SampleSpec) XXX_Unmarshal ¶
func (m *SampleSpec) XXX_Unmarshal(b []byte) error
type ScatterPlotSpec ¶
type ScatterPlotSpec struct {
	// DatasetName is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// The name of the X axis column
	// +kubebuilder:validation:Optional
	X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"`
	// The name of the Y axis column
	// +kubebuilder:validation:Optional
	Y *string `json:"y,omitempty" protobuf:"bytes,3,opt,name=y"`
}
func (*ScatterPlotSpec) DeepCopy ¶
func (in *ScatterPlotSpec) DeepCopy() *ScatterPlotSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScatterPlotSpec.
func (*ScatterPlotSpec) DeepCopyInto ¶
func (in *ScatterPlotSpec) DeepCopyInto(out *ScatterPlotSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ScatterPlotSpec) Descriptor ¶
func (*ScatterPlotSpec) Descriptor() ([]byte, []int)
func (*ScatterPlotSpec) Marshal ¶
func (m *ScatterPlotSpec) Marshal() (dAtA []byte, err error)
func (*ScatterPlotSpec) MarshalToSizedBuffer ¶
func (m *ScatterPlotSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*ScatterPlotSpec) ProtoMessage ¶
func (*ScatterPlotSpec) ProtoMessage()
func (*ScatterPlotSpec) Reset ¶
func (m *ScatterPlotSpec) Reset()
func (*ScatterPlotSpec) Size ¶
func (m *ScatterPlotSpec) Size() (n int)
func (*ScatterPlotSpec) String ¶
func (this *ScatterPlotSpec) String() string
func (*ScatterPlotSpec) Unmarshal ¶
func (m *ScatterPlotSpec) Unmarshal(dAtA []byte) error
func (*ScatterPlotSpec) XXX_DiscardUnknown ¶
func (m *ScatterPlotSpec) XXX_DiscardUnknown()
func (*ScatterPlotSpec) XXX_Marshal ¶
func (m *ScatterPlotSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*ScatterPlotSpec) XXX_Merge ¶
func (m *ScatterPlotSpec) XXX_Merge(src proto.Message)
func (*ScatterPlotSpec) XXX_Size ¶
func (m *ScatterPlotSpec) XXX_Size() int
func (*ScatterPlotSpec) XXX_Unmarshal ¶
func (m *ScatterPlotSpec) XXX_Unmarshal(b []byte) error
type Schema ¶
type Schema struct {
    // The time-series schema, which sets time-series specific parameters
    // +kubebuilder:validation:Optional
    TimeSeriesSchema TimeSeriesSchema `json:"timeSeriesSchema,omitempty" protobuf:"bytes,1,opt,name=timeSeriesSchema"`
    // The recommendation schema, which is used for the recommendation ML task
    // +kubebuilder:validation:Optional
    RecommendationSchema RecommendationSchema `json:"recommendationSchema,omitempty" protobuf:"bytes,2,opt,name=recommendationSchema"`
    // The collection of columns and their attributes
    // +kubebuilder:validation:Optional
    Columns []Column `json:"columns,omitempty" protobuf:"bytes,3,rep,name=columns"`
    // The key columns form the index of the file or table. The set of keys will be used
    // as an index for the in-memory representation (e.g. pandas)
    // +kubebuilder:validation:Optional
    Key []string `json:"key,omitempty" protobuf:"bytes,4,rep,name=key"`
}
Schema defines the column-level format and validation rules for data associated with a DataSource
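The sketch below assembles a Schema whose in-memory index is built from two hypothetical key columns; the column definitions themselves are left empty (see the Column type documented above), and the import path is an assumption.

package main

import (
    "fmt"

    data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
    // A schema keyed on two hypothetical columns; column definitions omitted.
    schema := data.Schema{
        Columns: []data.Column{},
        Key:     []string{"customer_id", "order_id"},
    }
    fmt.Printf("columns: %d, key: %v\n", len(schema.Columns), schema.Key)
}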
func (*Schema) DeepCopy ¶
func (in *Schema) DeepCopy() *Schema
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Schema.
func (*Schema) DeepCopyInto ¶
func (in *Schema) DeepCopyInto(out *Schema)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Schema) Descriptor ¶
func (*Schema) Descriptor() ([]byte, []int)
func (*Schema) MarshalToSizedBuffer ¶
func (m *Schema) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*Schema) ProtoMessage ¶
func (*Schema) ProtoMessage()
func (*Schema) XXX_DiscardUnknown ¶
func (m *Schema) XXX_DiscardUnknown()
func (*Schema) XXX_Marshal ¶
func (m *Schema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*Schema) XXX_Unmarshal ¶
func (m *Schema) XXX_Unmarshal(b []byte) error
type SyntheticSpec ¶ added in v0.5.177
type SyntheticSpec struct {
    // Indicates if synthetic data generation is enabled
    // +kubebuilder:validation:Optional
    Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"`
    // The number of synthetic rows to generate
    // +kubebuilder:default:=0
    // +kubebuilder:validation:Optional
    Rows *int32 `json:"rows,omitempty" protobuf:"varint,2,opt,name=rows"`
}
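A minimal sketch of enabling synthetic data generation for 1,000 rows (an illustrative value); the import path is assumed, as it is not stated on this page.

package main

import (
    "fmt"

    data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func boolPtr(b bool) *bool    { return &b }
func int32Ptr(i int32) *int32 { return &i }

func main() {
    // Generate 1,000 synthetic rows (illustrative value).
    synth := data.SyntheticSpec{
        Enabled: boolPtr(true),
        Rows:    int32Ptr(1000),
    }
    fmt.Printf("%+v\n", synth)
}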
func (*SyntheticSpec) DeepCopy ¶ added in v0.5.177
func (in *SyntheticSpec) DeepCopy() *SyntheticSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SyntheticSpec.
func (*SyntheticSpec) DeepCopyInto ¶ added in v0.5.177
func (in *SyntheticSpec) DeepCopyInto(out *SyntheticSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*SyntheticSpec) Descriptor ¶ added in v0.5.177
func (*SyntheticSpec) Descriptor() ([]byte, []int)
func (*SyntheticSpec) Marshal ¶ added in v0.5.177
func (m *SyntheticSpec) Marshal() (dAtA []byte, err error)
func (*SyntheticSpec) MarshalTo ¶ added in v0.5.177
func (m *SyntheticSpec) MarshalTo(dAtA []byte) (int, error)
func (*SyntheticSpec) MarshalToSizedBuffer ¶ added in v0.5.177
func (m *SyntheticSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*SyntheticSpec) ProtoMessage ¶ added in v0.5.177
func (*SyntheticSpec) ProtoMessage()
func (*SyntheticSpec) Reset ¶ added in v0.5.177
func (m *SyntheticSpec) Reset()
func (*SyntheticSpec) Size ¶ added in v0.5.177
func (m *SyntheticSpec) Size() (n int)
func (*SyntheticSpec) String ¶ added in v0.5.177
func (this *SyntheticSpec) String() string
func (*SyntheticSpec) Unmarshal ¶ added in v0.5.177
func (m *SyntheticSpec) Unmarshal(dAtA []byte) error
func (*SyntheticSpec) XXX_DiscardUnknown ¶ added in v0.5.177
func (m *SyntheticSpec) XXX_DiscardUnknown()
func (*SyntheticSpec) XXX_Marshal ¶ added in v0.5.177
func (m *SyntheticSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*SyntheticSpec) XXX_Merge ¶ added in v0.5.177
func (m *SyntheticSpec) XXX_Merge(src proto.Message)
func (*SyntheticSpec) XXX_Size ¶ added in v0.5.177
func (m *SyntheticSpec) XXX_Size() int
func (*SyntheticSpec) XXX_Unmarshal ¶ added in v0.5.177
func (m *SyntheticSpec) XXX_Unmarshal(b []byte) error
type TableSpec ¶
type TableSpec struct {
    // The name of the dataset
    // +kubebuilder:validation:Optional
    DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
    // The list of table columns; if empty, all columns are used
    // +kubebuilder:validation:Optional
    Columns []string `json:"columns,omitempty" protobuf:"bytes,2,rep,name=columns"`
    // One or more filters
    // +kubebuilder:validation:Optional
    Filters []string `json:"filters,omitempty" protobuf:"bytes,3,rep,name=filters"`
    // The columns to group by
    // +kubebuilder:validation:Optional
    GroupBy []string `json:"groupby,omitempty" protobuf:"bytes,4,rep,name=groupby"`
    // The number of rows to display
    // +kubebuilder:validation:Optional
    Rows *int32 `json:"rows,omitempty" protobuf:"varint,5,opt,name=rows"`
    // Indicates if the index column should be shown
    // +kubebuilder:default:=false
    // +kubebuilder:validation:Optional
    ShowIndex *bool `json:"showIndex,omitempty" protobuf:"varint,6,opt,name=showIndex"`
    // Indicates if the table border should be shown
    // +kubebuilder:default:=false
    // +kubebuilder:validation:Optional
    Border *bool `json:"border,omitempty" protobuf:"varint,7,opt,name=border"`
}
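The sketch below fills a TableSpec that renders the first 20 rows of two hypothetical columns grouped by region, with the index column and a border shown; the dataset and column names and the import path are assumptions for illustration only.

package main

import (
    "fmt"

    data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func boolPtr(b bool) *bool    { return &b }
func int32Ptr(i int32) *int32 { return &i }
func strPtr(s string) *string { return &s }

func main() {
    // Show 20 rows of two hypothetical columns, grouped by region.
    table := data.TableSpec{
        DatasetName: strPtr("sales"),
        Columns:     []string{"region", "revenue"},
        GroupBy:     []string{"region"},
        Rows:        int32Ptr(20),
        ShowIndex:   boolPtr(true),
        Border:      boolPtr(true),
    }
    fmt.Printf("%+v\n", table)
}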
func (*TableSpec) DeepCopy ¶
func (in *TableSpec) DeepCopy() *TableSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpec.
func (*TableSpec) DeepCopyInto ¶
func (in *TableSpec) DeepCopyInto(out *TableSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*TableSpec) Descriptor ¶
func (*TableSpec) Descriptor() ([]byte, []int)
func (*TableSpec) MarshalToSizedBuffer ¶
func (m *TableSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*TableSpec) ProtoMessage ¶
func (*TableSpec) ProtoMessage()
func (*TableSpec) XXX_DiscardUnknown ¶
func (m *TableSpec) XXX_DiscardUnknown()
func (*TableSpec) XXX_Marshal ¶
func (m *TableSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*TableSpec) XXX_Unmarshal ¶
func (m *TableSpec) XXX_Unmarshal(b []byte) error
type TimeSeriesSchema ¶
type TimeSeriesSchema struct {
    // The time series type
    // +kubebuilder:default:="series"
    // +kubebuilder:validation:Optional
    Type *catalog.TimeSeriesType `json:"type,omitempty" protobuf:"bytes,1,rep,name=type"`
    // The time series frequency
    // +kubebuilder:default:="day"
    // +kubebuilder:validation:Optional
    Freq *catalog.Freq `json:"freq,omitempty" protobuf:"bytes,2,opt,name=freq"`
    // The interval to forecast at this level
    // +kubebuilder:default:=1
    // +kubebuilder:validation:Optional
    Interval *int32 `json:"interval,omitempty" protobuf:"varint,3,opt,name=interval"`
}
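A minimal sketch that mirrors the kubebuilder defaults above (a daily series forecast at an interval of 1); the import paths and the assumption that catalog.TimeSeriesType and catalog.Freq are string-based types are not confirmed by this page.

package main

import (
    "fmt"

    catalog "github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1" // assumed import path
    data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1"       // assumed import path
)

func int32Ptr(i int32) *int32 { return &i }

func main() {
    // Daily time series, forecast interval of 1, matching the documented defaults.
    tsType := catalog.TimeSeriesType("series") // assumes TimeSeriesType is a string-based type
    freq := catalog.Freq("day")                // assumes Freq is a string-based type
    ts := data.TimeSeriesSchema{
        Type:     &tsType,
        Freq:     &freq,
        Interval: int32Ptr(1),
    }
    fmt.Printf("%+v\n", ts)
}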
func (*TimeSeriesSchema) DeepCopy ¶
func (in *TimeSeriesSchema) DeepCopy() *TimeSeriesSchema
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TimeSeriesSchema.
func (*TimeSeriesSchema) DeepCopyInto ¶
func (in *TimeSeriesSchema) DeepCopyInto(out *TimeSeriesSchema)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*TimeSeriesSchema) Descriptor ¶
func (*TimeSeriesSchema) Descriptor() ([]byte, []int)
func (*TimeSeriesSchema) Marshal ¶
func (m *TimeSeriesSchema) Marshal() (dAtA []byte, err error)
func (*TimeSeriesSchema) MarshalToSizedBuffer ¶
func (m *TimeSeriesSchema) MarshalToSizedBuffer(dAtA []byte) (int, error)
func (*TimeSeriesSchema) ProtoMessage ¶
func (*TimeSeriesSchema) ProtoMessage()
func (*TimeSeriesSchema) Reset ¶
func (m *TimeSeriesSchema) Reset()
func (*TimeSeriesSchema) Size ¶
func (m *TimeSeriesSchema) Size() (n int)
func (*TimeSeriesSchema) String ¶
func (this *TimeSeriesSchema) String() string
func (*TimeSeriesSchema) Unmarshal ¶
func (m *TimeSeriesSchema) Unmarshal(dAtA []byte) error
func (*TimeSeriesSchema) XXX_DiscardUnknown ¶
func (m *TimeSeriesSchema) XXX_DiscardUnknown()
func (*TimeSeriesSchema) XXX_Marshal ¶
func (m *TimeSeriesSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
func (*TimeSeriesSchema) XXX_Merge ¶
func (m *TimeSeriesSchema) XXX_Merge(src proto.Message)
func (*TimeSeriesSchema) XXX_Size ¶
func (m *TimeSeriesSchema) XXX_Size() int
func (*TimeSeriesSchema) XXX_Unmarshal ¶
func (m *TimeSeriesSchema) XXX_Unmarshal(b []byte) error
Source Files ¶
- datapipeline_lifecycle.go
- datapipeline_types.go
- datapipeline_webhook.go
- datapipelinerun_lifecycle.go
- datapipelinerun_types.go
- datapipelinerun_webhook.go
- dataproduct_lifecycle.go
- dataproduct_types.go
- dataproduct_webhook.go
- dataproductversion_lifecycle.go
- dataproductversion_types.go
- dataproductversion_webhook.go
- dataset_lifecycle.go
- dataset_types.go
- dataset_webhook.go
- datasource_lifecycle.go
- datasource_types.go
- datasource_webhook.go
- doc.go
- entity_lifecycle.go
- entity_types.go
- entity_webhook.go
- featuegroup_types.go
- feature_histogram_lifecycle.go
- feature_histogram_types.go
- feature_histogram_webhook.go
- featuregroup_lifecycle.go
- featuregroup_webhook.go
- generated.pb.go
- recipe_lifecycle.go
- recipe_types.go
- recipe_webhook.go
- reciperun_lifecycle.go
- reciperun_types.go
- reciperun_webhook.go
- register.go
- reports_common_types.go
- zz_generated.deepcopy.go