package v1alpha1

Version v0.4.951 (not the latest version of its module)
Published: Jul 7, 2022 License: Apache-2.0 Imports: 29 Imported by: 0

Documentation

Overview

Copyright (c) 2020 Metaprov.com

+kubebuilder:object:generate=true +groupName=data.modela.ai

Index

Constants

const (
	MultiDatasetSameNumberOfRows      catalog.AssertionType = "multi-dataset-same-number-of-rows"
	MultiDatasetOuterJoinEmpty        catalog.AssertionType = "multi-dataset-outer-join-empty"
	MultiDatasetOuterJoinNotEmpty     catalog.AssertionType = "multi-dataset-outer-join-not-empty"
	MultiDatasetInnerJoinEmpty        catalog.AssertionType = "multi-dataset-inner-join-empty"
	MultiDatasetInnerJoinNotEmpty     catalog.AssertionType = "multi-dataset-inner-join-not-empty"
	MultiDatasetLeftJoinEmpty         catalog.AssertionType = "multi-dataset-left-join-empty"
	MultiDatasetLeftJoinNotEmpty      catalog.AssertionType = "multi-dataset-left-join-not-empty"
	MultiDatasetRightJoinEmpty        catalog.AssertionType = "multi-dataset-right-join-empty"
	MultiDatasetRightJoinNotEmpty     catalog.AssertionType = "multi-dataset-right-join-not-empty"
	DatasetColumnsCountEqual          catalog.AssertionType = "dataset-columns-count-equal"
	DatasetColumnsNameInSet           catalog.AssertionType = "dataset-columns-in-set"
	DatasetColumnsInOrderedList       catalog.AssertionType = "dataset-columns-in-ordered-list"
	DatasetRowCountBetween            catalog.AssertionType = "dataset-row-count-between"
	DatasetNotEmpty                   catalog.AssertionType = "dataset-dataset-not-empty"
	DatasetTestNameNameEmpty          catalog.AssertionType = "dataset-empty"
	MultiColumnCorr                   catalog.AssertionType = "multi-column-corr"
	ColumnTestNameColumnExist         catalog.AssertionType = "column-exist"
	ColumnHaveValues                  catalog.AssertionType = "column-have-values"
	ColumnHasNoValue                  catalog.AssertionType = "column-has-no-values"
	ColumnHaveNulls                   catalog.AssertionType = "column-value-have-nulls"
	ColumnHasNoNull                   catalog.AssertionType = "column-value-has-no-nulls"
	ColumnOfType                      catalog.AssertionType = "column-of-type"
	ColumnValuesInSet                 catalog.AssertionType = "column-values-in-set"
	ColumnValuesIncreasing            catalog.AssertionType = "column-values-increasing"
	ColumnsValuesDecreasing           catalog.AssertionType = "column-values-decreasing"
	ColumnValueLengthBetween          catalog.AssertionType = "column-value-length-between"
	ColumnValueNameMatchRegex         catalog.AssertionType = "column-value-match-regex"
	ColumnValueIsDate                 catalog.AssertionType = "column-value-is-date"
	ColumnValueIsJson                 catalog.AssertionType = "column-value-is-json"
	ColumnValueInDomain               catalog.AssertionType = "column-value-in-domain"
	ColumnUniqueValueCountBetween     catalog.AssertionType = "column-unique-value-count-between"
	ColumnOutlierValueUniqueBetween   catalog.AssertionType = "column-outlier-value-count-between"
	ColumnValidValueUniqueBetween     catalog.AssertionType = "column-valid-values-count-between"
	ColumnMismatchValueBetween        catalog.AssertionType = "column-mismatch-values-between"
	ColumnValueMinBetween             catalog.AssertionType = "column-value-min-between"
	ColumnValueLowerQuartileBetween   catalog.AssertionType = "column-value-lower-quartile-between"
	ColumnValueMedianBetween          catalog.AssertionType = "column-value-median-between"
	ColumnValueAvgBetween             catalog.AssertionType = "column-value-average-between"
	ColumnValueUpperQuartileBetween   catalog.AssertionType = "column-value-upper-quartile-between"
	ColumnValueMaxBetween             catalog.AssertionType = "column-value-max-between"
	ColumnValueStddevBetween          catalog.AssertionType = "column-value-stddev-between"
	ColumnValueChiSquarePValueBetween catalog.AssertionType = "column-value-chi-square-p-value-between"
	ColumnValuePairCramersBetween     catalog.AssertionType = "column-value-pair-cramers-between"
	FileSizeBetween                   catalog.AssertionType = "file-size-between"
	FileExist                         catalog.AssertionType = "file-exist"
	FileRegexMatchCountBetween        catalog.AssertionType = "file-regex-match-count-between"
	FileValidJson                     catalog.AssertionType = "file-valid-json"
	FileValidCsv                      catalog.AssertionType = "file-valid-csv"
)
const SqlQueryLabelKey = "sqlquery"
const WebQueryLabelKey = "sqlquery"

Variables

var (
	ErrInvalidLengthGenerated        = fmt.Errorf("proto: negative length found during unmarshaling")
	ErrIntOverflowGenerated          = fmt.Errorf("proto: integer overflow")
	ErrUnexpectedEndOfGroupGenerated = fmt.Errorf("proto: unexpected end of group")
)
var (
	// SchemeBuilder registers our types
	SchemeBuilder = k8sruntime.NewSchemeBuilder(AddKnownTypes)
	// AddToScheme local alias for SchemeBuilder.AddToScheme
	AddToScheme = SchemeBuilder.AddToScheme
)
var SchemeGroupVersion = schema.GroupVersion{Group: data.GroupName, Version: "v1alpha1"}

SchemeGroupVersion is group version used to register these objects
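A minimal sketch of registering this group's types with a runtime scheme via AddToScheme; the import path follows the published module layout, and the surrounding client setup is assumed.

import (
	"k8s.io/apimachinery/pkg/runtime"

	datav1alpha1 "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1"
)

func newScheme() (*runtime.Scheme, error) {
	scheme := runtime.NewScheme()
	// AddToScheme registers every known type of SchemeGroupVersion
	// (data.modela.ai/v1alpha1) into the scheme.
	if err := datav1alpha1.AddToScheme(scheme); err != nil {
		return nil, err
	}
	return scheme, nil
}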

Functions

func AddKnownTypes

func AddKnownTypes(scheme *k8sruntime.Scheme) error

Adds the list of known types to api.Scheme.

func Kind

func Kind(kind string) schema.GroupKind

Kind takes an unqualified kind and returns a Group-qualified GroupKind

func Resource

func Resource(resource string) schema.GroupResource

Resource takes an unqualified resource and returns a Group-qualified GroupResource
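A short sketch (inside a function) of how Kind and Resource qualify names with the data.modela.ai group; the "Dataset"/"datasets" names are illustrative, not taken from this documentation.

gk := Kind("Dataset")      // schema.GroupKind{Group: "data.modela.ai", Kind: "Dataset"}
gr := Resource("datasets") // schema.GroupResource{Group: "data.modela.ai", Resource: "datasets"}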

Types

type AggregationSpec

type AggregationSpec struct {
	SlidePeriod  string            `json:"slidePeriod,omitempty" protobuf:"bytes,1,opt,name=slidePeriod"`
	Aggregations []FeatureAggrSpec `json:"aggregations,omitempty" protobuf:"bytes,2,rep,name=aggregations"`
}

AggregationSpec defines the sliding aggregation period and the feature aggregations to compute.

func (*AggregationSpec) DeepCopy

func (in *AggregationSpec) DeepCopy() *AggregationSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AggregationSpec.

func (*AggregationSpec) DeepCopyInto

func (in *AggregationSpec) DeepCopyInto(out *AggregationSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*AggregationSpec) Descriptor

func (*AggregationSpec) Descriptor() ([]byte, []int)

func (*AggregationSpec) Marshal

func (m *AggregationSpec) Marshal() (dAtA []byte, err error)

func (*AggregationSpec) MarshalTo

func (m *AggregationSpec) MarshalTo(dAtA []byte) (int, error)

func (*AggregationSpec) MarshalToSizedBuffer

func (m *AggregationSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*AggregationSpec) ProtoMessage

func (*AggregationSpec) ProtoMessage()

func (*AggregationSpec) Reset

func (m *AggregationSpec) Reset()

func (*AggregationSpec) Size

func (m *AggregationSpec) Size() (n int)

func (*AggregationSpec) String

func (this *AggregationSpec) String() string

func (*AggregationSpec) Unmarshal

func (m *AggregationSpec) Unmarshal(dAtA []byte) error

func (*AggregationSpec) XXX_DiscardUnknown

func (m *AggregationSpec) XXX_DiscardUnknown()

func (*AggregationSpec) XXX_Marshal

func (m *AggregationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*AggregationSpec) XXX_Merge

func (m *AggregationSpec) XXX_Merge(src proto.Message)

func (*AggregationSpec) XXX_Size

func (m *AggregationSpec) XXX_Size() int

func (*AggregationSpec) XXX_Unmarshal

func (m *AggregationSpec) XXX_Unmarshal(b []byte) error

type ApprovalType

type ApprovalType string
const (
	ApprovalTypeApproved ApprovalType = "approved"
	ApprovalTypeReject   ApprovalType = "reject"
)

type BarChartSpec

type BarChartSpec struct {
	// Dataset is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// The name of the X column
	// +kubebuilder:validation:Optional
	X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"`
	// The name of the Y column
	// +kubebuilder:validation:Optional
	Y *string `json:"y,omitempty" protobuf:"bytes,3,opt,name=y"`
	// Show the legend
	// +kubebuilder:validation:Optional
	Legend *bool `json:"legend,omitempty" protobuf:"varint,4,opt,name=legend"`
	// Sort the bars
	// +kubebuilder:validation:Optional
	Sort *bool `json:"sort,omitempty" protobuf:"varint,5,opt,name=sort"`
}

BarChartSpec specifies the configuration of a bar chart.
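A hedged sketch of populating a BarChartSpec, written as a fragment inside this package; the dataset and column names are illustrative, and the small pointer helpers are local to the example.

strp := func(s string) *string { return &s }
boolp := func(b bool) *bool { return &b }

chart := BarChartSpec{
	DatasetName: strp("sales-q1"), // illustrative dataset name
	X:           strp("region"),   // X axis column
	Y:           strp("revenue"),  // Y axis column
	Legend:      boolp(true),      // show the legend
	Sort:        boolp(true),      // sort the bars
}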

func (*BarChartSpec) DeepCopy

func (in *BarChartSpec) DeepCopy() *BarChartSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BarChartSpec.

func (*BarChartSpec) DeepCopyInto

func (in *BarChartSpec) DeepCopyInto(out *BarChartSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*BarChartSpec) Descriptor

func (*BarChartSpec) Descriptor() ([]byte, []int)

func (*BarChartSpec) Marshal

func (m *BarChartSpec) Marshal() (dAtA []byte, err error)

func (*BarChartSpec) MarshalTo

func (m *BarChartSpec) MarshalTo(dAtA []byte) (int, error)

func (*BarChartSpec) MarshalToSizedBuffer

func (m *BarChartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*BarChartSpec) ProtoMessage

func (*BarChartSpec) ProtoMessage()

func (*BarChartSpec) Reset

func (m *BarChartSpec) Reset()

func (*BarChartSpec) Size

func (m *BarChartSpec) Size() (n int)

func (*BarChartSpec) String

func (this *BarChartSpec) String() string

func (*BarChartSpec) Unmarshal

func (m *BarChartSpec) Unmarshal(dAtA []byte) error

func (*BarChartSpec) XXX_DiscardUnknown

func (m *BarChartSpec) XXX_DiscardUnknown()

func (*BarChartSpec) XXX_Marshal

func (m *BarChartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*BarChartSpec) XXX_Merge

func (m *BarChartSpec) XXX_Merge(src proto.Message)

func (*BarChartSpec) XXX_Size

func (m *BarChartSpec) XXX_Size() int

func (*BarChartSpec) XXX_Unmarshal

func (m *BarChartSpec) XXX_Unmarshal(b []byte) error

type Column

type Column struct {
	// The name of the column
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:MinLength=1
	Name string `json:"name" protobuf:"bytes,1,opt,name=name"`
	// The display name of the column, which is used in reports and other visual elements. If omitted, it will use the raw name
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Optional
	DisplayName *string `json:"displayName,omitempty" protobuf:"bytes,2,opt,name=displayName"`
	// The data type of the feature (e.g. number, string, boolean, etc.)
	DataType catalog.DataType `json:"datatype" protobuf:"bytes,3,opt,name=datatype"`
	// The data domain of the feature, which constrains the contents of the feature to a specific set of values
	// +kubebuilder:validation:Optional
	Format *catalog.DataDomain `json:"format,omitempty" protobuf:"bytes,4,opt,name=format"`
	// The user-specified description of the feature
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,5,opt,name=description"`
	// Indicates if the feature should be ignored when building models
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Ignore *bool `json:"ignore,omitempty" protobuf:"varint,6,opt,name=ignore"`
	// Indicates if the feature is the target feature of the model, i.e. the feature on which predictions will be made
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Target *bool `json:"target,omitempty" protobuf:"varint,7,opt,name=target"`
	// Indicates if the column can contain null values
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Nullable *bool `json:"nullable,omitempty" protobuf:"varint,8,opt,name=nullable"`
	// Denotes if the column specifies a primary key of a database table (e.g. a user's ID)
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	PK *bool `json:"pk,omitempty" protobuf:"varint,9,opt,name=pk"`
	// Denotes if the column specifies a foreign key of another database table
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	FK *bool `json:"fk,omitempty" protobuf:"varint,10,opt,name=fk"`
	// The integer value which the values of the column should be a multiple of
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MultipleOf *int32 `json:"multipleOf,omitempty" protobuf:"varint,11,opt,name=multipleOf"`
	// The maximum value of all values in the column
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	Maximum *float64 `json:"maximum,omitempty" protobuf:"bytes,12,opt,name=maximum"`
	// The exclusive upper limit of all values in the column, which does not include the maximum value
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	ExclusiveMaximum *bool `json:"exclusiveMaximum,omitempty" protobuf:"varint,13,opt,name=exclusiveMaximum"`
	// The minimum value of all values in the column
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	Minimum *float64 `json:"minimum,omitempty" protobuf:"bytes,14,opt,name=minimum"`
	// The exclusive lower limit of all values in the column, which does not include the minimum value
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	ExclusiveMinimum *bool `json:"exclusiveMinimum,omitempty" protobuf:"varint,15,opt,name=exclusiveMinimum"`
	// The maximum length of values in the column, if the column data type is a string
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MaxLength *int32 `json:"maxLength,omitempty" protobuf:"varint,16,opt,name=maxLength"`
	// The minimum length of values in the column, if the column data type is a string
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MinLength *int32 `json:"minLength,omitempty" protobuf:"varint,17,opt,name=minLength"`
	// The regex pattern which values in the column must adhere to
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Pattern *string `json:"pattern,omitempty" protobuf:"bytes,18,opt,name=pattern"`
	// Required
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Required *bool `json:"required,omitempty" protobuf:"varint,19,opt,name=required"`
	// A user-specified example value
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Example *string `json:"example,omitempty" protobuf:"bytes,20,opt,name=example"`
	// A link to user-specified external documentation
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	ExternalDocs *string `json:"externalDocs,omitempty" protobuf:"bytes,21,opt,name=externalDocs"`
	// The collection of unique values for categorical features
	// +kubebuilder:validation:Optional
	Enum []string `json:"enum,omitempty" protobuf:"bytes,22,rep,name=enum"`
	// Indicates if the feature is ordinal, in the case of categorical features
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Ordinal *bool `json:"ordinal,omitempty" protobuf:"varint,23,opt,name=ordinal"`
	// The maximum number of items if the column is a list of values
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MaxItems *int32 `json:"maxItems,omitempty" protobuf:"varint,24,opt,name=maxItems"`
	// The minimum number of items if the column is a list of values
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MinItems *int32 `json:"minItems,omitempty" protobuf:"varint,25,opt,name=minItems"`
	// Enforce that all the items in the list are unique
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	UniqueItems *bool `json:"uniqueItems,omitempty" protobuf:"varint,26,opt,name=uniqueItems"`
	// Indicates if the column is used as the time axis in time series forecasting
	// Default is false.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	TimeColumn *bool `json:"timeColumn,omitempty" protobuf:"varint,27,opt,name=timeColumn"`
	// Indicates if the column contains personally identifiable information
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	PII *bool `json:"pii,omitempty" protobuf:"varint,28,opt,name=pii"`
	// Indicates if the column contains personal health information
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	PHI *bool `json:"phi,omitempty" protobuf:"varint,29,opt,name=phi"`
	// Indicates if the column contains any personal data
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	PersonalData *bool `json:"personalData,omitempty" protobuf:"varint,30,opt,name=personalData"`
	// Protected means that this feature is important for ethical AI / Fairness
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Protected *bool `json:"protected,omitempty" protobuf:"varint,31,opt,name=protected"`
	// The default value for number types; used internally for synthetic data and validation
	DefaultValueNum *float64 `json:"DefaultValueNum,omitempty" protobuf:"bytes,32,opt,name=defaultValueNum"`
	// Indicates if values from this column will be sampled on a logarithmic scale
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Log *bool `json:"log,omitempty" protobuf:"varint,33,opt,name=log"`
	// Mu is the mean of the normal distribution
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	Mu *float64 `json:"mu,omitempty" protobuf:"bytes,34,opt,name=mu"`
	// Sigma is the standard deviation of the distribution
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	Sigma *float64 `json:"sigma,omitempty" protobuf:"bytes,35,opt,name=sigma"`
	// The threshold skew for skew detection
	// +kubebuilder:validation:Optional
	Skewthreshold *float64 `json:"skewThreshold,omitempty" protobuf:"bytes,36,opt,name=skewThreshold"`
	// The threshold drift value for model drift detection.
	// +kubebuilder:validation:Optional
	Driftthreshold *float64 `json:"driftThreshold,omitempty" protobuf:"bytes,37,opt,name=driftThreshold"`
	// Indicates if the column is an index column
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	IndexColumn *bool `json:"indexColumn,omitempty" protobuf:"varint,38,opt,name=indexColumn"`
	// Indicates if the column holds fold values
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Fold *bool `json:"fold,omitempty" protobuf:"varint,39,opt,name=fold"`
	// If true, this column is a weight column
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Weight *bool `json:"weight,omitempty" protobuf:"varint,40,opt,name=weight"`
	// Indicates that the feature should always be used in training
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Reserved *bool `json:"reserved,omitempty" protobuf:"varint,41,opt,name=reserved"`
	// The recommended imputation method for the column
	// +kubebuilder:default:=auto
	// +kubebuilder:validation:Optional
	Imputation *catalog.Imputation `json:"imputation,omitempty" protobuf:"bytes,42,opt,name=imputation"`
	// The recommended scaling method for the column
	// +kubebuilder:default:=auto
	// +kubebuilder:validation:Optional
	Scaling *catalog.Scaling `json:"scaling,omitempty" protobuf:"bytes,43,opt,name=scaling"`
	// Indicates if the feature was automatically generated
	// +kubebuilder:validation:Optional
	Generated bool `json:"generated,omitempty" protobuf:"varint,44,opt,name=generated"`
	// The formula used to generate the column
	// +kubebuilder:validation:Optional
	Formula string `json:"formula,omitempty" protobuf:"bytes,45,opt,name=formula"`
	// Indicates if the column is an ID column
	// +kubebuilder:validation:Optional
	ID bool `json:"id,omitempty" protobuf:"varint,46,opt,name=id"`
	// The step value if the column values are a sequence of numbers
	// +kubebuilder:default:=1
	// +kubebuilder:validation:Optional
	Step *float64 `json:"step,omitempty" protobuf:"bytes,47,opt,name=step"`
	// Contains the index of the column in the schema
	// +kubebuilder:validation:Optional
	Index int32 `json:"index,omitempty" protobuf:"varint,48,opt,name=index"`
}

Column specifies the attribute of a single column in a dataset. The fields of the Column align with the JSON schema standard; you can view detailed documentation at https://json-schema.org/draft/2020-12/json-schema-validation.html
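A minimal sketch of a Column definition for a numeric field, written as a fragment inside this package; the column name, bounds, and the "number" string passed to the catalog.DataType conversion are illustrative (catalog.DataType is assumed to be a string type).

boolp := func(b bool) *bool { return &b }
f64p := func(f float64) *float64 { return &f }

ageColumn := Column{
	Name:     "age",
	DataType: catalog.DataType("number"), // illustrative data-type value
	Nullable: boolp(false),
	Required: boolp(true),
	Minimum:  f64p(0),   // lower bound of valid values
	Maximum:  f64p(120), // upper bound of valid values
}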

func (*Column) DeepCopy

func (in *Column) DeepCopy() *Column

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Column.

func (*Column) DeepCopyInto

func (in *Column) DeepCopyInto(out *Column)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Column) Descriptor

func (*Column) Descriptor() ([]byte, []int)

func (*Column) Marshal

func (m *Column) Marshal() (dAtA []byte, err error)

func (*Column) MarshalTo

func (m *Column) MarshalTo(dAtA []byte) (int, error)

func (*Column) MarshalToSizedBuffer

func (m *Column) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Column) ProtoMessage

func (*Column) ProtoMessage()

func (*Column) Reset

func (m *Column) Reset()

func (*Column) Size

func (m *Column) Size() (n int)

func (*Column) String

func (this *Column) String() string

func (*Column) Unmarshal

func (m *Column) Unmarshal(dAtA []byte) error

func (*Column) Validate

func (in *Column) Validate() (bool, []metav1.StatusCause)

func (*Column) ValidateColumn

func (a *Column) ValidateColumn() (bool, []metav1.StatusCause)

func (*Column) XXX_DiscardUnknown

func (m *Column) XXX_DiscardUnknown()

func (*Column) XXX_Marshal

func (m *Column) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Column) XXX_Merge

func (m *Column) XXX_Merge(src proto.Message)

func (*Column) XXX_Size

func (m *Column) XXX_Size() int

func (*Column) XXX_Unmarshal

func (m *Column) XXX_Unmarshal(b []byte) error

type ColumnDrift added in v0.4.925

type ColumnDrift struct {
	// The name of the column
	//+kubebuilder:validation:Optional
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// Measure of drift for a column
	//+kubebuilder:validation:Optional
	Metrics []catalog.Measurement `json:"metrics,omitempty" protobuf:"bytes,2,opt,name=metrics"`
}

func (*ColumnDrift) DeepCopy added in v0.4.926

func (in *ColumnDrift) DeepCopy() *ColumnDrift

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnDrift.

func (*ColumnDrift) DeepCopyInto added in v0.4.926

func (in *ColumnDrift) DeepCopyInto(out *ColumnDrift)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ColumnDrift) Descriptor added in v0.4.925

func (*ColumnDrift) Descriptor() ([]byte, []int)

func (*ColumnDrift) Marshal added in v0.4.925

func (m *ColumnDrift) Marshal() (dAtA []byte, err error)

func (*ColumnDrift) MarshalTo added in v0.4.925

func (m *ColumnDrift) MarshalTo(dAtA []byte) (int, error)

func (*ColumnDrift) MarshalToSizedBuffer added in v0.4.925

func (m *ColumnDrift) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ColumnDrift) ProtoMessage added in v0.4.925

func (*ColumnDrift) ProtoMessage()

func (*ColumnDrift) Reset added in v0.4.925

func (m *ColumnDrift) Reset()

func (*ColumnDrift) Size added in v0.4.925

func (m *ColumnDrift) Size() (n int)

func (*ColumnDrift) String added in v0.4.925

func (this *ColumnDrift) String() string

func (*ColumnDrift) Unmarshal added in v0.4.925

func (m *ColumnDrift) Unmarshal(dAtA []byte) error

func (*ColumnDrift) XXX_DiscardUnknown added in v0.4.925

func (m *ColumnDrift) XXX_DiscardUnknown()

func (*ColumnDrift) XXX_Marshal added in v0.4.925

func (m *ColumnDrift) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ColumnDrift) XXX_Merge added in v0.4.925

func (m *ColumnDrift) XXX_Merge(src proto.Message)

func (*ColumnDrift) XXX_Size added in v0.4.925

func (m *ColumnDrift) XXX_Size() int

func (*ColumnDrift) XXX_Unmarshal added in v0.4.925

func (m *ColumnDrift) XXX_Unmarshal(b []byte) error

type ColumnHistogram added in v0.4.925

type ColumnHistogram struct {
	// The name of the column
	//+kubebuilder:validation:Optional
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// The histogram data for the column
	//+kubebuilder:validation:Optional
	Historgram catalog.HistogramData `json:"historgram,omitempty" protobuf:"bytes,2,opt,name=histogram"`
}

func (*ColumnHistogram) DeepCopy added in v0.4.926

func (in *ColumnHistogram) DeepCopy() *ColumnHistogram

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnHistogram.

func (*ColumnHistogram) DeepCopyInto added in v0.4.926

func (in *ColumnHistogram) DeepCopyInto(out *ColumnHistogram)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ColumnHistogram) Descriptor added in v0.4.925

func (*ColumnHistogram) Descriptor() ([]byte, []int)

func (*ColumnHistogram) Marshal added in v0.4.925

func (m *ColumnHistogram) Marshal() (dAtA []byte, err error)

func (*ColumnHistogram) MarshalTo added in v0.4.925

func (m *ColumnHistogram) MarshalTo(dAtA []byte) (int, error)

func (*ColumnHistogram) MarshalToSizedBuffer added in v0.4.925

func (m *ColumnHistogram) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ColumnHistogram) ProtoMessage added in v0.4.925

func (*ColumnHistogram) ProtoMessage()

func (*ColumnHistogram) Reset added in v0.4.925

func (m *ColumnHistogram) Reset()

func (*ColumnHistogram) Size added in v0.4.925

func (m *ColumnHistogram) Size() (n int)

func (*ColumnHistogram) String added in v0.4.925

func (this *ColumnHistogram) String() string

func (*ColumnHistogram) Unmarshal added in v0.4.925

func (m *ColumnHistogram) Unmarshal(dAtA []byte) error

func (*ColumnHistogram) XXX_DiscardUnknown added in v0.4.925

func (m *ColumnHistogram) XXX_DiscardUnknown()

func (*ColumnHistogram) XXX_Marshal added in v0.4.925

func (m *ColumnHistogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ColumnHistogram) XXX_Merge added in v0.4.925

func (m *ColumnHistogram) XXX_Merge(src proto.Message)

func (*ColumnHistogram) XXX_Size added in v0.4.925

func (m *ColumnHistogram) XXX_Size() int

func (*ColumnHistogram) XXX_Unmarshal added in v0.4.925

func (m *ColumnHistogram) XXX_Unmarshal(b []byte) error

type ColumnSpec

type ColumnSpec struct {
	// If true this column is an empty spacer
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Spacer *bool `json:"spacer,omitempty" protobuf:"varint,1,opt,name=spacer"`
	// The width in grid columns; each row is divided into 12 columns
	// +kubebuilder:validation:Optional
	Width *int32 `json:"width,omitempty" protobuf:"varint,2,opt,name=width"`
	// The content displayed in the column
	Content ComponentSpec `json:",omitempty" protobuf:"bytes,3,opt,name=content"`
}

The spec for a column in a row. A column can span multiple grid-based columns.
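A sketch of a row laid out with two ColumnSpec entries that together fill the 12-column grid: a content column of width 8 and a spacer of width 4. The titles and widths are illustrative, and the pointer helpers are local to the example.

strp := func(s string) *string { return &s }
boolp := func(b bool) *bool { return &b }
int32p := func(i int32) *int32 { return &i }

row := []ColumnSpec{
	{
		Width:   int32p(8),
		Content: ComponentSpec{Title: strp("Revenue by region")},
	},
	{
		Spacer: boolp(true), // empty spacer filling the rest of the row
		Width:  int32p(4),
	},
}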

func (*ColumnSpec) DeepCopy

func (in *ColumnSpec) DeepCopy() *ColumnSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnSpec.

func (*ColumnSpec) DeepCopyInto

func (in *ColumnSpec) DeepCopyInto(out *ColumnSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ColumnSpec) Descriptor

func (*ColumnSpec) Descriptor() ([]byte, []int)

func (*ColumnSpec) Marshal

func (m *ColumnSpec) Marshal() (dAtA []byte, err error)

func (*ColumnSpec) MarshalTo

func (m *ColumnSpec) MarshalTo(dAtA []byte) (int, error)

func (*ColumnSpec) MarshalToSizedBuffer

func (m *ColumnSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ColumnSpec) ProtoMessage

func (*ColumnSpec) ProtoMessage()

func (*ColumnSpec) Reset

func (m *ColumnSpec) Reset()

func (*ColumnSpec) Size

func (m *ColumnSpec) Size() (n int)

func (*ColumnSpec) String

func (this *ColumnSpec) String() string

func (*ColumnSpec) Unmarshal

func (m *ColumnSpec) Unmarshal(dAtA []byte) error

func (*ColumnSpec) XXX_DiscardUnknown

func (m *ColumnSpec) XXX_DiscardUnknown()

func (*ColumnSpec) XXX_Marshal

func (m *ColumnSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ColumnSpec) XXX_Merge

func (m *ColumnSpec) XXX_Merge(src proto.Message)

func (*ColumnSpec) XXX_Size

func (m *ColumnSpec) XXX_Size() int

func (*ColumnSpec) XXX_Unmarshal

func (m *ColumnSpec) XXX_Unmarshal(b []byte) error

type ColumnStatistics

type ColumnStatistics struct {
	// The name of the column
	// +kubebuilder:validation:Optional
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// The data type of the column
	// +kubebuilder:validation:Optional
	DataType catalog.DataType `json:"datatype,omitempty" protobuf:"bytes,2,opt,name=datatype"`
	// The number of rows which contain a value for the feature
	// +kubebuilder:validation:Optional
	Count float64 `json:"count,omitempty" protobuf:"bytes,3,opt,name=count"`
	// The number of unique values present in the column
	// +kubebuilder:validation:Optional
	Distinct int32 `json:"distinct,omitempty" protobuf:"varint,4,opt,name=distinct"`
	// The number of missing values present in the column
	// +kubebuilder:validation:Optional
	Missing int32 `json:"missing,omitempty" protobuf:"varint,5,opt,name=missing"`
	// Percentage of missing values in the column
	// +kubebuilder:validation:Optional
	PercentMissing float64 `json:"percentMissing,omitempty" protobuf:"bytes,6,opt,name=percentMissing"`
	// The mean of all values in the column, if the column data type is a number
	// +kubebuilder:validation:Optional
	Mean float64 `json:"mean,omitempty" protobuf:"bytes,7,opt,name=mean"`
	// The standard deviation of the column's values
	// +kubebuilder:validation:Optional
	StdDev float64 `json:"stddev,omitempty" protobuf:"bytes,8,opt,name=stddev"`
	// The variance of the column's values about the column's mean
	// +kubebuilder:validation:Optional
	Variance float64 `json:"variance,omitempty" protobuf:"bytes,9,opt,name=variance"`
	// The minimum value of all values in the column
	// +kubebuilder:validation:Optional
	Min float64 `json:"min,omitempty" protobuf:"bytes,10,opt,name=min"`
	// The maximum value of all values in the column
	// +kubebuilder:validation:Optional
	Max float64 `json:"max,omitempty" protobuf:"bytes,11,opt,name=max"`
	// The computed kurtosis, which measures the peakedness of the distribution of values in the column
	// +kubebuilder:validation:Optional
	Kurtosis float64 `json:"kurtosis,omitempty" protobuf:"bytes,12,opt,name=kurtosis"`
	// The computed skewness, which measures the asymmetry of the distribution of values in the column
	// +kubebuilder:validation:Optional
	Skewness float64 `json:"skewness,omitempty" protobuf:"bytes,13,opt,name=skewness"`
	// The sum of all values in the column
	// +kubebuilder:validation:Optional
	Sum float64 `json:"sum,omitempty" protobuf:"bytes,14,opt,name=sum"`
	// The median absolute deviation (MAD) of all values in the column
	// +kubebuilder:validation:Optional
	Mad float64 `json:"mad,omitempty" protobuf:"bytes,15,opt,name=mad"`
	// The 25th percentile of the column's values
	// +kubebuilder:validation:Optional
	P25 float64 `json:"p25,omitempty" protobuf:"bytes,16,opt,name=p25"`
	// The 50th percentile (median) of the column's values
	// +kubebuilder:validation:Optional
	P50 float64 `json:"p50,omitempty" protobuf:"bytes,17,opt,name=p50"`
	// The 75th percentile of the column's values
	// +kubebuilder:validation:Optional
	P75 float64 `json:"p75,omitempty" protobuf:"bytes,18,opt,name=p75"`
	// The interquartile range of the columns values
	// +kubebuilder:validation:Optional
	IQR float64 `json:"iqr,omitempty" protobuf:"bytes,19,opt,name=iqr"`
	// The mode value of the column, also known as the most frequent value
	// +kubebuilder:validation:Optional
	Mode string `json:"mode,omitempty" protobuf:"bytes,20,opt,name=mode"`
	// The number of zero values in the column
	// +kubebuilder:validation:Optional
	Zeros float64 `json:"zeros,omitempty" protobuf:"bytes,21,opt,name=zeros"`
	// The number of invalid values in the column
	// +kubebuilder:validation:Optional
	Invalid int32 `json:"invalid,omitempty" protobuf:"varint,22,opt,name=invalid"`
	// The feature importance of the column
	// +kubebuilder:validation:Optional
	Importance float64 `json:"importance,omitempty" protobuf:"bytes,23,opt,name=importance"`
	// Indicates if the feature is the target attribute for a Study, as specified by the Dataset's DataSource
	// +kubebuilder:validation:Optional
	Target bool `json:"target,omitempty" protobuf:"varint,24,opt,name=target"`
	// Indicates if the column should be ignored, as specified by the Dataset's DataSource
	// +kubebuilder:validation:Optional
	Ignore bool `json:"ignore,omitempty" protobuf:"varint,25,opt,name=ignore"`
	// Indicates if the column may contain null values, as specified by the Dataset's DataSource
	// +kubebuilder:validation:Optional
	Nullable bool `json:"nullable,omitempty" protobuf:"varint,26,opt,name=nullable"`
	// Indicates if the column has high cardinality and should use the high cardinality encoder during feature engineering
	// +kubebuilder:validation:Optional
	HighCardinality bool `json:"highCardinality,omitempty" protobuf:"varint,27,opt,name=highCardinality"`
	// Indicates if the column has high correlation with another feature, and that it should be dropped
	// +kubebuilder:validation:Optional
	HighCorrWithOtherFeatures bool `json:"highCorrWithOtherFeatures,omitempty" protobuf:"varint,28,opt,name=highCorrWithOtherFeatures"`
	// Indicates that the feature has low correlation with the target feature, and that it should be dropped
	// +kubebuilder:validation:Optional
	LowCorrWithTarget bool `json:"lowCorrWithTarget,omitempty" protobuf:"varint,29,opt,name=lowCorrWithTarget"`
	// Indicates if the column has a high percentage of missing values, and that it should be dropped
	// +kubebuilder:validation:Optional
	HighMissingPct bool `json:"highMissingPct,omitempty" protobuf:"varint,30,opt,name=highMissingPct"`
	// Marks that the column is skewed and would require a power transform.
	//
	// If skewness is less than -1 or greater than 1, the distribution is highly skewed.
	// If skewness is between -1 and -0.5 or between 0.5 and 1, the distribution is moderately skewed.
	// If skewness is between -0.5 and 0.5, the distribution is approximately symmetric
	// +kubebuilder:validation:Optional
	Skewed bool `json:"skewed,omitempty" protobuf:"varint,31,opt,name=skewed"`
	// Indicates if the column is an ID column, such as a primary key
	// +kubebuilder:validation:Optional
	Id bool `json:"id,omitempty" protobuf:"varint,32,opt,name=id"`
	// Indicates if the column contains constant values
	// +kubebuilder:validation:Optional
	Constant bool `json:"constant,omitempty" protobuf:"varint,33,opt,name=constant"`
	// Indicates if the column is a duplicate of another column
	// +kubebuilder:validation:Optional
	Duplicate bool `json:"duplicate,omitempty" protobuf:"varint,34,opt,name=duplicate"`
	// Indicates if the column is reserved and must be a feature included in model training
	// +kubebuilder:validation:Optional
	Reserved bool `json:"reserved,omitempty" protobuf:"varint,35,opt,name=reserved"`
	// The ratio between non-null and null values in the column
	// +kubebuilder:validation:Optional
	Completeness float64 `json:"completeness,omitempty" protobuf:"bytes,37,opt,name=completeness"`
	// The ratio between unique values and non-unique values in the column
	// +kubebuilder:validation:Optional
	DistinctValueCount float64 `json:"distinctValueCount,omitempty" protobuf:"bytes,38,opt,name=distinctValueCount"`
	// The ratio of the most frequent value to the total number of values in the column
	// +kubebuilder:validation:Optional
	MostFreqValuesRatio float64 `json:"mostFreqValuesRatio,omitempty" protobuf:"bytes,39,opt,name=mostFreqValuesRatio"`
	// Used for text attributes
	// +kubebuilder:validation:Optional
	IndexOfPeculiarity float64 `json:"indexOfPeculiarity,omitempty" protobuf:"bytes,40,opt,name=indexOfPeculiarity"`
	// Histogram data representing the distribution of the values in the column
	// +kubebuilder:validation:Optional
	Histogram catalog.HistogramData `json:"histogram,omitempty" protobuf:"bytes,41,opt,name=histogram"`
	// Correlation to the target feature
	// +kubebuilder:validation:Optional
	CorrToTarget float64 `json:"corrToTarget,omitempty" protobuf:"bytes,42,opt,name=corrToTarget"`
	// The column index in the dataset
	Index int32 `json:"index,omitempty" protobuf:"bytes,43,opt,name=index"`
	// Outlier statistics.
	Outliers OutlierStat `json:"outliers,omitempty" protobuf:"bytes,44,opt,name=outliers"`
}

ColumnStatistics contains statistical parameters for a single feature from a dataset
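A hedged sketch of scanning a slice of ColumnStatistics and collecting columns that the profiling flags suggest dropping; the drop policy itself is illustrative, not prescribed by the package.

func columnsToDrop(stats []ColumnStatistics) []string {
	var drop []string
	for _, s := range stats {
		// Candidates for removal: constant, duplicate, mostly missing,
		// or highly correlated with another feature.
		if s.Constant || s.Duplicate || s.HighMissingPct || s.HighCorrWithOtherFeatures {
			drop = append(drop, s.Name)
		}
	}
	return drop
}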

func (*ColumnStatistics) DeepCopy

func (in *ColumnStatistics) DeepCopy() *ColumnStatistics

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnStatistics.

func (*ColumnStatistics) DeepCopyInto

func (in *ColumnStatistics) DeepCopyInto(out *ColumnStatistics)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ColumnStatistics) Descriptor

func (*ColumnStatistics) Descriptor() ([]byte, []int)

func (*ColumnStatistics) Marshal

func (m *ColumnStatistics) Marshal() (dAtA []byte, err error)

func (*ColumnStatistics) MarshalTo

func (m *ColumnStatistics) MarshalTo(dAtA []byte) (int, error)

func (*ColumnStatistics) MarshalToSizedBuffer

func (m *ColumnStatistics) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ColumnStatistics) ProtoMessage

func (*ColumnStatistics) ProtoMessage()

func (*ColumnStatistics) Reset

func (m *ColumnStatistics) Reset()

func (*ColumnStatistics) Size

func (m *ColumnStatistics) Size() (n int)

func (*ColumnStatistics) String

func (this *ColumnStatistics) String() string

func (*ColumnStatistics) Unmarshal

func (m *ColumnStatistics) Unmarshal(dAtA []byte) error

func (*ColumnStatistics) XXX_DiscardUnknown

func (m *ColumnStatistics) XXX_DiscardUnknown()

func (*ColumnStatistics) XXX_Marshal

func (m *ColumnStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ColumnStatistics) XXX_Merge

func (m *ColumnStatistics) XXX_Merge(src proto.Message)

func (*ColumnStatistics) XXX_Size

func (m *ColumnStatistics) XXX_Size() int

func (*ColumnStatistics) XXX_Unmarshal

func (m *ColumnStatistics) XXX_Unmarshal(b []byte) error

type ComponentSpec

type ComponentSpec struct {
	// +kubebuilder:validation:Optional
	Title *string `json:"title,omitempty" protobuf:"bytes,1,opt,name=title"`
	// +kubebuilder:validation:Optional
	SubTitle *string `json:"subtitle,omitempty" protobuf:"bytes,2,opt,name=subtitle"`
	// +kubebuilder:validation:Optional
	Footer        *string `json:"footer,omitempty" protobuf:"bytes,3,opt,name=footer"`
	ComponentView `json:",inline" protobuf:"bytes,4,opt,name=content"`
}

func (*ComponentSpec) DeepCopy

func (in *ComponentSpec) DeepCopy() *ComponentSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComponentSpec.

func (*ComponentSpec) DeepCopyInto

func (in *ComponentSpec) DeepCopyInto(out *ComponentSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ComponentSpec) Descriptor

func (*ComponentSpec) Descriptor() ([]byte, []int)

func (*ComponentSpec) Marshal

func (m *ComponentSpec) Marshal() (dAtA []byte, err error)

func (*ComponentSpec) MarshalTo

func (m *ComponentSpec) MarshalTo(dAtA []byte) (int, error)

func (*ComponentSpec) MarshalToSizedBuffer

func (m *ComponentSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ComponentSpec) ProtoMessage

func (*ComponentSpec) ProtoMessage()

func (*ComponentSpec) Reset

func (m *ComponentSpec) Reset()

func (*ComponentSpec) Size

func (m *ComponentSpec) Size() (n int)

func (*ComponentSpec) String

func (this *ComponentSpec) String() string

func (*ComponentSpec) Unmarshal

func (m *ComponentSpec) Unmarshal(dAtA []byte) error

func (*ComponentSpec) XXX_DiscardUnknown

func (m *ComponentSpec) XXX_DiscardUnknown()

func (*ComponentSpec) XXX_Marshal

func (m *ComponentSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ComponentSpec) XXX_Merge

func (m *ComponentSpec) XXX_Merge(src proto.Message)

func (*ComponentSpec) XXX_Size

func (m *ComponentSpec) XXX_Size() int

func (*ComponentSpec) XXX_Unmarshal

func (m *ComponentSpec) XXX_Unmarshal(b []byte) error

type ComponentView

type ComponentView struct {
	// +kubebuilder:validation:Optional
	Metric *MetricSpec `json:"metric,omitempty" protobuf:"bytes,1,opt,name=metric"`
	// +kubebuilder:validation:Optional
	Gauge *GaugeSpec `json:"gauge,omitempty" protobuf:"bytes,2,opt,name=gauge"`
	// +kubebuilder:validation:Optional
	Histogram *HistogramSpec `json:"histogram,omitempty" protobuf:"bytes,3,opt,name=histogram"`
	// +kubebuilder:validation:Optional
	Table *TableSpec `json:"table,omitempty" protobuf:"bytes,4,opt,name=table"`
	// +kubebuilder:validation:Optional
	LineChart *LineChartSpec `json:"lineChart,omitempty" protobuf:"bytes,5,opt,name=lineChart"`
	// +kubebuilder:validation:Optional
	BarChart *BarChartSpec `json:"barChart,omitempty" protobuf:"bytes,6,opt,name=barChart"`
	// +kubebuilder:validation:Optional
	ScatterChart *ScatterPlotSpec `json:"scatterPlot,omitempty" protobuf:"bytes,7,opt,name=scatterPlot"`
}

Represents the view part of a component. Only one view should be specified per component.
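A sketch of a ComponentSpec that sets exactly one view part (a bar chart), per the one-view-per-component rule above; the title, dataset, and column names are illustrative.

strp := func(s string) *string { return &s }

component := ComponentSpec{
	Title: strp("Revenue by region"),
	ComponentView: ComponentView{
		// Only one of Metric, Gauge, Histogram, Table, LineChart,
		// BarChart, or ScatterChart should be set.
		BarChart: &BarChartSpec{
			DatasetName: strp("sales-q1"),
			X:           strp("region"),
			Y:           strp("revenue"),
		},
	},
}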

func (*ComponentView) DeepCopy

func (in *ComponentView) DeepCopy() *ComponentView

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComponentView.

func (*ComponentView) DeepCopyInto

func (in *ComponentView) DeepCopyInto(out *ComponentView)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ComponentView) Descriptor

func (*ComponentView) Descriptor() ([]byte, []int)

func (*ComponentView) Marshal

func (m *ComponentView) Marshal() (dAtA []byte, err error)

func (*ComponentView) MarshalTo

func (m *ComponentView) MarshalTo(dAtA []byte) (int, error)

func (*ComponentView) MarshalToSizedBuffer

func (m *ComponentView) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ComponentView) ProtoMessage

func (*ComponentView) ProtoMessage()

func (*ComponentView) Reset

func (m *ComponentView) Reset()

func (*ComponentView) Size

func (m *ComponentView) Size() (n int)

func (*ComponentView) String

func (this *ComponentView) String() string

func (*ComponentView) Unmarshal

func (m *ComponentView) Unmarshal(dAtA []byte) error

func (*ComponentView) XXX_DiscardUnknown

func (m *ComponentView) XXX_DiscardUnknown()

func (*ComponentView) XXX_Marshal

func (m *ComponentView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ComponentView) XXX_Merge

func (m *ComponentView) XXX_Merge(src proto.Message)

func (*ComponentView) XXX_Size

func (m *ComponentView) XXX_Size() int

func (*ComponentView) XXX_Unmarshal

func (m *ComponentView) XXX_Unmarshal(b []byte) error

type Correlation

type Correlation struct {
	// The first feature name
	Feature1 string `json:"feature1" protobuf:"bytes,1,opt,name=feature1"`
	// The second feature name
	Feature2 string `json:"feature2" protobuf:"bytes,2,opt,name=feature2"`
	// The correlation value
	Value float64 `json:"value,omitempty" protobuf:"bytes,3,opt,name=value"`
	// How the value was calculated
	Method string `json:"method,omitempty" protobuf:"bytes,4,opt,name=method"`
}

Correlation records the correlation between two features in a Dataset

func (*Correlation) DeepCopy

func (in *Correlation) DeepCopy() *Correlation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Correlation.

func (*Correlation) DeepCopyInto

func (in *Correlation) DeepCopyInto(out *Correlation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Correlation) Descriptor

func (*Correlation) Descriptor() ([]byte, []int)

func (*Correlation) Marshal

func (m *Correlation) Marshal() (dAtA []byte, err error)

func (*Correlation) MarshalTo

func (m *Correlation) MarshalTo(dAtA []byte) (int, error)

func (*Correlation) MarshalToSizedBuffer

func (m *Correlation) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Correlation) ProtoMessage

func (*Correlation) ProtoMessage()

func (*Correlation) Reset

func (m *Correlation) Reset()

func (*Correlation) Size

func (m *Correlation) Size() (n int)

func (*Correlation) String

func (this *Correlation) String() string

func (*Correlation) Unmarshal

func (m *Correlation) Unmarshal(dAtA []byte) error

func (*Correlation) XXX_DiscardUnknown

func (m *Correlation) XXX_DiscardUnknown()

func (*Correlation) XXX_Marshal

func (m *Correlation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Correlation) XXX_Merge

func (m *Correlation) XXX_Merge(src proto.Message)

func (*Correlation) XXX_Size

func (m *Correlation) XXX_Size() int

func (*Correlation) XXX_Unmarshal

func (m *Correlation) XXX_Unmarshal(b []byte) error

type CorrelationSpec

type CorrelationSpec struct {
	// The minimum value of a computed correlation to be stored as a result
	// +kubebuilder:default:=50
	// +kubebuilder:validation:Optional
	Cutoff *float64 `json:"cutoff,omitempty" protobuf:"bytes,1,opt,name=cutoff"`
	// The method to be used when computing correlations
	// +kubebuilder:default:="pearson"
	// +kubebuilder:validation:Optional
	Method *string `json:"method,omitempty" protobuf:"bytes,2,opt,name=method"`
	// The number of top correlations to be included in the correlation results
	// +kubebuilder:default:=10
	// +kubebuilder:validation:Optional
	Top *int32 `json:"top,omitempty" protobuf:"varint,3,opt,name=top"`
}

CorrelationSpec specifies how the correlations between features in a Dataset should be computed
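A sketch of a CorrelationSpec that keeps the top 20 Pearson correlations above a cutoff of 75; given the default cutoff of 50 shown above, the cutoff is assumed to be on a 0-100 scale, which is not confirmed by this documentation.

strp := func(s string) *string { return &s }
f64p := func(f float64) *float64 { return &f }
int32p := func(i int32) *int32 { return &i }

corr := CorrelationSpec{
	Cutoff: f64p(75), // assumed 0-100 scale, matching the default of 50
	Method: strp("pearson"),
	Top:    int32p(20),
}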

func (*CorrelationSpec) DeepCopy

func (in *CorrelationSpec) DeepCopy() *CorrelationSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CorrelationSpec.

func (*CorrelationSpec) DeepCopyInto

func (in *CorrelationSpec) DeepCopyInto(out *CorrelationSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*CorrelationSpec) Descriptor

func (*CorrelationSpec) Descriptor() ([]byte, []int)

func (*CorrelationSpec) Marshal

func (m *CorrelationSpec) Marshal() (dAtA []byte, err error)

func (*CorrelationSpec) MarshalTo

func (m *CorrelationSpec) MarshalTo(dAtA []byte) (int, error)

func (*CorrelationSpec) MarshalToSizedBuffer

func (m *CorrelationSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*CorrelationSpec) ProtoMessage

func (*CorrelationSpec) ProtoMessage()

func (*CorrelationSpec) Reset

func (m *CorrelationSpec) Reset()

func (*CorrelationSpec) Size

func (m *CorrelationSpec) Size() (n int)

func (*CorrelationSpec) String

func (this *CorrelationSpec) String() string

func (*CorrelationSpec) Unmarshal

func (m *CorrelationSpec) Unmarshal(dAtA []byte) error

func (*CorrelationSpec) XXX_DiscardUnknown

func (m *CorrelationSpec) XXX_DiscardUnknown()

func (*CorrelationSpec) XXX_Marshal

func (m *CorrelationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*CorrelationSpec) XXX_Merge

func (m *CorrelationSpec) XXX_Merge(src proto.Message)

func (*CorrelationSpec) XXX_Size

func (m *CorrelationSpec) XXX_Size() int

func (*CorrelationSpec) XXX_Unmarshal

func (m *CorrelationSpec) XXX_Unmarshal(b []byte) error

type CsvFileSpec

type CsvFileSpec struct {
	// The character used to separate fields (by default, a comma)
	// +kubebuilder:default:="comma"
	// +kubebuilder:validation:Optional
	ColumnDelimiter *Delimiter `json:"columnDelimiter,omitempty" protobuf:"bytes,1,opt,name=columnDelimiter"`
	// The character used to signal the end of a row (by default, a newline \n)
	// +kubebuilder:default:="crlf"
	// +kubebuilder:validation:Optional
	RowDelimiter *Delimiter `json:"rowDelimiter,omitempty" protobuf:"bytes,2,opt,name=rowDelimiter"`
	// The character used for quotes (by default, a double quote ")
	// +kubebuilder:default:="double-quote"
	// +kubebuilder:validation:Optional
	QuoteChar QuoteChar `json:"quote,omitempty" protobuf:"bytes,3,opt,name=quote"`
	// The character used to escape the delimiter
	// +kubebuilder:default:="none"
	// +kubebuilder:validation:Optional
	EscapeChar EscapeChar `json:"escapeChar,omitempty" protobuf:"bytes,4,opt,name=escapeChar"`
	// The comment character used to split comments off the end of lines (by default, a hashtag #)
	// +kubebuilder:default:="#"
	// +kubebuilder:validation:Optional
	CommentChars *string `json:"commentChars,omitempty" protobuf:"bytes,5,opt,name=commentChars"`
	// Indicates if a header is present in the file
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	Header *bool `json:"header,omitempty" protobuf:"varint,6,opt,name=header"`
	// The number of rows to skip from the top of the file
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	SkipRows *int32 `json:"skipRows,omitempty" protobuf:"varint,7,opt,name=skipRows"`
	// NullValues is a sequence of values to replace with NA.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	NullValues *string `json:"nullValues,omitempty" protobuf:"bytes,8,opt,name=nullValues"`
	// The unicode encoding of the file (e.g. 'utf-8' for UTF-8 encoded text)
	// +kubebuilder:default:="utf-8"
	// +kubebuilder:validation:Optional
	Encoding *catalog.FileEncoding `json:"encoding,omitempty" protobuf:"bytes,9,opt,name=encoding"`
	// The maximum number of rows to read
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	MaxRows *int32 `json:"maxRows,omitempty" protobuf:"varint,10,opt,name=maxRows"`
	// Indicates if reading the CSV file should fail if there are any errors
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	Strict *bool `json:"strict,omitempty" protobuf:"varint,11,opt,name=strict"`
	// The compression type, if the file is compressed
	// +kubebuilder:default:="none"
	// +kubebuilder:validation:Optional
	Compression *string `json:"compression,omitempty" protobuf:"bytes,12,opt,name=compression"`
	// Indicates if the file contains an index column
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	HasIndexColumn *bool `json:"hasIndexColumn,omitempty" protobuf:"varint,13,opt,name=hasIndexColumn"`
	// The position of the index column
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	IndexColumn *int32 `json:"indexColumn,omitempty" protobuf:"varint,14,opt,name=indexColumn"`
}

CsvFileSpec specifies the format of a CSV (comma-separated values) file
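A sketch of a CsvFileSpec for a tab-delimited file with no header row; the Delimiter value "tab" and the other literals are assumptions rather than values confirmed by this package (Delimiter is assumed to be a string type).

strp := func(s string) *string { return &s }
boolp := func(b bool) *bool { return &b }
int32p := func(i int32) *int32 { return &i }

tab := Delimiter("tab") // assumed delimiter name
csvSpec := CsvFileSpec{
	ColumnDelimiter: &tab,
	Header:          boolp(false),
	SkipRows:        int32p(0),
	NullValues:      strp("NA"),
}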

func (*CsvFileSpec) DeepCopy

func (in *CsvFileSpec) DeepCopy() *CsvFileSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CsvFileSpec.

func (*CsvFileSpec) DeepCopyInto

func (in *CsvFileSpec) DeepCopyInto(out *CsvFileSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*CsvFileSpec) Descriptor

func (*CsvFileSpec) Descriptor() ([]byte, []int)

func (*CsvFileSpec) Marshal

func (m *CsvFileSpec) Marshal() (dAtA []byte, err error)

func (*CsvFileSpec) MarshalTo

func (m *CsvFileSpec) MarshalTo(dAtA []byte) (int, error)

func (*CsvFileSpec) MarshalToSizedBuffer

func (m *CsvFileSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*CsvFileSpec) ProtoMessage

func (*CsvFileSpec) ProtoMessage()

func (*CsvFileSpec) Reset

func (m *CsvFileSpec) Reset()

func (*CsvFileSpec) Size

func (m *CsvFileSpec) Size() (n int)

func (*CsvFileSpec) String

func (this *CsvFileSpec) String() string

func (*CsvFileSpec) Unmarshal

func (m *CsvFileSpec) Unmarshal(dAtA []byte) error

func (*CsvFileSpec) XXX_DiscardUnknown

func (m *CsvFileSpec) XXX_DiscardUnknown()

func (*CsvFileSpec) XXX_Marshal

func (m *CsvFileSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*CsvFileSpec) XXX_Merge

func (m *CsvFileSpec) XXX_Merge(src proto.Message)

func (*CsvFileSpec) XXX_Size

func (m *CsvFileSpec) XXX_Size() int

func (*CsvFileSpec) XXX_Unmarshal

func (m *CsvFileSpec) XXX_Unmarshal(b []byte) error

type DataInputSpec added in v0.4.649

type DataInputSpec struct {
	// The physical location of the dataset
	// +kubebuilder:validation:Optional
	Location *DataLocation `json:"location,omitempty" protobuf:"bytes,2,opt,name=location"`
	// The file format of the dataset, if applicable
	// +kubebuilder:default:="csv"
	// +kubebuilder:validation:Optional
	Format *catalog.DatastoreType `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"`
}

DataInputSpec specifies the format and location of an input dataset
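A sketch of a DataInputSpec that points at a CSV object in object storage; the bucket, path, and the string values passed to the catalog.DatastoreType and DataLocationType conversions are illustrative (both are assumed to be string types).

strp := func(s string) *string { return &s }

format := catalog.DatastoreType("csv") // illustrative format value
locType := DataLocationType("object")  // illustrative location type
input := DataInputSpec{
	Format: &format,
	Location: &DataLocation{
		Type:       &locType,
		BucketName: strp("modela-datasets"),
		Path:       strp("churn/train.csv"),
	},
}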

func (*DataInputSpec) DeepCopy added in v0.4.649

func (in *DataInputSpec) DeepCopy() *DataInputSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataInputSpec.

func (*DataInputSpec) DeepCopyInto added in v0.4.649

func (in *DataInputSpec) DeepCopyInto(out *DataInputSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataInputSpec) Descriptor added in v0.4.649

func (*DataInputSpec) Descriptor() ([]byte, []int)

func (*DataInputSpec) Marshal added in v0.4.649

func (m *DataInputSpec) Marshal() (dAtA []byte, err error)

func (*DataInputSpec) MarshalTo added in v0.4.649

func (m *DataInputSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataInputSpec) MarshalToSizedBuffer added in v0.4.649

func (m *DataInputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataInputSpec) ProtoMessage added in v0.4.649

func (*DataInputSpec) ProtoMessage()

func (*DataInputSpec) Reset added in v0.4.649

func (m *DataInputSpec) Reset()

func (*DataInputSpec) Size added in v0.4.649

func (m *DataInputSpec) Size() (n int)

func (*DataInputSpec) String added in v0.4.649

func (this *DataInputSpec) String() string

func (*DataInputSpec) Unmarshal added in v0.4.649

func (m *DataInputSpec) Unmarshal(dAtA []byte) error

func (*DataInputSpec) XXX_DiscardUnknown added in v0.4.649

func (m *DataInputSpec) XXX_DiscardUnknown()

func (*DataInputSpec) XXX_Marshal added in v0.4.649

func (m *DataInputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataInputSpec) XXX_Merge added in v0.4.649

func (m *DataInputSpec) XXX_Merge(src proto.Message)

func (*DataInputSpec) XXX_Size added in v0.4.649

func (m *DataInputSpec) XXX_Size() int

func (*DataInputSpec) XXX_Unmarshal added in v0.4.649

func (m *DataInputSpec) XXX_Unmarshal(b []byte) error

type DataLocation

type DataLocation struct {
	// The type of location where the data resides, which can either be an object inside an object storage system (e.g. MinIO), a SQL location
	// like a table or a view, a data stream (e.g. Kafka, currently unsupported), or a web location (currently unsupported)
	// +kubebuilder:default:="object"
	// +kubebuilder:validation:Optional
	Type *DataLocationType `json:"type,omitempty" protobuf:"bytes,1,opt,name=type"`
	// In the case of the type of location being a database, ConnectionName specifies the name of the Connection resource
	// that exists in the same tenant as the resource specifying the DataLocation. Modela will attempt to connect
	// to the database using the credentials specified in the Connection, and will execute the query specified by the SQL field
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	ConnectionName *string `json:"connectionName,omitempty" protobuf:"bytes,2,opt,name=connectionName"`
	// In the case of the location type being an object storage system, BucketName is the name of the VirtualBucket resource
	// that exists in the same tenant as the resource specifying the DataLocation. Modela will connect to the external
	// object storage system, and will access the file from the path specified by the Path field
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	BucketName *string `json:"bucketName,omitempty" protobuf:"bytes,3,opt,name=bucketName"`
	// The path to a flat-file inside an object storage system. When using the Modela API to upload files (through the
	// FileService API), Modela will upload the data to a predetermined path based on the Tenant, DataProduct,
	// DataProductVersion, and resource type of the resource in relation to the file being uploaded.
	// The path does not need to adhere to this format; you can give the path to a file inside a bucket not managed by Modela
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Path *string `json:"path,omitempty" protobuf:"bytes,4,opt,name=path"`
	// The name of a table inside a database, if applicable
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Table *string `json:"table,omitempty" protobuf:"bytes,5,opt,name=table"`
	// The name of a database inside the database system specified by the ConnectionName field
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Database *string `json:"database,omitempty" protobuf:"bytes,6,opt,name=database"`
	// The SQL statement which will be executed to query data from the table specified by Table
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Sql *string `json:"sql,omitempty" protobuf:"bytes,7,opt,name=sql"`
	// The name of the streaming topic (currently unsupported)
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Topic *string `json:"topic,omitempty" protobuf:"bytes,8,opt,name=topic"`
	// In the case of the location type being WebApi, URL specifies the external location (HTTP or Git) that will be queried
	// and then stored as flat-file by the resource which specifies the DataLocation
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	URL *string `json:"url,omitempty" protobuf:"bytes,9,opt,name=url"`
	// In the case of the location type being Dataset or PublicDataset, ResourceRef references another resource
	// that contains data which will be used as a data source
	// +kubebuilder:validation:Optional
	ResourceRef *v1.ObjectReference `json:"resourceRef,omitempty" protobuf:"bytes,10,opt,name=resourceRef"`
}

DataLocation describes the external location of data that will be accessed by Modela, along with additional information on how to query the data if the location is not a flat-file source.
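A minimal sketch of populating a DataLocation for the two most common cases: a flat-file in a VirtualBucket and a SQL table reached through a Connection. The import path, bucket, connection, and query values are assumptions for illustration only.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	// A flat-file inside a VirtualBucket (object storage).
	objType := data.DataLocationObjectStorage
	bucket := "customer-data"    // assumed VirtualBucket name
	path := "datasets/churn.csv" // assumed object path

	fileLoc := data.DataLocation{
		Type:       &objType,
		BucketName: &bucket,
		Path:       &path,
	}

	// A SQL table reached through a Connection resource.
	tblType := data.DataLocationSQLTable
	conn := "postgres-prod" // assumed Connection name
	db := "sales"
	table := "transactions"
	sql := "SELECT * FROM transactions" // optional query executed against the table

	tableLoc := data.DataLocation{
		Type:           &tblType,
		ConnectionName: &conn,
		Database:       &db,
		Table:          &table,
		Sql:            &sql,
	}

	fmt.Println(*fileLoc.Path, *tableLoc.Table)
}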

func (*DataLocation) DeepCopy

func (in *DataLocation) DeepCopy() *DataLocation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataLocation.

func (*DataLocation) DeepCopyInto

func (in *DataLocation) DeepCopyInto(out *DataLocation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataLocation) Descriptor

func (*DataLocation) Descriptor() ([]byte, []int)

func (*DataLocation) Marshal

func (m *DataLocation) Marshal() (dAtA []byte, err error)

func (*DataLocation) MarshalTo

func (m *DataLocation) MarshalTo(dAtA []byte) (int, error)

func (*DataLocation) MarshalToSizedBuffer

func (m *DataLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataLocation) ProtoMessage

func (*DataLocation) ProtoMessage()

func (*DataLocation) Reset

func (m *DataLocation) Reset()

func (*DataLocation) Size

func (m *DataLocation) Size() (n int)

func (*DataLocation) String

func (this *DataLocation) String() string

func (*DataLocation) Unmarshal

func (m *DataLocation) Unmarshal(dAtA []byte) error

func (*DataLocation) XXX_DiscardUnknown

func (m *DataLocation) XXX_DiscardUnknown()

func (*DataLocation) XXX_Marshal

func (m *DataLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataLocation) XXX_Merge

func (m *DataLocation) XXX_Merge(src proto.Message)

func (*DataLocation) XXX_Size

func (m *DataLocation) XXX_Size() int

func (*DataLocation) XXX_Unmarshal

func (m *DataLocation) XXX_Unmarshal(b []byte) error

type DataLocationType

type DataLocationType string

+kubebuilder:validation:Enum="object";"table";"view";"stream";"web";"public-dataset";"dataset"

const (
	DataLocationObjectStorage DataLocationType = "object"
	DataLocationSQLTable      DataLocationType = "table"
	DataLocationSQLView       DataLocationType = "view"
	DataLocationStream        DataLocationType = "stream"
	DataLocationWebApi        DataLocationType = "web"
	DataLocationPublicDataset DataLocationType = "public-dataset" // The data reside in a public dataset
	DataLocationDataset       DataLocationType = "dataset"        // The data reside inside another dataset

)

type DataOutputSpec

type DataOutputSpec struct {
	// DatasetName is the name of a new Dataset resource that will be created.
	// If empty, the system will save the dataset to a file
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// The location where the dataset will be stored
	// +kubebuilder:validation:Optional
	Location *DataLocation `json:"location,omitempty" protobuf:"bytes,2,opt,name=location"`
	// The format of the dataset, applicable if the output location is a flat-file
	// +kubebuilder:default:="csv"
	// +kubebuilder:validation:Optional
	Format *catalog.DatastoreType `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"`
	// The update strategy for the dataset in the case that the output location already exists (e.g. a database table).
	// Upsert will insert new records and update existing ones;
	// Insert will insert new records and not update existing ones;
	// Update will not insert new records and only update existing ones
	// +kubebuilder:default:="upsert"
	// +kubebuilder:validation:Optional
	Action *catalog.UpdateStrategy `json:"action,omitempty" protobuf:"bytes,4,opt,name=action"`
	// If true, the database table specified by Location will be created if it does not exist
	// +kubebuilder:validation:Optional
	CreateTableIfNotExist *bool `json:"createTableIfNotExist,omitempty" protobuf:"varint,5,opt,name=createTableIfNotExist"`
	// Indicates if the features (i.e. all the columns) of the input dataset, excluding the
	// feature which was predicted on, will be included in the output dataset
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	IncludeFeatures *bool `json:"includeFeatures,omitempty" protobuf:"varint,6,opt,name=includeFeatures"`
	// If true, SHAP values for each predicted row will be included as JSON as an additional column of the dataset
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	XAI *bool `json:"xai,omitempty" protobuf:"varint,7,opt,name=xai"`
	// If true, an additional column will be added to the dataset which
	// indicates if each predicted row was detected to be an outlier
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	DetectOutliers *bool `json:"detectOutliers,omitempty" protobuf:"varint,8,opt,name=detectOutliers"`
}

DataOutputSpec specifies the format, features, and output location of a transformed dataset
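A minimal sketch of a DataOutputSpec that upserts scored rows into a database table, creating the table if needed and attaching SHAP values. The catalog import path and the string conversion for the update strategy are assumptions.

package main

import (
	"fmt"

	catalog "github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1" // assumed import path
	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1"       // assumed import path
)

func main() {
	locType := data.DataLocationSQLTable
	conn := "postgres-prod" // assumed Connection name
	db := "analytics"
	table := "scored_customers"
	action := catalog.UpdateStrategy("upsert") // assumes UpdateStrategy has a string underlying type
	create := true
	xai := true

	out := data.DataOutputSpec{
		Location: &data.DataLocation{
			Type:           &locType,
			ConnectionName: &conn,
			Database:       &db,
			Table:          &table,
		},
		Action:                &action, // upsert: insert new records and update existing ones
		CreateTableIfNotExist: &create,
		XAI:                   &xai, // append SHAP values as an extra JSON column
		// Format is left nil; it only applies to flat-file outputs and defaults to "csv".
	}
	fmt.Println(*out.Location.Table, *out.Action)
}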

func (*DataOutputSpec) DeepCopy

func (in *DataOutputSpec) DeepCopy() *DataOutputSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataOutputSpec.

func (*DataOutputSpec) DeepCopyInto

func (in *DataOutputSpec) DeepCopyInto(out *DataOutputSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataOutputSpec) Descriptor

func (*DataOutputSpec) Descriptor() ([]byte, []int)

func (*DataOutputSpec) Marshal

func (m *DataOutputSpec) Marshal() (dAtA []byte, err error)

func (*DataOutputSpec) MarshalTo

func (m *DataOutputSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataOutputSpec) MarshalToSizedBuffer

func (m *DataOutputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataOutputSpec) ProtoMessage

func (*DataOutputSpec) ProtoMessage()

func (*DataOutputSpec) Reset

func (m *DataOutputSpec) Reset()

func (*DataOutputSpec) Size

func (m *DataOutputSpec) Size() (n int)

func (*DataOutputSpec) String

func (this *DataOutputSpec) String() string

func (*DataOutputSpec) Unmarshal

func (m *DataOutputSpec) Unmarshal(dAtA []byte) error

func (*DataOutputSpec) XXX_DiscardUnknown

func (m *DataOutputSpec) XXX_DiscardUnknown()

func (*DataOutputSpec) XXX_Marshal

func (m *DataOutputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataOutputSpec) XXX_Merge

func (m *DataOutputSpec) XXX_Merge(src proto.Message)

func (*DataOutputSpec) XXX_Size

func (m *DataOutputSpec) XXX_Size() int

func (*DataOutputSpec) XXX_Unmarshal

func (m *DataOutputSpec) XXX_Unmarshal(b []byte) error

type DataPipeline

type DataPipeline struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              DataPipelineSpec   `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	Status            DataPipelineStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description",description="" +kubebuilder:printcolumn:name="Schedule",type="string",JSONPath=".spec.schedule",description="" +kubebuilder:printcolumn:name="Last Run",type="date",JSONPath=".status.lastRun.at",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=datapipelines,singular=datapipeline,shortName="dpl",categories={data,modela} DataPipeline represents the ETL flow from the data sources to a processed dataset, ready for training.
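A minimal sketch of a DataPipeline object built in Go under the assumed module path; the names, namespace, and dataset-selector label are illustrative, and the remaining spec fields are left to their defaults.

package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func newChurnPipeline() *data.DataPipeline {
	version := "v1" // assumed DataProductVersion name
	desc := "Weekly ETL producing the churn training dataset"
	return &data.DataPipeline{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "churn-etl",     // assumed pipeline name
			Namespace: "churn-product", // assumed DataProduct namespace
		},
		Spec: data.DataPipelineSpec{
			VersionName: &version,
			Description: &desc,
			// Select the Datasets to process by label; the label key is illustrative.
			DatasetSelector: map[string]string{"source": "crm"},
		},
	}
}

func main() {
	fmt.Println(newChurnPipeline().Name)
}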

func (*DataPipeline) AddFinalizer

func (wr *DataPipeline) AddFinalizer()

func (*DataPipeline) CreateOrUpdateCond

func (wr *DataPipeline) CreateOrUpdateCond(cond DataPipelineCondition)

Merge or update condition

func (*DataPipeline) DeepCopy

func (in *DataPipeline) DeepCopy() *DataPipeline

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipeline.

func (*DataPipeline) DeepCopyInto

func (in *DataPipeline) DeepCopyInto(out *DataPipeline)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipeline) DeepCopyObject

func (in *DataPipeline) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataPipeline) Default

func (wr *DataPipeline) Default()

func (*DataPipeline) Descriptor

func (*DataPipeline) Descriptor() ([]byte, []int)

func (*DataPipeline) GetCond

func (*DataPipeline) GetCondIdx

func (wr *DataPipeline) GetCondIdx(t DataPipelineConditionType) int

func (*DataPipeline) HasFinalizer

func (wr *DataPipeline) HasFinalizer() bool

func (*DataPipeline) IsReady

func (w *DataPipeline) IsReady() bool

func (*DataPipeline) IsSaved

func (w *DataPipeline) IsSaved() bool

func (*DataPipeline) ManifestUri

func (wr *DataPipeline) ManifestUri() string

func (*DataPipeline) MarkFailed

func (in *DataPipeline) MarkFailed(err error)

func (*DataPipeline) MarkReady

func (in *DataPipeline) MarkReady()

func (*DataPipeline) MarkSaved

func (in *DataPipeline) MarkSaved()

func (*DataPipeline) Marshal

func (m *DataPipeline) Marshal() (dAtA []byte, err error)

func (*DataPipeline) MarshalTo

func (m *DataPipeline) MarshalTo(dAtA []byte) (int, error)

func (*DataPipeline) MarshalToSizedBuffer

func (m *DataPipeline) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipeline) ProtoMessage

func (*DataPipeline) ProtoMessage()

func (*DataPipeline) RemoveFinalizer

func (wr *DataPipeline) RemoveFinalizer()

func (*DataPipeline) Reset

func (m *DataPipeline) Reset()

func (*DataPipeline) RootUri

func (wr *DataPipeline) RootUri() string

func (*DataPipeline) SetupWebhookWithManager

func (wr *DataPipeline) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataPipeline) Size

func (m *DataPipeline) Size() (n int)

func (*DataPipeline) String

func (this *DataPipeline) String() string

func (*DataPipeline) ToYamlFile

func (wr *DataPipeline) ToYamlFile() ([]byte, error)

func (*DataPipeline) Unmarshal

func (m *DataPipeline) Unmarshal(dAtA []byte) error

func (*DataPipeline) UpdateRunStatus added in v0.4.612

func (in *DataPipeline) UpdateRunStatus(run DataPipelineRun)

func (*DataPipeline) ValidateCreate

func (wr *DataPipeline) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DataPipeline) ValidateDelete

func (wr *DataPipeline) ValidateDelete() error

func (*DataPipeline) ValidateUpdate

func (wr *DataPipeline) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DataPipeline) XXX_DiscardUnknown

func (m *DataPipeline) XXX_DiscardUnknown()

func (*DataPipeline) XXX_Marshal

func (m *DataPipeline) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipeline) XXX_Merge

func (m *DataPipeline) XXX_Merge(src proto.Message)

func (*DataPipeline) XXX_Size

func (m *DataPipeline) XXX_Size() int

func (*DataPipeline) XXX_Unmarshal

func (m *DataPipeline) XXX_Unmarshal(b []byte) error

type DataPipelineCondition

type DataPipelineCondition struct {
	// Type of data pipeline condition.
	Type DataPipelineConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=DataPipelineConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,4,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,5,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,6,opt,name=message"`
}

DataPipelineCondition describes the state of a DataPipeline at a certain point.

func (*DataPipelineCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineCondition.

func (*DataPipelineCondition) DeepCopyInto

func (in *DataPipelineCondition) DeepCopyInto(out *DataPipelineCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineCondition) Descriptor

func (*DataPipelineCondition) Descriptor() ([]byte, []int)

func (*DataPipelineCondition) Marshal

func (m *DataPipelineCondition) Marshal() (dAtA []byte, err error)

func (*DataPipelineCondition) MarshalTo

func (m *DataPipelineCondition) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineCondition) MarshalToSizedBuffer

func (m *DataPipelineCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineCondition) ProtoMessage

func (*DataPipelineCondition) ProtoMessage()

func (*DataPipelineCondition) Reset

func (m *DataPipelineCondition) Reset()

func (*DataPipelineCondition) Size

func (m *DataPipelineCondition) Size() (n int)

func (*DataPipelineCondition) String

func (this *DataPipelineCondition) String() string

func (*DataPipelineCondition) Unmarshal

func (m *DataPipelineCondition) Unmarshal(dAtA []byte) error

func (*DataPipelineCondition) XXX_DiscardUnknown

func (m *DataPipelineCondition) XXX_DiscardUnknown()

func (*DataPipelineCondition) XXX_Marshal

func (m *DataPipelineCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineCondition) XXX_Merge

func (m *DataPipelineCondition) XXX_Merge(src proto.Message)

func (*DataPipelineCondition) XXX_Size

func (m *DataPipelineCondition) XXX_Size() int

func (*DataPipelineCondition) XXX_Unmarshal

func (m *DataPipelineCondition) XXX_Unmarshal(b []byte) error

type DataPipelineConditionType

type DataPipelineConditionType string

Condition of the data pipeline

const (
	DataPipelineReady DataPipelineConditionType = "Ready"
	DataPipelineSaved DataPipelineConditionType = "Saved"
)

type DataPipelineList

type DataPipelineList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []DataPipeline `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DataPipelineList contains a list of DataPipeline

func (*DataPipelineList) DeepCopy

func (in *DataPipelineList) DeepCopy() *DataPipelineList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineList.

func (*DataPipelineList) DeepCopyInto

func (in *DataPipelineList) DeepCopyInto(out *DataPipelineList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineList) DeepCopyObject

func (in *DataPipelineList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataPipelineList) Descriptor

func (*DataPipelineList) Descriptor() ([]byte, []int)

func (*DataPipelineList) Marshal

func (m *DataPipelineList) Marshal() (dAtA []byte, err error)

func (*DataPipelineList) MarshalTo

func (m *DataPipelineList) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineList) MarshalToSizedBuffer

func (m *DataPipelineList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineList) ProtoMessage

func (*DataPipelineList) ProtoMessage()

func (*DataPipelineList) Reset

func (m *DataPipelineList) Reset()

func (*DataPipelineList) Size

func (m *DataPipelineList) Size() (n int)

func (*DataPipelineList) String

func (this *DataPipelineList) String() string

func (*DataPipelineList) Unmarshal

func (m *DataPipelineList) Unmarshal(dAtA []byte) error

func (*DataPipelineList) XXX_DiscardUnknown

func (m *DataPipelineList) XXX_DiscardUnknown()

func (*DataPipelineList) XXX_Marshal

func (m *DataPipelineList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineList) XXX_Merge

func (m *DataPipelineList) XXX_Merge(src proto.Message)

func (*DataPipelineList) XXX_Size

func (m *DataPipelineList) XXX_Size() int

func (*DataPipelineList) XXX_Unmarshal

func (m *DataPipelineList) XXX_Unmarshal(b []byte) error

type DataPipelineRun

type DataPipelineRun struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              DataPipelineRunSpec   `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	Status            DataPipelineRunStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

+kubebuilder:object:root=true +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Progress",type="string",JSONPath=".status.progress",priority=1 +kubebuilder:printcolumn:name="Pipeline",type="string",JSONPath=".spec.datapipelineName" +kubebuilder:printcolumn:name="StartTime",type="date",JSONPath=".status.startTime",priority=1 +kubebuilder:printcolumn:name="CompletionTime",type="date",JSONPath=".status.completionTime",priority=1 +kubebuilder:printcolumn:name="Failure",type="string",JSONPath=".metadata.failureMessage" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:path=datapipelineruns,singular=datapipelinerun,shortName="dpr",categories={data,modela,all} DataPipelineRun represent one execution of the data pipeline

func (*DataPipelineRun) Aborted

func (in *DataPipelineRun) Aborted() bool

func (*DataPipelineRun) AddFinalizer

func (run *DataPipelineRun) AddFinalizer()

func (*DataPipelineRun) CompletionAlert added in v0.4.601

func (run *DataPipelineRun) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert

Generate a completion alert for the data pipeline run

func (*DataPipelineRun) CreateOrUpdateCond

func (run *DataPipelineRun) CreateOrUpdateCond(cond DataPipelineRunCondition)

Merge or update condition

func (*DataPipelineRun) DeepCopy

func (in *DataPipelineRun) DeepCopy() *DataPipelineRun

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRun.

func (*DataPipelineRun) DeepCopyInto

func (in *DataPipelineRun) DeepCopyInto(out *DataPipelineRun)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRun) DeepCopyObject

func (in *DataPipelineRun) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataPipelineRun) Default

func (run *DataPipelineRun) Default()

func (*DataPipelineRun) Descriptor

func (*DataPipelineRun) Descriptor() ([]byte, []int)

func (*DataPipelineRun) ErrorAlert added in v0.4.601

func (run *DataPipelineRun) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert

func (*DataPipelineRun) GetCond

func (*DataPipelineRun) GetCondIdx

func (*DataPipelineRun) HasFinalizer

func (run *DataPipelineRun) HasFinalizer() bool

func (*DataPipelineRun) IsCompleted

func (in *DataPipelineRun) IsCompleted() bool

func (*DataPipelineRun) IsFailed

func (in *DataPipelineRun) IsFailed() bool

func (*DataPipelineRun) IsReady

func (w *DataPipelineRun) IsReady() bool

func (*DataPipelineRun) IsRunning

func (in *DataPipelineRun) IsRunning() bool

func (*DataPipelineRun) IsSaved

func (w *DataPipelineRun) IsSaved() bool

func (*DataPipelineRun) ManifestUri

func (run *DataPipelineRun) ManifestUri() string

func (*DataPipelineRun) MarkAborted added in v0.4.607

func (in *DataPipelineRun) MarkAborted(err error)

func (*DataPipelineRun) MarkComplete

func (in *DataPipelineRun) MarkComplete()

func (*DataPipelineRun) MarkFailed

func (in *DataPipelineRun) MarkFailed(err error)

func (*DataPipelineRun) MarkRunning

func (r *DataPipelineRun) MarkRunning()

func (*DataPipelineRun) MarkSaved

func (in *DataPipelineRun) MarkSaved()

func (*DataPipelineRun) Marshal

func (m *DataPipelineRun) Marshal() (dAtA []byte, err error)

func (*DataPipelineRun) MarshalTo

func (m *DataPipelineRun) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRun) MarshalToSizedBuffer

func (m *DataPipelineRun) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineRun) Paused

func (in *DataPipelineRun) Paused() bool

func (*DataPipelineRun) ProtoMessage

func (*DataPipelineRun) ProtoMessage()

func (*DataPipelineRun) RemoveFinalizer

func (run *DataPipelineRun) RemoveFinalizer()

func (*DataPipelineRun) Reset

func (m *DataPipelineRun) Reset()

func (*DataPipelineRun) RootUri

func (run *DataPipelineRun) RootUri() string

func (*DataPipelineRun) RunStatus added in v0.4.614

func (run *DataPipelineRun) RunStatus() *catalog.LastRunStatus

Return the state of the run as RunStatus

func (*DataPipelineRun) SetupWebhookWithManager

func (wr *DataPipelineRun) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataPipelineRun) Size

func (m *DataPipelineRun) Size() (n int)

func (*DataPipelineRun) StatusString

func (run *DataPipelineRun) StatusString() string

func (*DataPipelineRun) String

func (this *DataPipelineRun) String() string

func (*DataPipelineRun) ToYamlFile

func (run *DataPipelineRun) ToYamlFile() ([]byte, error)

func (*DataPipelineRun) Unmarshal

func (m *DataPipelineRun) Unmarshal(dAtA []byte) error

func (*DataPipelineRun) ValidateCreate

func (run *DataPipelineRun) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DataPipelineRun) ValidateDelete

func (run *DataPipelineRun) ValidateDelete() error

func (*DataPipelineRun) ValidateUpdate

func (run *DataPipelineRun) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DataPipelineRun) XXX_DiscardUnknown

func (m *DataPipelineRun) XXX_DiscardUnknown()

func (*DataPipelineRun) XXX_Marshal

func (m *DataPipelineRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRun) XXX_Merge

func (m *DataPipelineRun) XXX_Merge(src proto.Message)

func (*DataPipelineRun) XXX_Size

func (m *DataPipelineRun) XXX_Size() int

func (*DataPipelineRun) XXX_Unmarshal

func (m *DataPipelineRun) XXX_Unmarshal(b []byte) error

type DataPipelineRunCondition

type DataPipelineRunCondition struct {
	// Type of data pipeline run condition.
	Type DataPipelineRunConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=DataPipelineRunConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,4,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,5,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,6,opt,name=message"`
}

DataPipelineRunCondition describes the state of a DataPipelineRun at a certain point.

func (*DataPipelineRunCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunCondition.

func (*DataPipelineRunCondition) DeepCopyInto

func (in *DataPipelineRunCondition) DeepCopyInto(out *DataPipelineRunCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRunCondition) Descriptor

func (*DataPipelineRunCondition) Descriptor() ([]byte, []int)

func (*DataPipelineRunCondition) Marshal

func (m *DataPipelineRunCondition) Marshal() (dAtA []byte, err error)

func (*DataPipelineRunCondition) MarshalTo

func (m *DataPipelineRunCondition) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRunCondition) MarshalToSizedBuffer

func (m *DataPipelineRunCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineRunCondition) ProtoMessage

func (*DataPipelineRunCondition) ProtoMessage()

func (*DataPipelineRunCondition) Reset

func (m *DataPipelineRunCondition) Reset()

func (*DataPipelineRunCondition) Size

func (m *DataPipelineRunCondition) Size() (n int)

func (*DataPipelineRunCondition) String

func (this *DataPipelineRunCondition) String() string

func (*DataPipelineRunCondition) Unmarshal

func (m *DataPipelineRunCondition) Unmarshal(dAtA []byte) error

func (*DataPipelineRunCondition) XXX_DiscardUnknown

func (m *DataPipelineRunCondition) XXX_DiscardUnknown()

func (*DataPipelineRunCondition) XXX_Marshal

func (m *DataPipelineRunCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRunCondition) XXX_Merge

func (m *DataPipelineRunCondition) XXX_Merge(src proto.Message)

func (*DataPipelineRunCondition) XXX_Size

func (m *DataPipelineRunCondition) XXX_Size() int

func (*DataPipelineRunCondition) XXX_Unmarshal

func (m *DataPipelineRunCondition) XXX_Unmarshal(b []byte) error

type DataPipelineRunConditionType

type DataPipelineRunConditionType string

Condition on the data pipeline run

const (
	DataPipelineRunCompleted DataPipelineRunConditionType = "Completed"
	DataPipelineRunSaved     DataPipelineRunConditionType = "Saved"
)

type DataPipelineRunList

type DataPipelineRunList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []DataPipelineRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DataPipelineRunList contains a list of DataPipelineRun

func (*DataPipelineRunList) DeepCopy

func (in *DataPipelineRunList) DeepCopy() *DataPipelineRunList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunList.

func (*DataPipelineRunList) DeepCopyInto

func (in *DataPipelineRunList) DeepCopyInto(out *DataPipelineRunList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRunList) DeepCopyObject

func (in *DataPipelineRunList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataPipelineRunList) Descriptor

func (*DataPipelineRunList) Descriptor() ([]byte, []int)

func (*DataPipelineRunList) Marshal

func (m *DataPipelineRunList) Marshal() (dAtA []byte, err error)

func (*DataPipelineRunList) MarshalTo

func (m *DataPipelineRunList) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRunList) MarshalToSizedBuffer

func (m *DataPipelineRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineRunList) ProtoMessage

func (*DataPipelineRunList) ProtoMessage()

func (*DataPipelineRunList) Reset

func (m *DataPipelineRunList) Reset()

func (*DataPipelineRunList) Size

func (m *DataPipelineRunList) Size() (n int)

func (*DataPipelineRunList) String

func (this *DataPipelineRunList) String() string

func (*DataPipelineRunList) Unmarshal

func (m *DataPipelineRunList) Unmarshal(dAtA []byte) error

func (*DataPipelineRunList) XXX_DiscardUnknown

func (m *DataPipelineRunList) XXX_DiscardUnknown()

func (*DataPipelineRunList) XXX_Marshal

func (m *DataPipelineRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRunList) XXX_Merge

func (m *DataPipelineRunList) XXX_Merge(src proto.Message)

func (*DataPipelineRunList) XXX_Size

func (m *DataPipelineRunList) XXX_Size() int

func (*DataPipelineRunList) XXX_Unmarshal

func (m *DataPipelineRunList) XXX_Unmarshal(b []byte) error

type DataPipelineRunPhase

type DataPipelineRunPhase string

const (
	DataPipelineRunPhasePending   DataPipelineRunPhase = "Pending"
	DataPipelineRunPhaseRunning   DataPipelineRunPhase = "Running"
	DataPipelineRunPhaseCompleted DataPipelineRunPhase = "Completed"
	DataPipelineRunPhaseFailed    DataPipelineRunPhase = "Failed"
	DataPipelineRunPhaseAborted   DataPipelineRunPhase = "Aborted"
	DataPipelineRunPhasePaused    DataPipelineRunPhase = "Paused"
)
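A small helper, shown as a sketch, that reports whether a run phase is terminal using only the constants above; the helper itself is illustrative and not part of this package.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// isTerminal reports whether a run phase can no longer change.
func isTerminal(p data.DataPipelineRunPhase) bool {
	switch p {
	case data.DataPipelineRunPhaseCompleted,
		data.DataPipelineRunPhaseFailed,
		data.DataPipelineRunPhaseAborted:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isTerminal(data.DataPipelineRunPhaseRunning)) // false
	fmt.Println(isTerminal(data.DataPipelineRunPhaseFailed))  // true
}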

type DataPipelineRunSpec

type DataPipelineRunSpec struct {
	// The data product version of the run
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
	// The name of the DataPipeline that this run executes
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DataPipelineName *string `json:"datapipelineName,omitempty" protobuf:"bytes,2,opt,name=datapipelineName"`
	// The owner of the run, set to the owner of the pipeline
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,4,opt,name=owner"`
	// Specify the resources for the data pipeline run
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,5,opt,name=resources"`
	// The priority of this data pipeline. The default is medium.
	// +kubebuilder:default:=medium
	// +kubebuilder:validation:Optional
	Priority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,6,opt,name=priority"`
	// Set to true to pause the pipeline run
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Paused *bool `json:"paused,omitempty" protobuf:"varint,7,opt,name=paused"`
	// Set to true to abort the pipeline run
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Aborted *bool `json:"aborted,omitempty" protobuf:"varint,8,opt,name=aborted"`
	// The Lab under which the data pipeline run is executed.
	// +kubebuilder:validation:Optional
	LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,9,opt,name=labRef"`
}

DataPipelineRunSpec defines the desired state of a DataPipelineRun
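A minimal sketch that constructs a DataPipelineRun for an existing pipeline; the generate-name convention, namespace, and Lab name are assumptions.

package main

import (
	"fmt"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func newRun(pipeline, version string) *data.DataPipelineRun {
	return &data.DataPipelineRun{
		ObjectMeta: metav1.ObjectMeta{
			GenerateName: pipeline + "-run-", // naming convention is illustrative
			Namespace:    "churn-product",    // assumed DataProduct namespace
		},
		Spec: data.DataPipelineRunSpec{
			VersionName:      &version,
			DataPipelineName: &pipeline,
			// Run in a specific Lab; the Lab name is an assumption.
			LabRef: v1.ObjectReference{Name: "default-lab"},
		},
	}
}

func main() {
	fmt.Println(newRun("churn-etl", "v1").GenerateName)
}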

func (*DataPipelineRunSpec) DeepCopy

func (in *DataPipelineRunSpec) DeepCopy() *DataPipelineRunSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunSpec.

func (*DataPipelineRunSpec) DeepCopyInto

func (in *DataPipelineRunSpec) DeepCopyInto(out *DataPipelineRunSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRunSpec) Descriptor

func (*DataPipelineRunSpec) Descriptor() ([]byte, []int)

func (*DataPipelineRunSpec) Marshal

func (m *DataPipelineRunSpec) Marshal() (dAtA []byte, err error)

func (*DataPipelineRunSpec) MarshalTo

func (m *DataPipelineRunSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRunSpec) MarshalToSizedBuffer

func (m *DataPipelineRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineRunSpec) ProtoMessage

func (*DataPipelineRunSpec) ProtoMessage()

func (*DataPipelineRunSpec) Reset

func (m *DataPipelineRunSpec) Reset()

func (*DataPipelineRunSpec) Size

func (m *DataPipelineRunSpec) Size() (n int)

func (*DataPipelineRunSpec) String

func (this *DataPipelineRunSpec) String() string

func (*DataPipelineRunSpec) Unmarshal

func (m *DataPipelineRunSpec) Unmarshal(dAtA []byte) error

func (*DataPipelineRunSpec) XXX_DiscardUnknown

func (m *DataPipelineRunSpec) XXX_DiscardUnknown()

func (*DataPipelineRunSpec) XXX_Marshal

func (m *DataPipelineRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRunSpec) XXX_Merge

func (m *DataPipelineRunSpec) XXX_Merge(src proto.Message)

func (*DataPipelineRunSpec) XXX_Size

func (m *DataPipelineRunSpec) XXX_Size() int

func (*DataPipelineRunSpec) XXX_Unmarshal

func (m *DataPipelineRunSpec) XXX_Unmarshal(b []byte) error

type DataPipelineRunStatus

type DataPipelineRunStatus struct {
	// RecipeRuns is the names of the recipe runs that occur during running of the pipeline.
	//+kubebuilder:validation:Optional
	RecipeRuns []string `json:"recipeRuns" protobuf:"bytes,1,rep,name=recipeRuns"`
	// The location of the resulting dataset produced by the pipeline
	//+kubebuilder:validation:Optional
	Output DataLocation `json:"output" protobuf:"bytes,2,opt,name=output"`
	// the phase of the run
	// +kubebuilder:default:="Pending"
	//+kubebuilder:validation:Optional
	Phase DataPipelineRunPhase `json:"phase" protobuf:"bytes,3,opt,name=phase"`
	// StartTime is the start time of the pipeline
	// +kubebuilder:validation:Optional
	StartTime *metav1.Time `json:"startTime,omitempty" protobuf:"bytes,4,opt,name=startTime"`
	// EndTime is the end time of the pipeline
	// +kubebuilder:validation:Optional
	EndTime *metav1.Time `json:"endTime,omitempty" protobuf:"bytes,5,opt,name=endTime"`
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,6,opt,name=observedGeneration"`

	// Update in case of terminal failure
	// Borrowed from cluster api controller
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,7,opt,name=failureReason"`

	// Update in case of terminal failure message
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,8,opt,name=failureMessage"`

	// Pipeline progress in percent; the progress takes into account the different stages of the pipeline
	// +kubebuilder:validation:Optional
	Progress *int32 `json:"progress" protobuf:"varint,9,opt,name=progress"`
	// Holds the location of log paths
	//+kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,10,opt,name=logs"`

	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,11,opt,name=lastUpdated"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []DataPipelineRunCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,12,rep,name=conditions"`
}

DataPipelineRunStatus defines the observed state of DataPipelineRun

func (*DataPipelineRunStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineRunStatus.

func (*DataPipelineRunStatus) DeepCopyInto

func (in *DataPipelineRunStatus) DeepCopyInto(out *DataPipelineRunStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineRunStatus) Descriptor

func (*DataPipelineRunStatus) Descriptor() ([]byte, []int)

func (*DataPipelineRunStatus) Marshal

func (m *DataPipelineRunStatus) Marshal() (dAtA []byte, err error)

func (*DataPipelineRunStatus) MarshalTo

func (m *DataPipelineRunStatus) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineRunStatus) MarshalToSizedBuffer

func (m *DataPipelineRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineRunStatus) ProtoMessage

func (*DataPipelineRunStatus) ProtoMessage()

func (*DataPipelineRunStatus) Reset

func (m *DataPipelineRunStatus) Reset()

func (*DataPipelineRunStatus) Size

func (m *DataPipelineRunStatus) Size() (n int)

func (*DataPipelineRunStatus) String

func (this *DataPipelineRunStatus) String() string

func (*DataPipelineRunStatus) Unmarshal

func (m *DataPipelineRunStatus) Unmarshal(dAtA []byte) error

func (*DataPipelineRunStatus) XXX_DiscardUnknown

func (m *DataPipelineRunStatus) XXX_DiscardUnknown()

func (*DataPipelineRunStatus) XXX_Marshal

func (m *DataPipelineRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineRunStatus) XXX_Merge

func (m *DataPipelineRunStatus) XXX_Merge(src proto.Message)

func (*DataPipelineRunStatus) XXX_Size

func (m *DataPipelineRunStatus) XXX_Size() int

func (*DataPipelineRunStatus) XXX_Unmarshal

func (m *DataPipelineRunStatus) XXX_Unmarshal(b []byte) error

type DataPipelineSpec

type DataPipelineSpec struct {
	// VersionName is the data product version of the data pipeline
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
	// Description of the data pipeline
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,2,opt,name=description"`
	// DatasetSelector is used to select datasets for processing in the pipeline
	// +kubebuilder:validation:Optional
	DatasetSelector map[string]string `json:"datasetSelector,omitempty" protobuf:"bytes,3,opt,name=datasetSelector"`
	// Recipes defines the list of recipes and the order in which they need to run
	// +kubebuilder:validation:Optional
	Recipes []RecipePartSpec `json:"recipes,omitempty" protobuf:"bytes,4,rep,name=recipes"`
	// The output definition
	// +kubebuilder:validation:Optional
	Output DataOutputSpec `json:"output,omitempty" protobuf:"bytes,5,opt,name=output"`
	// Schedule for running the pipeline
	// +kubebuilder:validation:Optional
	Schedule catalog.RunSchedule `json:"schedule,omitempty" protobuf:"bytes,6,opt,name=schedule"`
	// Specification for notification
	// +kubebuilder:validation:Optional
	Notification catalog.NotificationSpec `json:"notification,omitempty" protobuf:"bytes,7,opt,name=notification"`
	// Owner of this data pipeline
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,8,opt,name=owner"`
	// Resources specifies the hardware resource requirements for the data pipeline.
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,9,opt,name=resources"`
	// The priority of this data pipeline. The default is medium.
	// +kubebuilder:default:="medium"
	// +kubebuilder:validation:Optional
	Priority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,10,opt,name=priority"`
	// Set to true to pause the scheduled runs of this data pipeline
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Paused *bool `json:"paused,omitempty" protobuf:"varint,11,opt,name=paused"`
	// TTL for runs of this data pipeline. Set to 0 if no garbage collection is needed.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,12,opt,name=ttl"`
}

DataPipelineSpec defines the desired state of a DataPipeline

func (*DataPipelineSpec) DeepCopy

func (in *DataPipelineSpec) DeepCopy() *DataPipelineSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineSpec.

func (*DataPipelineSpec) DeepCopyInto

func (in *DataPipelineSpec) DeepCopyInto(out *DataPipelineSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineSpec) Descriptor

func (*DataPipelineSpec) Descriptor() ([]byte, []int)

func (*DataPipelineSpec) Marshal

func (m *DataPipelineSpec) Marshal() (dAtA []byte, err error)

func (*DataPipelineSpec) MarshalTo

func (m *DataPipelineSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineSpec) MarshalToSizedBuffer

func (m *DataPipelineSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineSpec) ProtoMessage

func (*DataPipelineSpec) ProtoMessage()

func (*DataPipelineSpec) Reset

func (m *DataPipelineSpec) Reset()

func (*DataPipelineSpec) Size

func (m *DataPipelineSpec) Size() (n int)

func (*DataPipelineSpec) String

func (this *DataPipelineSpec) String() string

func (*DataPipelineSpec) Unmarshal

func (m *DataPipelineSpec) Unmarshal(dAtA []byte) error

func (*DataPipelineSpec) XXX_DiscardUnknown

func (m *DataPipelineSpec) XXX_DiscardUnknown()

func (*DataPipelineSpec) XXX_Marshal

func (m *DataPipelineSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineSpec) XXX_Merge

func (m *DataPipelineSpec) XXX_Merge(src proto.Message)

func (*DataPipelineSpec) XXX_Size

func (m *DataPipelineSpec) XXX_Size() int

func (*DataPipelineSpec) XXX_Unmarshal

func (m *DataPipelineSpec) XXX_Unmarshal(b []byte) error

type DataPipelineStatus

type DataPipelineStatus struct {
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,2,opt,name=lastUpdated"`
	// LastRun holds the status of the last data pipeline run
	//+kubebuilder:validation:Optional
	LastRun catalog.LastRunStatus `json:"lastRun,omitempty" protobuf:"bytes,3,opt,name=lastRun"`
	// The time of the next schedule run
	//+kubebuilder:validation:Optional
	NextRun *metav1.Time `json:"nextRun,omitempty" protobuf:"bytes,4,opt,name=nextRun"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []DataPipelineCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,5,rep,name=conditions"`
}

DataPipelineStatus is the observed state of the DataPipeline object.

func (*DataPipelineStatus) DeepCopy

func (in *DataPipelineStatus) DeepCopy() *DataPipelineStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPipelineStatus.

func (*DataPipelineStatus) DeepCopyInto

func (in *DataPipelineStatus) DeepCopyInto(out *DataPipelineStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataPipelineStatus) Descriptor

func (*DataPipelineStatus) Descriptor() ([]byte, []int)

func (*DataPipelineStatus) Marshal

func (m *DataPipelineStatus) Marshal() (dAtA []byte, err error)

func (*DataPipelineStatus) MarshalTo

func (m *DataPipelineStatus) MarshalTo(dAtA []byte) (int, error)

func (*DataPipelineStatus) MarshalToSizedBuffer

func (m *DataPipelineStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataPipelineStatus) ProtoMessage

func (*DataPipelineStatus) ProtoMessage()

func (*DataPipelineStatus) Reset

func (m *DataPipelineStatus) Reset()

func (*DataPipelineStatus) Size

func (m *DataPipelineStatus) Size() (n int)

func (*DataPipelineStatus) String

func (this *DataPipelineStatus) String() string

func (*DataPipelineStatus) Unmarshal

func (m *DataPipelineStatus) Unmarshal(dAtA []byte) error

func (*DataPipelineStatus) XXX_DiscardUnknown

func (m *DataPipelineStatus) XXX_DiscardUnknown()

func (*DataPipelineStatus) XXX_Marshal

func (m *DataPipelineStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataPipelineStatus) XXX_Merge

func (m *DataPipelineStatus) XXX_Merge(src proto.Message)

func (*DataPipelineStatus) XXX_Size

func (m *DataPipelineStatus) XXX_Size() int

func (*DataPipelineStatus) XXX_Unmarshal

func (m *DataPipelineStatus) XXX_Unmarshal(b []byte) error

type DataProduct

type DataProduct struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              DataProductSpec   `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	Status            DataProductStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

DataProduct represents a single DataProduct namespace, which contains all non-infrastructure resources. Additionally, it specifies default parameters for resources to be created under the namespace, such as workload class and storage location +kubebuilder:object:root=true +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Task",type="string",JSONPath=".spec.task" +kubebuilder:printcolumn:name="Public",type="boolean",JSONPath=".spec.public" +kubebuilder:printcolumn:name="Errors",type="integer",JSONPath=".status.totalErrorAlerts" +kubebuilder:printcolumn:name="Data Sources",type="integer",JSONPath=".status.totalDatasources",priority=1 +kubebuilder:printcolumn:name="Datasets",type="integer",JSONPath=".status.totalDatasets",priority=1 +kubebuilder:printcolumn:name="Studies",type="integer",JSONPath=".status.totalStudies",priority=1 +kubebuilder:printcolumn:name="Models",type="integer",JSONPath=".status.totalModels",priority=1 +kubebuilder:printcolumn:name="Predictors",type="integer",JSONPath=".status.totalPredictors",priority=1 +kubebuilder:printcolumn:name="Apps",type="integer",JSONPath=".status.totalApps",priority=1 +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=dataproducts,shortName=prod,singular=dataproduct,shortName="prod",categories={data,modela,all}
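A minimal sketch of a DataProduct with a few of the spec fields documented below; the owner, Lab, and ServingSite names are assumptions, and everything else falls back to the defaults.

package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func newDataProduct() *data.DataProduct {
	owner := "data-team" // Account name, assumed
	public := false
	desc := "Customer churn data product"
	lab := "default-lab"           // assumed Lab name
	site := "default-serving-site" // assumed ServingSite name

	return &data.DataProduct{
		ObjectMeta: metav1.ObjectMeta{Name: "churn"},
		Spec: data.DataProductSpec{
			Owner:           &owner,
			Public:          &public,
			Description:     &desc,
			LabName:         &lab,
			ServingSiteName: &site,
		},
	}
}

func main() {
	fmt.Println(newDataProduct().Name)
}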

func (*DataProduct) AddFinalizer

func (r *DataProduct) AddFinalizer()

func (*DataProduct) CreateNamespace

func (r *DataProduct) CreateNamespace() *v1.Namespace

func (*DataProduct) CreateOrUpdateCond

func (r *DataProduct) CreateOrUpdateCond(cond DataProductCondition)

Merge or update condition

func (*DataProduct) DataEngineer

func (r *DataProduct) DataEngineer() *rbacv1.Role

DataEngineer role

func (*DataProduct) DataLabler

func (r *DataProduct) DataLabler() *rbacv1.Role

Labeler role

func (*DataProduct) DataScientist

func (r *DataProduct) DataScientist() *rbacv1.Role

DataScientist role

func (*DataProduct) DeepCopy

func (in *DataProduct) DeepCopy() *DataProduct

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProduct.

func (*DataProduct) DeepCopyInto

func (in *DataProduct) DeepCopyInto(out *DataProduct)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProduct) DeepCopyObject

func (in *DataProduct) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataProduct) Default

func (r *DataProduct) Default()

No defaults in the current release

func (*DataProduct) Descriptor

func (*DataProduct) Descriptor() ([]byte, []int)

func (*DataProduct) GetCond

func (*DataProduct) GetCondIdx

func (r *DataProduct) GetCondIdx(t DataProductConditionType) int

func (*DataProduct) GetRolesForAccount added in v0.4.547

func (in *DataProduct) GetRolesForAccount(account *infra.Account) []string

func (*DataProduct) HasFinalizer

func (r *DataProduct) HasFinalizer() bool

func (*DataProduct) IsClassification

func (r *DataProduct) IsClassification() bool

func (*DataProduct) IsReady

func (r *DataProduct) IsReady() bool

func (*DataProduct) IsSaved

func (p *DataProduct) IsSaved() bool

func (*DataProduct) MarkFailed

func (p *DataProduct) MarkFailed(err error)

func (*DataProduct) MarkReady added in v0.4.467

func (in *DataProduct) MarkReady()

func (*DataProduct) MarkSaved

func (version *DataProduct) MarkSaved()

func (*DataProduct) Marshal

func (m *DataProduct) Marshal() (dAtA []byte, err error)

func (*DataProduct) MarshalTo

func (m *DataProduct) MarshalTo(dAtA []byte) (int, error)

func (*DataProduct) MarshalToSizedBuffer

func (m *DataProduct) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProduct) PredictionConsumer

func (r *DataProduct) PredictionConsumer() *rbacv1.Role

PredictionConsumer role

func (*DataProduct) PrefixArchiveUri

func (r *DataProduct) PrefixArchiveUri(uri string) string

func (*DataProduct) PrefixDepotUri

func (r *DataProduct) PrefixDepotUri(uri string) string

func (*DataProduct) PrefixLiveUri

func (r *DataProduct) PrefixLiveUri(uri string) string

func (*DataProduct) ProductAdmin

func (r *DataProduct) ProductAdmin() *rbacv1.Role

ProductAdmin role

func (*DataProduct) ProtoMessage

func (*DataProduct) ProtoMessage()

func (*DataProduct) RemoveFinalizer

func (r *DataProduct) RemoveFinalizer()

func (*DataProduct) ReportConsumer

func (r *DataProduct) ReportConsumer() *rbacv1.Role

ReportConsumer role

func (*DataProduct) Reset

func (m *DataProduct) Reset()

func (*DataProduct) SetupWebhookWithManager

func (r *DataProduct) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataProduct) Size

func (m *DataProduct) Size() (n int)

func (*DataProduct) String

func (this *DataProduct) String() string

func (*DataProduct) ToYamlFile

func (r *DataProduct) ToYamlFile() ([]byte, error)

func (*DataProduct) Unmarshal

func (m *DataProduct) Unmarshal(dAtA []byte) error

func (*DataProduct) UpdateBaselineVersion added in v0.4.671

func (in *DataProduct) UpdateBaselineVersion(versions DataProductVersionList)

func (*DataProduct) ValidateCreate

func (r *DataProduct) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DataProduct) ValidateDelete

func (r *DataProduct) ValidateDelete() error

func (*DataProduct) ValidateUpdate

func (r *DataProduct) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DataProduct) XXX_DiscardUnknown

func (m *DataProduct) XXX_DiscardUnknown()

func (*DataProduct) XXX_Marshal

func (m *DataProduct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProduct) XXX_Merge

func (m *DataProduct) XXX_Merge(src proto.Message)

func (*DataProduct) XXX_Size

func (m *DataProduct) XXX_Size() int

func (*DataProduct) XXX_Unmarshal

func (m *DataProduct) XXX_Unmarshal(b []byte) error

func (*DataProduct) YamlUri

func (r *DataProduct) YamlUri() string

type DataProductCondition

type DataProductCondition struct {
	// Type of condition.
	Type DataProductConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=DataProductConditionType"`
	// Status of the condition, one of True, False, Unknown
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,4,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition
	Reason string `json:"reason,omitempty" protobuf:"bytes,5,opt,name=reason"`
	// A human readable message indicating details about the transition
	Message string `json:"message,omitempty" protobuf:"bytes,6,opt,name=message"`
}

DataProductCondition describes the state of a DataProduct at a certain point

func (*DataProductCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductCondition.

func (*DataProductCondition) DeepCopyInto

func (in *DataProductCondition) DeepCopyInto(out *DataProductCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductCondition) Descriptor

func (*DataProductCondition) Descriptor() ([]byte, []int)

func (*DataProductCondition) Marshal

func (m *DataProductCondition) Marshal() (dAtA []byte, err error)

func (*DataProductCondition) MarshalTo

func (m *DataProductCondition) MarshalTo(dAtA []byte) (int, error)

func (*DataProductCondition) MarshalToSizedBuffer

func (m *DataProductCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductCondition) ProtoMessage

func (*DataProductCondition) ProtoMessage()

func (*DataProductCondition) Reset

func (m *DataProductCondition) Reset()

func (*DataProductCondition) Size

func (m *DataProductCondition) Size() (n int)

func (*DataProductCondition) String

func (this *DataProductCondition) String() string

func (*DataProductCondition) Unmarshal

func (m *DataProductCondition) Unmarshal(dAtA []byte) error

func (*DataProductCondition) XXX_DiscardUnknown

func (m *DataProductCondition) XXX_DiscardUnknown()

func (*DataProductCondition) XXX_Marshal

func (m *DataProductCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductCondition) XXX_Merge

func (m *DataProductCondition) XXX_Merge(src proto.Message)

func (*DataProductCondition) XXX_Size

func (m *DataProductCondition) XXX_Size() int

func (*DataProductCondition) XXX_Unmarshal

func (m *DataProductCondition) XXX_Unmarshal(b []byte) error

type DataProductConditionType

type DataProductConditionType string

Condition on the data product

const (
	DataProductReady DataProductConditionType = "Ready"
	DataProductSaved DataProductConditionType = "Saved"
)

type DataProductList

type DataProductList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []DataProduct `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DataProductList contains a list of DataProducts

func (*DataProductList) DeepCopy

func (in *DataProductList) DeepCopy() *DataProductList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductList.

func (*DataProductList) DeepCopyInto

func (in *DataProductList) DeepCopyInto(out *DataProductList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductList) DeepCopyObject

func (in *DataProductList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataProductList) Descriptor

func (*DataProductList) Descriptor() ([]byte, []int)

func (*DataProductList) Marshal

func (m *DataProductList) Marshal() (dAtA []byte, err error)

func (*DataProductList) MarshalTo

func (m *DataProductList) MarshalTo(dAtA []byte) (int, error)

func (*DataProductList) MarshalToSizedBuffer

func (m *DataProductList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductList) ProtoMessage

func (*DataProductList) ProtoMessage()

func (*DataProductList) Reset

func (m *DataProductList) Reset()

func (*DataProductList) Size

func (m *DataProductList) Size() (n int)

func (*DataProductList) String

func (this *DataProductList) String() string

func (*DataProductList) Unmarshal

func (m *DataProductList) Unmarshal(dAtA []byte) error

func (*DataProductList) XXX_DiscardUnknown

func (m *DataProductList) XXX_DiscardUnknown()

func (*DataProductList) XXX_Marshal

func (m *DataProductList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductList) XXX_Merge

func (m *DataProductList) XXX_Merge(src proto.Message)

func (*DataProductList) XXX_Size

func (m *DataProductList) XXX_Size() int

func (*DataProductList) XXX_Unmarshal

func (m *DataProductList) XXX_Unmarshal(b []byte) error

type DataProductSpec

type DataProductSpec struct {
	// The name of the Account which created the object, which exists in the same tenant as the object
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// Indicates if the DataProduct is public and can be accessed without permissions
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Public *bool `json:"public,omitempty" protobuf:"varint,2,opt,name=public"`
	// The reference to the Tenant which owns the DataProduct. Defaults to `default-tenant`
	// +kubebuilder:validation:Optional
	TenantRef *v1.ObjectReference `json:"tenantRef,omitempty" protobuf:"bytes,3,opt,name=tenantRef"`
	// GitLocation is the default Git location where all child resources will be tracked as YAML
	// +kubebuilder:validation:Optional
	GitLocation GitLocation `json:"gitLocation,omitempty" protobuf:"bytes,4,opt,name=gitLocation"`
	// ImageLocation is the default Docker image repository where model images produced under the DataProduct will be stored
	// +kubebuilder:validation:Optional
	ImageLocation *ImageLocation `json:"imageLocation,omitempty" protobuf:"bytes,5,opt,name=imageLocation"`
	// The name of the Lab that will be used by default with all compute-requiring child resources
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Optional
	LabName *string `json:"labName" protobuf:"bytes,7,opt,name=labName"`
	// The name of the Serving Site which will be used by default with all Predictor resources
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	ServingSiteName *string `json:"servingSiteName" protobuf:"bytes,8,opt,name=servingSiteName"`
	// Task specifies the default machine learning task of the product (classification, regression, etc.)
	// +kubebuilder:validation:Optional
	Task *catalog.MLTask `json:"task,omitempty" protobuf:"bytes,9,opt,name=task"`
	// Subtask specifies the default subtask relevant to the primary task (text classification, image object detection, etc.)
	// +kubebuilder:default:=none
	// +kubebuilder:validation:Optional
	SubTask *catalog.MLSubtask `json:"subtask,omitempty" protobuf:"bytes,10,opt,name=subtask"`
	// User-provided description of the object
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,11,opt,name=description"`
	// The default location for all artifacts created under the DataProduct. All data-producing resources will
	// use the VirtualBucket specified by the DataLocation by default
	// +kubebuilder:validation:Optional
	DataLocation DataLocation `json:"dataLocation,omitempty" protobuf:"bytes,12,opt,name=dataLocation"`
	// The default notification specification for all resources under the DataProduct
	// +kubebuilder:validation:Optional
	Notification catalog.NotificationSpec `json:"notification,omitempty" protobuf:"bytes,13,opt,name=notification"`
	// The default resource allocation for model training and data workloads that takes place under the DataProduct
	// +kubebuilder:validation:Optional
	DefaultTrainingResources catalog.ResourceSpec `json:"trainingResources,omitempty" protobuf:"bytes,14,opt,name=trainingResources"`
	// The default resource allocation for model serving workloads that takes place under the DataProduct
	// +kubebuilder:validation:Optional
	DefaultServingResources catalog.ResourceSpec `json:"servingResources,omitempty" protobuf:"bytes,15,opt,name=servingResources"`
	// Specifies how many times Jobs created under the DataProduct namespace will retry after failure
	// +kubebuilder:default:=3
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Maximum=10
	// +kubebuilder:validation:Optional
	RetriesOnFailure *int32 `json:"retriesOnFailure,omitempty" protobuf:"varint,16,opt,name=retriesOnFailure"`
	// KPIs define key performance indicators for the DataProduct (not functional as of the current release)
	//+kubebuilder:validation:Optional
	KPIs []KPI `json:"kpis,omitempty" protobuf:"bytes,17,rep,name=kpis"`
	// The name of the Account which should be responsible for events that occur under the DataProduct
	//+kubebuilder:validation:Optional
	OnCallAccountName string `json:"onCallAccountName,omitempty" protobuf:"bytes,18,opt,name=onCallAccountName"`
	// The default compilation specification for Study resources created under the DataProduct
	//+kubebuilder:validation:Optional
	Compilation catalog.CompilerSpec `json:"compilation,omitempty" protobuf:"bytes,19,opt,name=compilation"`
	// The clearance level required to access the DataProduct. Accounts which do not have a clearance level
	// greater than or equal to ClearanceLevel will be denied access to the DataProduct namespace
	// +kubebuilder:default:=unclassified
	// +kubebuilder:validation:Optional
	ClearanceLevel *catalog.SecurityClearanceLevel `json:"clearanceLevel,omitempty" protobuf:"bytes,20,opt,name=clearanceLevel"`
	// The default priority level assigned to Jobs created under the DataProduct namespace
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:="medium"
	DefaultPriority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,21,opt,name=priority"`
	// The color assigned to the product, for visual purposes only
	// +kubebuilder:default:="none"
	// +kubebuilder:validation:Optional
	Color *catalog.Color `json:"color,omitempty" protobuf:"bytes,22,opt,name=color"`
	// The Governance requirements (not functional as of the current release)
	// +kubebuilder:validation:Optional
	Governance GovernanceSpec `json:"governance,omitempty" protobuf:"bytes,23,opt,name=governance"`
	// Permissions denotes the specification that determines which Accounts
	// can access the DataProduct and what actions they can perform
	// +kubebuilder:validation:Optional
	Permissions catalog.PermissionsSpec `json:"permissions,omitempty" protobuf:"bytes,24,opt,name=permissions"`
}

DataProductSpec defines the desired state of the DataProduct
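
For orientation, here is a minimal sketch of populating a DataProductSpec from Go. The import path and the small pointer helpers are assumptions made for the example; only fields listed in the struct above are set.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// Small local helpers for the optional pointer fields.
func strPtr(s string) *string { return &s }
func boolPtr(b bool) *bool    { return &b }
func int32Ptr(i int32) *int32 { return &i }

func main() {
	spec := data.DataProductSpec{
		Owner:            strPtr("jane"),                         // account that owns the product
		Public:           boolPtr(false),                         // keep the product private
		Description:      strPtr("fraud detection data product"), // free-text description
		RetriesOnFailure: int32Ptr(3),                            // matches the kubebuilder default
	}
	fmt.Println(*spec.Owner, *spec.Description)
}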

func (*DataProductSpec) DeepCopy

func (in *DataProductSpec) DeepCopy() *DataProductSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductSpec.

func (*DataProductSpec) DeepCopyInto

func (in *DataProductSpec) DeepCopyInto(out *DataProductSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductSpec) Descriptor

func (*DataProductSpec) Descriptor() ([]byte, []int)

func (*DataProductSpec) Marshal

func (m *DataProductSpec) Marshal() (dAtA []byte, err error)

func (*DataProductSpec) MarshalTo

func (m *DataProductSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataProductSpec) MarshalToSizedBuffer

func (m *DataProductSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductSpec) ProtoMessage

func (*DataProductSpec) ProtoMessage()

func (*DataProductSpec) Reset

func (m *DataProductSpec) Reset()

func (*DataProductSpec) Size

func (m *DataProductSpec) Size() (n int)

func (*DataProductSpec) String

func (this *DataProductSpec) String() string

func (*DataProductSpec) Unmarshal

func (m *DataProductSpec) Unmarshal(dAtA []byte) error

func (*DataProductSpec) XXX_DiscardUnknown

func (m *DataProductSpec) XXX_DiscardUnknown()

func (*DataProductSpec) XXX_Marshal

func (m *DataProductSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductSpec) XXX_Merge

func (m *DataProductSpec) XXX_Merge(src proto.Message)

func (*DataProductSpec) XXX_Size

func (m *DataProductSpec) XXX_Size() int

func (*DataProductSpec) XXX_Unmarshal

func (m *DataProductSpec) XXX_Unmarshal(b []byte) error

type DataProductStatus

type DataProductStatus struct {
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// The last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,2,opt,name=lastUpdated"`
	// The number of DataSource resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalDatasources int32 `json:"totalDatasources,omitempty" protobuf:"varint,3,opt,name=totalDatasources"`
	// The number of Dataset resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalDatasets int32 `json:"totalDatasets,omitempty" protobuf:"varint,4,opt,name=totalDatasets"`
	// The number of DataPipeline resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalDataPipelines int32 `json:"totalDataPipelines,omitempty" protobuf:"varint,5,opt,name=totalDataPipelines"`
	// The number of DataPipelineRun resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalDataPipelineRuns int32 `json:"totalDataPipelineRuns,omitempty" protobuf:"varint,6,opt,name=totalDataPipelineRuns"`
	// The number of Study resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalStudies int32 `json:"totalStudies,omitempty" protobuf:"varint,7,opt,name=totalStudies"`
	// The number of Model resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalModels int32 `json:"totalModels,omitempty" protobuf:"varint,8,opt,name=totalModels"`
	// The number of ModelPipeline resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalModelPipelines int32 `json:"totalModelPipelines,omitempty" protobuf:"varint,9,opt,name=totalModelPipelines"`
	// The number of ModelPipelineRun resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalModelPipelineRuns int32 `json:"totalModelPipelineRuns,omitempty" protobuf:"varint,10,opt,name=totalModelPipelineRuns"`
	// The number of Predictor resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalPredictors int32 `json:"totalPredictors,omitempty" protobuf:"varint,11,opt,name=totalPredictors"`
	// The number of ModelAutoBuilder resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalBuilders int32 `json:"totalBuilders,omitempty" protobuf:"varint,12,opt,name=totalBuilders"`
	// The number of DataApp resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalApps int32 `json:"totalApps,omitempty" protobuf:"varint,13,opt,name=totalApps"`
	// The number of Prediction resources that exist under the namespace
	//+kubebuilder:validation:Optional
	TotalPredictions int32 `json:"totalPredictions,omitempty" protobuf:"varint,14,opt,name=totalPredictions"`
	// The number of informative alerts produced under the namespace
	//+kubebuilder:validation:Optional
	TotalInfoAlerts int32 `json:"totalInfoAlerts,omitempty" protobuf:"varint,15,opt,name=totalInfoAlerts"`
	// The number of error alerts produced under the namespace
	//+kubebuilder:validation:Optional
	TotalErrorsAlerts int32 `json:"totalErrorAlerts,omitempty" protobuf:"varint,16,opt,name=totalErrorAlerts"`
	// In the case of failure, the DataProduct resource controller will set this field with a failure reason
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,17,opt,name=failureReason"`
	// In the case of failure, the DataProduct resource controller will set this field with a failure message
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,18,opt,name=failureMessage"`
	// The name of the DataProductVersion which currently represents the latest version of the DataProduct.
	// Newly-created resources will be instantiated with this version by default
	BaselineVersion *string `json:"baselineVersion,omitempty" protobuf:"bytes,19,opt,name=baselineVersion"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []DataProductCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,20,rep,name=conditions"`
}

DataProductStatus defines the observed state of DataProduct
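
A hedged sketch of how the Conditions slice might be inspected to decide readiness. It assumes DataProductCondition exposes Type and Status fields, mirroring the DataProductVersionCondition and DataSourceCondition structs documented on this page, and the import paths are assumptions.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
	v1 "k8s.io/api/core/v1"
)

// isReady reports whether the Ready condition is True. It assumes DataProductCondition
// carries Type and Status fields, like the other condition structs shown on this page.
func isReady(status data.DataProductStatus) bool {
	for _, c := range status.Conditions {
		if c.Type == data.DataProductReady && c.Status == v1.ConditionTrue {
			return true
		}
	}
	return false
}

func main() {
	var status data.DataProductStatus // normally read back from the cluster
	fmt.Println("ready:", isReady(status))
}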

func (*DataProductStatus) DeepCopy

func (in *DataProductStatus) DeepCopy() *DataProductStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductStatus.

func (*DataProductStatus) DeepCopyInto

func (in *DataProductStatus) DeepCopyInto(out *DataProductStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductStatus) Descriptor

func (*DataProductStatus) Descriptor() ([]byte, []int)

func (*DataProductStatus) Marshal

func (m *DataProductStatus) Marshal() (dAtA []byte, err error)

func (*DataProductStatus) MarshalTo

func (m *DataProductStatus) MarshalTo(dAtA []byte) (int, error)

func (*DataProductStatus) MarshalToSizedBuffer

func (m *DataProductStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductStatus) ProtoMessage

func (*DataProductStatus) ProtoMessage()

func (*DataProductStatus) Reset

func (m *DataProductStatus) Reset()

func (*DataProductStatus) Size

func (m *DataProductStatus) Size() (n int)

func (*DataProductStatus) String

func (this *DataProductStatus) String() string

func (*DataProductStatus) Unmarshal

func (m *DataProductStatus) Unmarshal(dAtA []byte) error

func (*DataProductStatus) XXX_DiscardUnknown

func (m *DataProductStatus) XXX_DiscardUnknown()

func (*DataProductStatus) XXX_Marshal

func (m *DataProductStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductStatus) XXX_Merge

func (m *DataProductStatus) XXX_Merge(src proto.Message)

func (*DataProductStatus) XXX_Size

func (m *DataProductStatus) XXX_Size() int

func (*DataProductStatus) XXX_Unmarshal

func (m *DataProductStatus) XXX_Unmarshal(b []byte) error

type DataProductVersion

type DataProductVersion struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              DataProductVersionSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status DataProductVersionStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

DataProductVersion represents a single version of a DataProduct, which should increment versions in response to changes in data +kubebuilder:object:root=true +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Product",type="string",JSONPath=".spec.productRef.name",description="" +kubebuilder:printcolumn:name="Base",type="boolean",JSONPath=".spec.baseline",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=dataproductversions,shortName=dpv,singular=dataproductversion,categories={data,modela,all}

func ParseDataProductVersionYaml

func ParseDataProductVersionYaml(content []byte) (*DataProductVersion, error)
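
A usage sketch for ParseDataProductVersionYaml; the file name and the import path are illustrative assumptions.

package main

import (
	"fmt"
	"os"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	content, err := os.ReadFile("dataproductversion.yaml") // illustrative file name
	if err != nil {
		panic(err)
	}
	version, err := data.ParseDataProductVersionYaml(content)
	if err != nil {
		panic(err)
	}
	fmt.Println(version.Name, "ready:", version.IsReady())
}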

func (*DataProductVersion) AddFinalizer

func (version *DataProductVersion) AddFinalizer()

func (*DataProductVersion) Archived

func (version *DataProductVersion) Archived() bool

func (*DataProductVersion) CreateOrUpdateCond

func (version *DataProductVersion) CreateOrUpdateCond(cond DataProductVersionCondition)

Merge or update the condition

func (*DataProductVersion) DeepCopy

func (in *DataProductVersion) DeepCopy() *DataProductVersion

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductVersion.

func (*DataProductVersion) DeepCopyInto

func (in *DataProductVersion) DeepCopyInto(out *DataProductVersion)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductVersion) DeepCopyObject

func (in *DataProductVersion) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataProductVersion) Default

func (dp *DataProductVersion) Default()

No defaults are applied in the current release

func (*DataProductVersion) Descriptor

func (*DataProductVersion) Descriptor() ([]byte, []int)

func (*DataProductVersion) GetCond

func (*DataProductVersion) GetCondIdx

func (*DataProductVersion) HasFinalizer

func (version *DataProductVersion) HasFinalizer() bool

func (*DataProductVersion) IsReady

func (version *DataProductVersion) IsReady() bool

func (*DataProductVersion) MarkArchived

func (version *DataProductVersion) MarkArchived()

func (*DataProductVersion) MarkFailed

func (version *DataProductVersion) MarkFailed(err error)

func (*DataProductVersion) MarkReady

func (version *DataProductVersion) MarkReady()

func (*DataProductVersion) Marshal

func (m *DataProductVersion) Marshal() (dAtA []byte, err error)

func (*DataProductVersion) MarshalTo

func (m *DataProductVersion) MarshalTo(dAtA []byte) (int, error)

func (*DataProductVersion) MarshalToSizedBuffer

func (m *DataProductVersion) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductVersion) MessageUri

func (version *DataProductVersion) MessageUri() string

func (*DataProductVersion) ProtoMessage

func (*DataProductVersion) ProtoMessage()

func (*DataProductVersion) RemoveFinalizer

func (version *DataProductVersion) RemoveFinalizer()

func (*DataProductVersion) Reset

func (m *DataProductVersion) Reset()

func (*DataProductVersion) SetupWebhookWithManager

func (version *DataProductVersion) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataProductVersion) Size

func (m *DataProductVersion) Size() (n int)

func (*DataProductVersion) String

func (this *DataProductVersion) String() string

func (*DataProductVersion) ToYamlFile

func (version *DataProductVersion) ToYamlFile() ([]byte, error)

func (*DataProductVersion) Unmarshal

func (m *DataProductVersion) Unmarshal(dAtA []byte) error

func (*DataProductVersion) ValidateCreate

func (version *DataProductVersion) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DataProductVersion) ValidateDelete

func (version *DataProductVersion) ValidateDelete() error

func (*DataProductVersion) ValidateUpdate

func (version *DataProductVersion) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DataProductVersion) XXX_DiscardUnknown

func (m *DataProductVersion) XXX_DiscardUnknown()

func (*DataProductVersion) XXX_Marshal

func (m *DataProductVersion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductVersion) XXX_Merge

func (m *DataProductVersion) XXX_Merge(src proto.Message)

func (*DataProductVersion) XXX_Size

func (m *DataProductVersion) XXX_Size() int

func (*DataProductVersion) XXX_Unmarshal

func (m *DataProductVersion) XXX_Unmarshal(b []byte) error

func (*DataProductVersion) YamlUri

func (version *DataProductVersion) YamlUri() string

type DataProductVersionCondition

type DataProductVersionCondition struct {
	// Type of condition.
	Type DataProductVersionConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=DataProductVersionConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,4,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,5,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,6,opt,name=message"`
}

DataProductVersionCondition describes the state of a DataProductVersion at a certain point
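
A sketch of recording a condition with the CreateOrUpdateCond helper documented earlier on this page; the Reason and Message strings are illustrative and the import paths are assumptions.

package example

import (
	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// markSaved records a Saved=True condition on a DataProductVersion using the
// CreateOrUpdateCond helper; the Reason and Message values are illustrative.
func markSaved(version *data.DataProductVersion) {
	now := metav1.Now()
	version.CreateOrUpdateCond(data.DataProductVersionCondition{
		Type:               data.DataProductVersionSaved,
		Status:             v1.ConditionTrue,
		LastTransitionTime: &now,
		Reason:             "ManifestStored",
		Message:            "the version manifest was saved",
	})
}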

func (*DataProductVersionCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductVersionCondition.

func (*DataProductVersionCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductVersionCondition) Descriptor

func (*DataProductVersionCondition) Descriptor() ([]byte, []int)

func (*DataProductVersionCondition) Marshal

func (m *DataProductVersionCondition) Marshal() (dAtA []byte, err error)

func (*DataProductVersionCondition) MarshalTo

func (m *DataProductVersionCondition) MarshalTo(dAtA []byte) (int, error)

func (*DataProductVersionCondition) MarshalToSizedBuffer

func (m *DataProductVersionCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductVersionCondition) ProtoMessage

func (*DataProductVersionCondition) ProtoMessage()

func (*DataProductVersionCondition) Reset

func (m *DataProductVersionCondition) Reset()

func (*DataProductVersionCondition) Size

func (m *DataProductVersionCondition) Size() (n int)

func (*DataProductVersionCondition) String

func (this *DataProductVersionCondition) String() string

func (*DataProductVersionCondition) Unmarshal

func (m *DataProductVersionCondition) Unmarshal(dAtA []byte) error

func (*DataProductVersionCondition) XXX_DiscardUnknown

func (m *DataProductVersionCondition) XXX_DiscardUnknown()

func (*DataProductVersionCondition) XXX_Marshal

func (m *DataProductVersionCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductVersionCondition) XXX_Merge

func (m *DataProductVersionCondition) XXX_Merge(src proto.Message)

func (*DataProductVersionCondition) XXX_Size

func (m *DataProductVersionCondition) XXX_Size() int

func (*DataProductVersionCondition) XXX_Unmarshal

func (m *DataProductVersionCondition) XXX_Unmarshal(b []byte) error

type DataProductVersionConditionType

type DataProductVersionConditionType string

The type of a condition on a DataProductVersion

const (
	DataProductVersionReady DataProductVersionConditionType = "Ready"
	DataProductVersionSaved DataProductVersionConditionType = "Saved"
)

// DataProductVersion conditions

type DataProductVersionList

type DataProductVersionList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []DataProductVersion `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DataProductVersionList contains a list of DataProductVersion

func (*DataProductVersionList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductVersionList.

func (*DataProductVersionList) DeepCopyInto

func (in *DataProductVersionList) DeepCopyInto(out *DataProductVersionList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductVersionList) DeepCopyObject

func (in *DataProductVersionList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataProductVersionList) Descriptor

func (*DataProductVersionList) Descriptor() ([]byte, []int)

func (*DataProductVersionList) Marshal

func (m *DataProductVersionList) Marshal() (dAtA []byte, err error)

func (*DataProductVersionList) MarshalTo

func (m *DataProductVersionList) MarshalTo(dAtA []byte) (int, error)

func (*DataProductVersionList) MarshalToSizedBuffer

func (m *DataProductVersionList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductVersionList) ProtoMessage

func (*DataProductVersionList) ProtoMessage()

func (*DataProductVersionList) Reset

func (m *DataProductVersionList) Reset()

func (*DataProductVersionList) Size

func (m *DataProductVersionList) Size() (n int)

func (*DataProductVersionList) String

func (this *DataProductVersionList) String() string

func (*DataProductVersionList) Unmarshal

func (m *DataProductVersionList) Unmarshal(dAtA []byte) error

func (*DataProductVersionList) XXX_DiscardUnknown

func (m *DataProductVersionList) XXX_DiscardUnknown()

func (*DataProductVersionList) XXX_Marshal

func (m *DataProductVersionList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductVersionList) XXX_Merge

func (m *DataProductVersionList) XXX_Merge(src proto.Message)

func (*DataProductVersionList) XXX_Size

func (m *DataProductVersionList) XXX_Size() int

func (*DataProductVersionList) XXX_Unmarshal

func (m *DataProductVersionList) XXX_Unmarshal(b []byte) error

type DataProductVersionSpec

type DataProductVersionSpec struct {
	// ProductRef contains the object reference to the DataProduct
	// resource which the DataProductVersion describes the version of
	// +kubebuilder:validation:Optional
	ProductRef v1.ObjectReference `json:"productRef,omitempty" protobuf:"bytes,1,opt,name=productRef"`
	// User-provided description of the object
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,2,opt,name=description"`
	// The name of the version which preceded the current version
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	PrevVersionName *string `json:"prevVersionName,omitempty" protobuf:"bytes,3,opt,name=prevVersionName"`
	// Indicates if the version is a baseline, and if so will cause Modela to garbage collect the resources from previous versions
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Baseline *bool `json:"baseline,omitempty" protobuf:"varint,4,opt,name=baseline"`
	// The name of the Account which created the object, which exists in the same tenant as the object
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,5,opt,name=owner"`
}

DataProductVersionSpec defines the desired state of a DataProductVersion
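
A minimal sketch of building a DataProductVersionSpec; the namespace, names, and import paths are illustrative assumptions.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
	v1 "k8s.io/api/core/v1"
)

func main() {
	baseline := true
	desc := "second iteration of the data product"
	spec := data.DataProductVersionSpec{
		// ProductRef points at the parent DataProduct; namespace and name are illustrative.
		ProductRef:  v1.ObjectReference{Namespace: "iris", Name: "iris-product"},
		Description: &desc,
		Baseline:    &baseline, // baseline versions let Modela garbage-collect prior versions
	}
	fmt.Println(spec.ProductRef.Name, *spec.Description)
}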

func (*DataProductVersionSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductVersionSpec.

func (*DataProductVersionSpec) DeepCopyInto

func (in *DataProductVersionSpec) DeepCopyInto(out *DataProductVersionSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductVersionSpec) Descriptor

func (*DataProductVersionSpec) Descriptor() ([]byte, []int)

func (*DataProductVersionSpec) Marshal

func (m *DataProductVersionSpec) Marshal() (dAtA []byte, err error)

func (*DataProductVersionSpec) MarshalTo

func (m *DataProductVersionSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataProductVersionSpec) MarshalToSizedBuffer

func (m *DataProductVersionSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductVersionSpec) ProtoMessage

func (*DataProductVersionSpec) ProtoMessage()

func (*DataProductVersionSpec) Reset

func (m *DataProductVersionSpec) Reset()

func (*DataProductVersionSpec) Size

func (m *DataProductVersionSpec) Size() (n int)

func (*DataProductVersionSpec) String

func (this *DataProductVersionSpec) String() string

func (*DataProductVersionSpec) Unmarshal

func (m *DataProductVersionSpec) Unmarshal(dAtA []byte) error

func (*DataProductVersionSpec) XXX_DiscardUnknown

func (m *DataProductVersionSpec) XXX_DiscardUnknown()

func (*DataProductVersionSpec) XXX_Marshal

func (m *DataProductVersionSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductVersionSpec) XXX_Merge

func (m *DataProductVersionSpec) XXX_Merge(src proto.Message)

func (*DataProductVersionSpec) XXX_Size

func (m *DataProductVersionSpec) XXX_Size() int

func (*DataProductVersionSpec) XXX_Unmarshal

func (m *DataProductVersionSpec) XXX_Unmarshal(b []byte) error

type DataProductVersionStatus

type DataProductVersionStatus struct {
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// The last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,2,opt,name=lastUpdated"`
	// In the case of failure, the Dataset resource controller will set this field with a failure reason
	// Borrowed from cluster api controller
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,3,opt,name=failureReason"`
	// In the case of failure, the Dataset resource controller will set this field with a failure message
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,4,opt,name=failureMessage"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []DataProductVersionCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,5,rep,name=conditions"`
}

DataProductVersionStatus defines the observed state of a DataProductVersion

func (*DataProductVersionStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProductVersionStatus.

func (*DataProductVersionStatus) DeepCopyInto

func (in *DataProductVersionStatus) DeepCopyInto(out *DataProductVersionStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataProductVersionStatus) Descriptor

func (*DataProductVersionStatus) Descriptor() ([]byte, []int)

func (*DataProductVersionStatus) Marshal

func (m *DataProductVersionStatus) Marshal() (dAtA []byte, err error)

func (*DataProductVersionStatus) MarshalTo

func (m *DataProductVersionStatus) MarshalTo(dAtA []byte) (int, error)

func (*DataProductVersionStatus) MarshalToSizedBuffer

func (m *DataProductVersionStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataProductVersionStatus) ProtoMessage

func (*DataProductVersionStatus) ProtoMessage()

func (*DataProductVersionStatus) Reset

func (m *DataProductVersionStatus) Reset()

func (*DataProductVersionStatus) Size

func (m *DataProductVersionStatus) Size() (n int)

func (*DataProductVersionStatus) String

func (this *DataProductVersionStatus) String() string

func (*DataProductVersionStatus) Unmarshal

func (m *DataProductVersionStatus) Unmarshal(dAtA []byte) error

func (*DataProductVersionStatus) XXX_DiscardUnknown

func (m *DataProductVersionStatus) XXX_DiscardUnknown()

func (*DataProductVersionStatus) XXX_Marshal

func (m *DataProductVersionStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataProductVersionStatus) XXX_Merge

func (m *DataProductVersionStatus) XXX_Merge(src proto.Message)

func (*DataProductVersionStatus) XXX_Size

func (m *DataProductVersionStatus) XXX_Size() int

func (*DataProductVersionStatus) XXX_Unmarshal

func (m *DataProductVersionStatus) XXX_Unmarshal(b []byte) error

type DataSource

type DataSource struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              DataSourceSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status DataSourceStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

DataSource defines the specification for the file format and column-level schema of data to be used within Modela +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Format",type="string",JSONPath=".spec.fileType" +kubebuilder:printcolumn:name="Type",type="string",JSONPath=".spec.datasetType" +kubebuilder:printcolumn:name="Task",type="string",JSONPath=".spec.task" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:subresource:status +kubebuilder:resource:path=datasources,singular=datasource,shortName="dsrc",categories={data,modela,all}

func (*DataSource) ActiveColumns

func (in *DataSource) ActiveColumns() (string, error)

func (*DataSource) AddColumn

func (sc *DataSource) AddColumn(
	name string,
	dtype catalog.DataType,
	dformat catalog.DataDomain,
	Ignore bool,
	Target bool,
	Nullable bool)
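
A hedged usage sketch for AddColumn. Because the concrete catalog.DataType and catalog.DataDomain constants are not listed on this page, zero values stand in for them, and the import paths are assumptions.

package main

import (
	catalog "github.com/metaprov/modelaapi/pkg/apis/catalog/v1alpha1" // assumed import path
	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1"       // assumed import path
)

func main() {
	var src data.DataSource

	// The concrete catalog constants are not listed on this page, so zero values
	// stand in for the column data type and domain.
	var (
		dtype   catalog.DataType
		dformat catalog.DataDomain
	)

	// Add a target column named "price" and an ignored "row_id" column.
	src.AddColumn("price", dtype, dformat, false /*Ignore*/, true /*Target*/, false /*Nullable*/)
	src.AddColumn("row_id", dtype, dformat, true /*Ignore*/, false /*Target*/, true /*Nullable*/)
}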

func (*DataSource) AddFinalizer

func (sc *DataSource) AddFinalizer()

func (DataSource) CountActiveAttributes

func (sc DataSource) CountActiveAttributes() int

CountActiveAttributes counts the number of attributes that are not marked as ignored

func (DataSource) CountTargetAttributes

func (sc DataSource) CountTargetAttributes() int

CountTargetAttributes counts the number of attributes that are marked as targets

func (*DataSource) CreateOrUpdateCond

func (sc *DataSource) CreateOrUpdateCond(cond DataSourceCondition)

Merge or update the condition

func (*DataSource) DeepCopy

func (in *DataSource) DeepCopy() *DataSource

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSource.

func (*DataSource) DeepCopyInto

func (in *DataSource) DeepCopyInto(out *DataSource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataSource) DeepCopyObject

func (in *DataSource) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataSource) Default

func (datasource *DataSource) Default()

func (*DataSource) Descriptor

func (*DataSource) Descriptor() ([]byte, []int)

func (*DataSource) GetCond

func (*DataSource) GetCondIdx

func (sc *DataSource) GetCondIdx(t DataSourceConditionType) int

func (*DataSource) HasFinalizer

func (sc *DataSource) HasFinalizer() bool

func (*DataSource) HaveValidationRules added in v0.4.654

func (sc *DataSource) HaveValidationRules() bool

func (*DataSource) InferTask

func (sc *DataSource) InferTask() catalog.MLTask

func (*DataSource) IsReady

func (schema *DataSource) IsReady() bool

func (*DataSource) Key

func (sc *DataSource) Key() string

func (*DataSource) ManifestUri

func (schema *DataSource) ManifestUri() string

func (*DataSource) MarkFieldAsTarget

func (sc *DataSource) MarkFieldAsTarget(target string)

func (*DataSource) MarkLastFieldAsTarget

func (sc *DataSource) MarkLastFieldAsTarget()

func (*DataSource) MarkReady

func (sc *DataSource) MarkReady()

func (*DataSource) MarkSaved

func (sc *DataSource) MarkSaved()

func (*DataSource) Marshal

func (m *DataSource) Marshal() (dAtA []byte, err error)

func (*DataSource) MarshalTo

func (m *DataSource) MarshalTo(dAtA []byte) (int, error)

func (*DataSource) MarshalToSizedBuffer

func (m *DataSource) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataSource) Poplulate

func (sc *DataSource) Poplulate()

func (*DataSource) ProtoMessage

func (*DataSource) ProtoMessage()

func (*DataSource) RemoveFinalizer

func (sc *DataSource) RemoveFinalizer()

func (*DataSource) Reset

func (m *DataSource) Reset()

func (*DataSource) RootUri

func (schema *DataSource) RootUri() string

func (*DataSource) Saved

func (sc *DataSource) Saved() bool

func (*DataSource) SetupWebhookWithManager

func (sc *DataSource) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*DataSource) Size

func (m *DataSource) Size() (n int)

func (*DataSource) String

func (this *DataSource) String() string

func (*DataSource) ToYamlFile

func (schema *DataSource) ToYamlFile() ([]byte, error)

func (*DataSource) Unmarshal

func (m *DataSource) Unmarshal(dAtA []byte) error

func (DataSource) Validate

func (sc DataSource) Validate() (bool, []metav1.StatusCause)

func (*DataSource) ValidateCreate

func (ffile *DataSource) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*DataSource) ValidateDelete

func (ffile *DataSource) ValidateDelete() error

func (*DataSource) ValidateUpdate

func (ffile *DataSource) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*DataSource) XXX_DiscardUnknown

func (m *DataSource) XXX_DiscardUnknown()

func (*DataSource) XXX_Marshal

func (m *DataSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSource) XXX_Merge

func (m *DataSource) XXX_Merge(src proto.Message)

func (*DataSource) XXX_Size

func (m *DataSource) XXX_Size() int

func (*DataSource) XXX_Unmarshal

func (m *DataSource) XXX_Unmarshal(b []byte) error

type DataSourceCondition

type DataSourceCondition struct {
	// Type of condition.
	Type DataSourceConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=DataSourceConditionType"`
	// Status of the condition, one of True, False, or Unknown
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,4,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,5,opt,name=reason"`
	// A human readable message indicating details about the transition
	Message string `json:"message,omitempty" protobuf:"bytes,6,opt,name=message"`
}

DataSourceCondition describes the state of a DataSource at a certain point

func (*DataSourceCondition) DeepCopy

func (in *DataSourceCondition) DeepCopy() *DataSourceCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceCondition.

func (*DataSourceCondition) DeepCopyInto

func (in *DataSourceCondition) DeepCopyInto(out *DataSourceCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataSourceCondition) Descriptor

func (*DataSourceCondition) Descriptor() ([]byte, []int)

func (*DataSourceCondition) Marshal

func (m *DataSourceCondition) Marshal() (dAtA []byte, err error)

func (*DataSourceCondition) MarshalTo

func (m *DataSourceCondition) MarshalTo(dAtA []byte) (int, error)

func (*DataSourceCondition) MarshalToSizedBuffer

func (m *DataSourceCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataSourceCondition) ProtoMessage

func (*DataSourceCondition) ProtoMessage()

func (*DataSourceCondition) Reset

func (m *DataSourceCondition) Reset()

func (*DataSourceCondition) Size

func (m *DataSourceCondition) Size() (n int)

func (*DataSourceCondition) String

func (this *DataSourceCondition) String() string

func (*DataSourceCondition) Unmarshal

func (m *DataSourceCondition) Unmarshal(dAtA []byte) error

func (*DataSourceCondition) XXX_DiscardUnknown

func (m *DataSourceCondition) XXX_DiscardUnknown()

func (*DataSourceCondition) XXX_Marshal

func (m *DataSourceCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSourceCondition) XXX_Merge

func (m *DataSourceCondition) XXX_Merge(src proto.Message)

func (*DataSourceCondition) XXX_Size

func (m *DataSourceCondition) XXX_Size() int

func (*DataSourceCondition) XXX_Unmarshal

func (m *DataSourceCondition) XXX_Unmarshal(b []byte) error

type DataSourceConditionType

type DataSourceConditionType string

The type of a condition on a DataSource

const (
	// DatasourceReady is the regular, ready state of the data source.
	DatasourceReady DataSourceConditionType = "Ready"
	DatasourceSaved DataSourceConditionType = "Saved"
)

// DataSource conditions

type DataSourceList

type DataSourceList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []DataSource `json:"items" protobuf:"bytes,2,rep,name=items"`
}

DataSourceList contains a list of DataSource objects +kubebuilder:object:root=true

func (*DataSourceList) DeepCopy

func (in *DataSourceList) DeepCopy() *DataSourceList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceList.

func (*DataSourceList) DeepCopyInto

func (in *DataSourceList) DeepCopyInto(out *DataSourceList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataSourceList) DeepCopyObject

func (in *DataSourceList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DataSourceList) Descriptor

func (*DataSourceList) Descriptor() ([]byte, []int)

func (*DataSourceList) Marshal

func (m *DataSourceList) Marshal() (dAtA []byte, err error)

func (*DataSourceList) MarshalTo

func (m *DataSourceList) MarshalTo(dAtA []byte) (int, error)

func (*DataSourceList) MarshalToSizedBuffer

func (m *DataSourceList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataSourceList) ProtoMessage

func (*DataSourceList) ProtoMessage()

func (*DataSourceList) Reset

func (m *DataSourceList) Reset()

func (*DataSourceList) Size

func (m *DataSourceList) Size() (n int)

func (*DataSourceList) String

func (this *DataSourceList) String() string

func (*DataSourceList) Unmarshal

func (m *DataSourceList) Unmarshal(dAtA []byte) error

func (*DataSourceList) XXX_DiscardUnknown

func (m *DataSourceList) XXX_DiscardUnknown()

func (*DataSourceList) XXX_Marshal

func (m *DataSourceList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSourceList) XXX_Merge

func (m *DataSourceList) XXX_Merge(src proto.Message)

func (*DataSourceList) XXX_Size

func (m *DataSourceList) XXX_Size() int

func (*DataSourceList) XXX_Unmarshal

func (m *DataSourceList) XXX_Unmarshal(b []byte) error

type DataSourceSpec

type DataSourceSpec struct {
	// The name of the Account which created the object, which exists in the same tenant as the object
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// The name of the DataProductVersion which describes the version of the resource
	// that exists in the same DataProduct namespace as the resource
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// User-provided description of the object
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=512
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The type of dataset; currently, the only supported type is `tabular`
	// +kubebuilder:default:="tabular"
	// +kubebuilder:validation:Optional
	DatasetType *catalog.DatasetType `json:"datasetType,omitempty" protobuf:"bytes,4,opt,name=datasetType"`
	// The schema which will be used during the ingestion process of any Dataset resources which specify the DataSource
	Schema Schema `json:"schema,omitempty" protobuf:"bytes,5,opt,name=schema"`
	// FlatFile defines the parameters needed to read a flat file.
	// +kubebuilder:validation:Optional
	FlatFile *FlatFileFormatSpec `json:"flatfile,omitempty" protobuf:"bytes,6,opt,name=flatfile"`
	// If true, this datasource is for labeled data.
	// +kubebuilder:validation:Optional
	Labeled *bool `json:"labeled,omitempty" protobuf:"bytes,7,opt,name=labeled"`
	// The specification for how incoming data should be sampled (i.e. how many rows should be used). Applicable
	// primarily for very large datasets
	Sample SampleSpec `json:"sample,omitempty" protobuf:"bytes,9,opt,name=sample"`
	// The default task for Dataset resources created from the Data Source. If null, the task type will default to
	// the default task type of the Data Product which contains the Data Source
	// +kubebuilder:validation:Optional
	Task *catalog.MLTask `json:"task,omitempty" protobuf:"bytes,10,opt,name=task"`
	// The machine learning subtask relevant to the primary task (text classification, image object detection, etc.)
	// +kubebuilder:default:="none"
	// +kubebuilder:validation:Optional
	SubTask *catalog.MLSubtask `json:"subtask,omitempty" protobuf:"bytes,11,opt,name=subtask"`
	// List of relationships to other data sources
	// +kubebuilder:validation:Optional
	Relationships []RelationshipSpec `json:"relationships,omitempty" protobuf:"bytes,12,rep,name=relationships"`
	// Labeling specifies how to automatically label the dataset using positive and negative rules
	// +kubebuilder:validation:Optional
	Labeling LabelingSpec `json:"labeling,omitempty" protobuf:"bytes,13,rep,name=labeling"`
	// InferredFrom specifies the location of the data that was used to generate the schema of the Data Source
	// +kubebuilder:validation:Optional
	InferredFrom *DataLocation `json:"inferredFrom,omitempty" protobuf:"bytes,14,opt,name=inferredFrom"`
}

DataSourceSpec defines the desired state of the DataSource
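
A minimal sketch of a DataSourceSpec with a few of the optional fields set; the values and import path are illustrative assumptions, and the remaining fields keep their zero values.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	owner := "no-one"
	desc := "daily transactions export in CSV form"
	labeled := true
	spec := data.DataSourceSpec{
		Owner:       &owner,
		Description: &desc,
		Labeled:     &labeled, // the data carries a label column
		// Schema, FlatFile, Sample and the remaining fields keep their zero values here;
		// they would normally be filled in from an inferred schema.
	}
	fmt.Println(*spec.Owner, *spec.Description)
}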

func (*DataSourceSpec) DeepCopy

func (in *DataSourceSpec) DeepCopy() *DataSourceSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceSpec.

func (*DataSourceSpec) DeepCopyInto

func (in *DataSourceSpec) DeepCopyInto(out *DataSourceSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataSourceSpec) Descriptor

func (*DataSourceSpec) Descriptor() ([]byte, []int)

func (*DataSourceSpec) Marshal

func (m *DataSourceSpec) Marshal() (dAtA []byte, err error)

func (*DataSourceSpec) MarshalTo

func (m *DataSourceSpec) MarshalTo(dAtA []byte) (int, error)

func (*DataSourceSpec) MarshalToSizedBuffer

func (m *DataSourceSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataSourceSpec) ProtoMessage

func (*DataSourceSpec) ProtoMessage()

func (*DataSourceSpec) Reset

func (m *DataSourceSpec) Reset()

func (*DataSourceSpec) Size

func (m *DataSourceSpec) Size() (n int)

func (*DataSourceSpec) String

func (this *DataSourceSpec) String() string

func (*DataSourceSpec) Unmarshal

func (m *DataSourceSpec) Unmarshal(dAtA []byte) error

func (*DataSourceSpec) XXX_DiscardUnknown

func (m *DataSourceSpec) XXX_DiscardUnknown()

func (*DataSourceSpec) XXX_Marshal

func (m *DataSourceSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSourceSpec) XXX_Merge

func (m *DataSourceSpec) XXX_Merge(src proto.Message)

func (*DataSourceSpec) XXX_Size

func (m *DataSourceSpec) XXX_Size() int

func (*DataSourceSpec) XXX_Unmarshal

func (m *DataSourceSpec) XXX_Unmarshal(b []byte) error

type DataSourceStatus

type DataSourceStatus struct {
	// The number of columns determined to be present in the DataSource's schema
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	Cols int32 `json:"cols,omitempty" protobuf:"varint,1,opt,name=cols"`
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,2,opt,name=observedGeneration"`
	// Last time the DataSource was used to create a Dataset
	//+kubebuilder:validation:Optional
	LastDatasetCreated *metav1.Time `json:"lastDatasetCreated,omitempty" protobuf:"bytes,3,opt,name=lastDatasetCreated"`
	// The name of the last Dataset created using the DataSource
	//+kubebuilder:validation:Optional
	LastDatasetName string `json:"lastDatasetName,omitempty" protobuf:"bytes,4,opt,name=lastDatasetName"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,5,opt,name=lastUpdated"`
	// In the case of failure, the DataSource resource controller will set this field with a failure reason
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,6,opt,name=failureReason"`
	// In the case of failure, the DataSource resource controller will set this field with a failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,7,opt,name=failureMessage"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []DataSourceCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,8,rep,name=conditions"`
}

DataSourceStatus defines the observed state of a DataSource object

func (*DataSourceStatus) DeepCopy

func (in *DataSourceStatus) DeepCopy() *DataSourceStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSourceStatus.

func (*DataSourceStatus) DeepCopyInto

func (in *DataSourceStatus) DeepCopyInto(out *DataSourceStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DataSourceStatus) Descriptor

func (*DataSourceStatus) Descriptor() ([]byte, []int)

func (*DataSourceStatus) Marshal

func (m *DataSourceStatus) Marshal() (dAtA []byte, err error)

func (*DataSourceStatus) MarshalTo

func (m *DataSourceStatus) MarshalTo(dAtA []byte) (int, error)

func (*DataSourceStatus) MarshalToSizedBuffer

func (m *DataSourceStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DataSourceStatus) ProtoMessage

func (*DataSourceStatus) ProtoMessage()

func (*DataSourceStatus) Reset

func (m *DataSourceStatus) Reset()

func (*DataSourceStatus) Size

func (m *DataSourceStatus) Size() (n int)

func (*DataSourceStatus) String

func (this *DataSourceStatus) String() string

func (*DataSourceStatus) Unmarshal

func (m *DataSourceStatus) Unmarshal(dAtA []byte) error

func (*DataSourceStatus) XXX_DiscardUnknown

func (m *DataSourceStatus) XXX_DiscardUnknown()

func (*DataSourceStatus) XXX_Marshal

func (m *DataSourceStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DataSourceStatus) XXX_Merge

func (m *DataSourceStatus) XXX_Merge(src proto.Message)

func (*DataSourceStatus) XXX_Size

func (m *DataSourceStatus) XXX_Size() int

func (*DataSourceStatus) XXX_Unmarshal

func (m *DataSourceStatus) XXX_Unmarshal(b []byte) error

type DatabaseServerType

type DatabaseServerType string

DatabaseServerType is the type of the database server +kubebuilder:validation:Enum="sqlserver";"mysql";"postgres";"sqlite";"oracle";"presto";"redshift";"apache-hive";"snowflake";"sybase";"vertica";"cockroach-db";"elasticsearch";"informix";"sap-hana";"teradata";"gcp-spanner";"apache-spark";"clickhouse";"greenplum";"couchbase";"exasol"

const (
	MsSqlServer    DatabaseServerType = "sqlserver"
	MySql          DatabaseServerType = "mysql"
	Postgres       DatabaseServerType = "postgres"
	SqlLite        DatabaseServerType = "sqlite"
	Oracle         DatabaseServerType = "oracle"
	Presto         DatabaseServerType = "presto"
	AmazonRedShift DatabaseServerType = "redshift"
	ApacheHive     DatabaseServerType = "apache-hive"
	Snowflake      DatabaseServerType = "snowflake"
	Sybase         DatabaseServerType = "sybase"
	Vertica        DatabaseServerType = "vertica"
	CockroachDB    DatabaseServerType = "cockroach-db"
	ElasticSearch  DatabaseServerType = "elasticsearch"
	Informix       DatabaseServerType = "informix"
	SAPHana        DatabaseServerType = "sap-hana"
	Teradata       DatabaseServerType = "teradata"
	GcpSpanner     DatabaseServerType = "gcp-spanner"
	ApacheSpark    DatabaseServerType = "apache-spark"
	ClickHouse     DatabaseServerType = "clickhouse"
	GreenPlum      DatabaseServerType = "greenplum"
	Couchbase      DatabaseServerType = "couchbase"
	Exasol         DatabaseServerType = "exasol"
)

type Dataset

type Dataset struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              DatasetSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status DatasetStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

Dataset represents a chunk of data that has been analyzed and stored inside a managed bucket +kubebuilder:object:root=true +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Data Source",type="string",JSONPath=".spec.datasourceName" +kubebuilder:printcolumn:name="Type",type="string",JSONPath=".spec.type" +kubebuilder:printcolumn:name="Rows",type="integer",JSONPath=".status.statistics.rows" +kubebuilder:printcolumn:name="Columns",type="integer",JSONPath=".status.statistics.cols" +kubebuilder:printcolumn:name="Size",type="integer",JSONPath=".status.statistics.fileSize" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=datasets,shortName=dset,singular=dataset,categories={data,modela,all}

func ParseDatasetYaml

func ParseDatasetYaml(content []byte) (*Dataset, error)
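
A usage sketch for ParseDatasetYaml; the file name and the import path are illustrative assumptions.

package main

import (
	"fmt"
	"os"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	content, err := os.ReadFile("dataset.yaml") // illustrative file name
	if err != nil {
		panic(err)
	}
	dataset, err := data.ParseDatasetYaml(content)
	if err != nil {
		panic(err)
	}
	fmt.Println(dataset.Name, dataset.StatusString())
}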

func (*Dataset) AddFinalizer

func (dataset *Dataset) AddFinalizer()

func (*Dataset) Archived

func (dataset *Dataset) Archived() bool

func (*Dataset) CompletionAlert added in v0.4.601

func (dataset *Dataset) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert

Generate a dataset completion alert

func (*Dataset) CreateOrUpdateCond

func (dataset *Dataset) CreateOrUpdateCond(cond DatasetCondition)

Merge or update the condition

func (*Dataset) DeepCopy

func (in *Dataset) DeepCopy() *Dataset

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Dataset.

func (*Dataset) DeepCopyInto

func (in *Dataset) DeepCopyInto(out *Dataset)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Dataset) DeepCopyObject

func (in *Dataset) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Dataset) Default

func (dataset *Dataset) Default()

func (*Dataset) Deleted

func (dataset *Dataset) Deleted() bool

func (*Dataset) Descriptor

func (*Dataset) Descriptor() ([]byte, []int)

func (*Dataset) ErrorAlert added in v0.4.601

func (dataset *Dataset) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert

func (*Dataset) Generated

func (dataset *Dataset) Generated() bool

func (*Dataset) GetCond

func (dataset *Dataset) GetCond(t DatasetConditionType) DatasetCondition

func (*Dataset) GetCondIdx

func (dataset *Dataset) GetCondIdx(t DatasetConditionType) int

func (*Dataset) HasFinalizer

func (dataset *Dataset) HasFinalizer() bool

func (*Dataset) Ingested

func (dataset *Dataset) Ingested() bool

func (*Dataset) IsFailed added in v0.4.675

func (dataset *Dataset) IsFailed() bool

func (*Dataset) IsInCond

func (dataset *Dataset) IsInCond(ct DatasetConditionType) bool

func (*Dataset) IsReady

func (dataset *Dataset) IsReady() bool

func (*Dataset) ManifestUri

func (dataset *Dataset) ManifestUri() string

func (*Dataset) MarkArchived

func (dataset *Dataset) MarkArchived()

func (*Dataset) MarkGenerated

func (dataset *Dataset) MarkGenerated()

func (*Dataset) MarkGeneratedFailed

func (dataset *Dataset) MarkGeneratedFailed(msg string)

func (*Dataset) MarkGenerting

func (dataset *Dataset) MarkGenerting()

func (*Dataset) MarkIngestFailed

func (dataset *Dataset) MarkIngestFailed(msg string)

func (*Dataset) MarkIngested

func (dataset *Dataset) MarkIngested()

func (*Dataset) MarkIngesting

func (dataset *Dataset) MarkIngesting()

func (*Dataset) MarkProfiled

func (dataset *Dataset) MarkProfiled(uri string)

func (*Dataset) MarkProfiledFailed

func (dataset *Dataset) MarkProfiledFailed(msg string)

func (*Dataset) MarkProfiling

func (dataset *Dataset) MarkProfiling()

func (*Dataset) MarkReady

func (dataset *Dataset) MarkReady()

func (*Dataset) MarkReportFailed

func (dataset *Dataset) MarkReportFailed(msg string)

func (*Dataset) MarkReported

func (dataset *Dataset) MarkReported()

func (*Dataset) MarkReporting

func (dataset *Dataset) MarkReporting()

func (*Dataset) MarkSaved

func (dataset *Dataset) MarkSaved()

func (*Dataset) MarkSkewColumns

func (dataset *Dataset) MarkSkewColumns()

func (*Dataset) MarkSnapshotFailed

func (dataset *Dataset) MarkSnapshotFailed(msg string)

func (*Dataset) MarkSnapshotSuccess

func (dataset *Dataset) MarkSnapshotSuccess()

func (*Dataset) MarkTakingSnapshot

func (dataset *Dataset) MarkTakingSnapshot()

func (*Dataset) MarkValidated

func (dataset *Dataset) MarkValidated()

func (*Dataset) MarkValidating

func (dataset *Dataset) MarkValidating()

func (*Dataset) MarkValidationFailed

func (dataset *Dataset) MarkValidationFailed(msg string)

func (*Dataset) Marshal

func (m *Dataset) Marshal() (dAtA []byte, err error)

func (*Dataset) MarshalTo

func (m *Dataset) MarshalTo(dAtA []byte) (int, error)

func (*Dataset) MarshalToSizedBuffer

func (m *Dataset) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Dataset) OpName

func (r *Dataset) OpName() string

func (*Dataset) Populate

func (dataset *Dataset) Populate(name string)

func (*Dataset) PrintConditions

func (dataset *Dataset) PrintConditions()

func (*Dataset) ProfileUri

func (dataset *Dataset) ProfileUri() string

func (*Dataset) Profiled

func (dataset *Dataset) Profiled() bool

func (*Dataset) ProtoMessage

func (*Dataset) ProtoMessage()

func (*Dataset) RemoveFinalizer

func (dataset *Dataset) RemoveFinalizer()

func (*Dataset) ReportName

func (dataset *Dataset) ReportName() string

func (*Dataset) ReportUri

func (dataset *Dataset) ReportUri() string

func (*Dataset) Reported

func (dataset *Dataset) Reported() bool

func (*Dataset) Reset

func (m *Dataset) Reset()

func (*Dataset) RootUri

func (dataset *Dataset) RootUri() string

func (*Dataset) Saved

func (dataset *Dataset) Saved() bool

func (*Dataset) SetupWebhookWithManager

func (r *Dataset) SetupWebhookWithManager(mgr ctrl.Manager) error
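
A brief sketch, assuming a controller-runtime manager has already been constructed, of registering the Dataset webhooks:

func setupDatasetWebhooks(mgr ctrl.Manager) error {
	// Registers the defaulting and validating webhooks declared on Dataset.
	return (&Dataset{}).SetupWebhookWithManager(mgr)
}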

func (*Dataset) Size

func (m *Dataset) Size() (n int)

func (*Dataset) Snapshotted

func (dataset *Dataset) Snapshotted() bool

func (*Dataset) StatusString

func (dataset *Dataset) StatusString() string

func (*Dataset) String

func (this *Dataset) String() string

func (*Dataset) ToYamlFile

func (dataset *Dataset) ToYamlFile() ([]byte, error)

func (*Dataset) Unmarshal

func (m *Dataset) Unmarshal(dAtA []byte) error

func (*Dataset) UpdatePhaseFromConditions

func (dataset *Dataset) UpdatePhaseFromConditions()

Updates the phase of the Dataset based on its conditions

func (*Dataset) ValidateCreate

func (dataset *Dataset) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Dataset) ValidateDelete

func (dataset *Dataset) ValidateDelete() error

func (*Dataset) ValidateUpdate

func (dataset *Dataset) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*Dataset) Validated

func (dataset *Dataset) Validated() bool

func (*Dataset) XXX_DiscardUnknown

func (m *Dataset) XXX_DiscardUnknown()

func (*Dataset) XXX_Marshal

func (m *Dataset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Dataset) XXX_Merge

func (m *Dataset) XXX_Merge(src proto.Message)

func (*Dataset) XXX_Size

func (m *Dataset) XXX_Size() int

func (*Dataset) XXX_Unmarshal

func (m *Dataset) XXX_Unmarshal(b []byte) error

type DatasetCondition

type DatasetCondition struct {
	Type DatasetConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=DatasetConditionType"`
	// Status of the condition, one of True, False, Unknown
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	// +kubebuilder:validation:Optional
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,4,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	// +kubebuilder:validation:Optional
	Reason string `json:"reason,omitempty" protobuf:"bytes,5,opt,name=reason"`
	// A human readable message indicating details about the transition.
	// +kubebuilder:validation:Optional
	Message string `json:"message,omitempty" protobuf:"bytes,6,opt,name=message"`
}

DatasetCondition describes the state of a dataset at a certain point

func (*DatasetCondition) DeepCopy

func (in *DatasetCondition) DeepCopy() *DatasetCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetCondition.

func (*DatasetCondition) DeepCopyInto

func (in *DatasetCondition) DeepCopyInto(out *DatasetCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetCondition) Descriptor

func (*DatasetCondition) Descriptor() ([]byte, []int)

func (*DatasetCondition) Marshal

func (m *DatasetCondition) Marshal() (dAtA []byte, err error)

func (*DatasetCondition) MarshalTo

func (m *DatasetCondition) MarshalTo(dAtA []byte) (int, error)

func (*DatasetCondition) MarshalToSizedBuffer

func (m *DatasetCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetCondition) ProtoMessage

func (*DatasetCondition) ProtoMessage()

func (*DatasetCondition) Reset

func (m *DatasetCondition) Reset()

func (*DatasetCondition) Size

func (m *DatasetCondition) Size() (n int)

func (*DatasetCondition) String

func (this *DatasetCondition) String() string

func (*DatasetCondition) Unmarshal

func (m *DatasetCondition) Unmarshal(dAtA []byte) error

func (*DatasetCondition) XXX_DiscardUnknown

func (m *DatasetCondition) XXX_DiscardUnknown()

func (*DatasetCondition) XXX_Marshal

func (m *DatasetCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetCondition) XXX_Merge

func (m *DatasetCondition) XXX_Merge(src proto.Message)

func (*DatasetCondition) XXX_Size

func (m *DatasetCondition) XXX_Size() int

func (*DatasetCondition) XXX_Unmarshal

func (m *DatasetCondition) XXX_Unmarshal(b []byte) error

type DatasetConditionType

type DatasetConditionType string

Condition on the dataset

const (
	DatasetReported    DatasetConditionType = "Reported"
	DatasetValidated   DatasetConditionType = "Validated"
	DatasetSnapshotted DatasetConditionType = "Snapshotted"
	DatasetProfiled    DatasetConditionType = "Profiled"
	DatasetIngested    DatasetConditionType = "Ingested"
	DatasetGenerated   DatasetConditionType = "Generated"
	DatasetSaved       DatasetConditionType = "Saved"
	DatasetArchived    DatasetConditionType = "Archived"
	DatasetReady       DatasetConditionType = "Ready"
)
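
A sketch, written as if inside this package, of recording and reading a condition with the helpers above; the reason and message strings are illustrative:

func markIngestedCondition(dataset *Dataset) {
	now := metav1.Now()
	dataset.CreateOrUpdateCond(DatasetCondition{
		Type:               DatasetIngested,
		Status:             v1.ConditionTrue,
		LastTransitionTime: &now,
		Reason:             "IngestCompleted", // illustrative reason
		Message:            "raw data copied into the live section of the bucket",
	})

	// Reading the condition back; Status is v1.ConditionTrue once recorded.
	cond := dataset.GetCond(DatasetIngested)
	_ = cond
}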

type DatasetList

type DatasetList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []Dataset `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true DatasetList contains a list of Datasets

func (*DatasetList) DeepCopy

func (in *DatasetList) DeepCopy() *DatasetList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetList.

func (*DatasetList) DeepCopyInto

func (in *DatasetList) DeepCopyInto(out *DatasetList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetList) DeepCopyObject

func (in *DatasetList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*DatasetList) Descriptor

func (*DatasetList) Descriptor() ([]byte, []int)

func (*DatasetList) Marshal

func (m *DatasetList) Marshal() (dAtA []byte, err error)

func (*DatasetList) MarshalTo

func (m *DatasetList) MarshalTo(dAtA []byte) (int, error)

func (*DatasetList) MarshalToSizedBuffer

func (m *DatasetList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetList) ProtoMessage

func (*DatasetList) ProtoMessage()

func (*DatasetList) Reset

func (m *DatasetList) Reset()

func (*DatasetList) Size

func (m *DatasetList) Size() (n int)

func (*DatasetList) String

func (this *DatasetList) String() string

func (*DatasetList) Unmarshal

func (m *DatasetList) Unmarshal(dAtA []byte) error

func (*DatasetList) XXX_DiscardUnknown

func (m *DatasetList) XXX_DiscardUnknown()

func (*DatasetList) XXX_Marshal

func (m *DatasetList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetList) XXX_Merge

func (m *DatasetList) XXX_Merge(src proto.Message)

func (*DatasetList) XXX_Size

func (m *DatasetList) XXX_Size() int

func (*DatasetList) XXX_Unmarshal

func (m *DatasetList) XXX_Unmarshal(b []byte) error

type DatasetPhase

type DatasetPhase string
const (
	DatasetPhasePending           DatasetPhase = "Pending"    // before processing starts
	DatasetPhaseGenerating        DatasetPhase = "Generating" // when generating
	DatasetPhaseGenSuccess        DatasetPhase = "GenSuccess" // when synthetic gen success
	DatasetPhaseIngestRunning     DatasetPhase = "Ingesting"
	DatasetPhaseIngestSuccess     DatasetPhase = "Ingested"
	DatasetPhaseReportRunning     DatasetPhase = "Reporting"
	DatasetPhaseReportSuccess     DatasetPhase = "Reported"
	DatasetPhaseProfileRunning    DatasetPhase = "Profiling"
	DatasetPhaseProfileSuccess    DatasetPhase = "Profiled"
	DatasetPhaseValidationRunning DatasetPhase = "Validating"
	DatasetPhaseValidationSuccess DatasetPhase = "Validated"
	DatasetPhaseSnapshotRunning   DatasetPhase = "TakingSnapshot"
	DatasetPhaseSnapshotSuccess   DatasetPhase = "Snapshotted"
	DatasetPhaseFailed            DatasetPhase = "Failed"
	DatasetPhaseAborted           DatasetPhase = "Aborted"
	DatasetPhaseReady             DatasetPhase = "Ready"
)
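
A sketch of branching on the phase; it assumes, as the printer columns above suggest, that the Dataset type exposes its status through a Status field:

func describePhase(dataset *Dataset) string {
	switch dataset.Status.Phase {
	case DatasetPhaseReady:
		return "dataset is ready for use in a Study"
	case DatasetPhaseFailed, DatasetPhaseAborted:
		return "dataset processing did not complete"
	default:
		// Ingesting, Reporting, Profiling, Validating, etc. are transient phases.
		return "dataset is still being processed"
	}
}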

type DatasetRole added in v0.4.914

type DatasetRole string

+kubebuilder:validation:Enum="unlabled";"predictions";"ground-truth";"training";

const (
	DatasetRoleUnlabeled   DatasetRole = "unlabeled"    // Unlabeled dataset
	DatasetRolePredictions DatasetRole = "predictions"  // Dataset containing predictions
	DatasetRoleGroundTruth DatasetRole = "ground-truth" // Ground-truth dataset that was labeled
	DatasetRoleTraining    DatasetRole = "training"     // Regular labeled dataset

)

type DatasetSpec

type DatasetSpec struct {
	// The name of the Account which created the object, which exists in the same tenant as the object
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// The name of the DataProductVersion which describes the version of the resource
	// that exists in the same DataProduct namespace as the resource
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:validation:Required
	// +required
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// The reference to the Data Source resource which exists in the same Data Product namespace as the object.
	// The Data Source must represent the columns and the task type of the Dataset. The validation rules associated with
	// the Data Source will be validated against the raw data of the Dataset once it is created
	// +kubebuilder:validation:Required
	// +kubebuilder:validation:MaxLength=63
	// +kubebuilder:default:=""
	// +required
	DataSourceName *string `json:"datasourceName,omitempty" protobuf:"bytes,3,opt,name=datasourceName"`
	// User-provided description of the object
	// +kubebuilder:validation:MaxLength=512
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,4,opt,name=description"`
	// User-provided display name of the object
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DisplayName *string `json:"displayName,omitempty" protobuf:"bytes,5,opt,name=displayName"`
	// The dataset role
	// +kubebuilder:default:="training"
	// +kubebuilder:validation:Optional
	Role *DatasetRole `json:"role,omitempty" protobuf:"bytes,6,opt,name=role"`
	// Indicates if a PDF report containing the Dataset's profile should be generated
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	Reported *bool `json:"reported,omitempty" protobuf:"varint,7,opt,name=reported"`
	// Indicates if the resource controller has created a snapshot of the data in the case that it is being read
	// directly from a database, and must be converted to a flat-file type such as a CSV as a result
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Snapshotted *bool `json:"snapshotted,omitempty" protobuf:"varint,8,opt,name=snapshotted"`
	// Indicates if the Dataset should be checked against the validation rules of its Data Source
	// +kubebuilder:default:=true
	// +kubebuilder:validation:Optional
	Validated *bool `json:"validate,omitempty" protobuf:"varint,9,opt,name=validate"`
	// Indicates if synthetic data should be generated (currently unimplemented)
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Synthetic *bool `json:"synthetic,omitempty" protobuf:"varint,11,opt,name=synthetic"`
	// If `Synthetic` is set to true, SyntheticRows indicates how many rows of synthetic data should be generated
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	SyntheticRows *int32 `json:"syntheticRows,omitempty" protobuf:"varint,12,opt,name=syntheticRows"`
	// Origin is the location of the data file or database query which holds the raw data of the Dataset. When the Dataset is
	// created, the resource controller will retrieve the data from the location, validate it against its Data Source
	// if applicable, and store it inside the `live` section of the Virtual Bucket resource specified by the location
	// +kubebuilder:validation:Optional
	Origin DataLocation `json:"origin,omitempty" protobuf:"bytes,13,opt,name=origin"`
	// Location is the final location of the data which was copied from the `Origin` location during the ingestion phase.
	// This field is set by the Dataset resource controller and should not be changed by any end-users
	// +kubebuilder:validation:Required
	// +required
	Location DataLocation `json:"location,omitempty" protobuf:"bytes,14,opt,name=location"`
	// Resources specifies the resource requirements which the Dataset will request when creating Jobs to analyze the data
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,15,opt,name=resources"`
	// The deadline in seconds for all Jobs created by the Dataset
	// +kubebuilder:default:=600
	// +kubebuilder:validation:Optional
	ActiveDeadlineSeconds *int64 `json:"activeDeadlineSeconds,omitempty" protobuf:"varint,16,opt,name=activeDeadlineSeconds"`
	// The type of dataset which was uploaded. `tabular` is the only supported type as of the current release
	// +kubebuilder:default:="tabular"
	// +kubebuilder:validation:Optional
	Type *catalog.DatasetType `json:"type,omitempty" protobuf:"bytes,17,opt,name=type"`
	// The specification for how the data should be sampled, if applicable. Sampling may improve dataset and model creation
	// time in the case of very large datasets that are being rapidly prototyped and iterated on
	// +kubebuilder:validation:Optional
	Sample SampleSpec `json:"sample,omitempty" protobuf:"bytes,18,opt,name=sample"`
	// If the dataset is syntactic, this is the syntactic spec
	Syntactic SyntacticSpec `json:"syntactic,omitempty" protobuf:"bytes,19,opt,name=syntactic"`
	// The machine learning task relevant to the Dataset. This field *must* be the same as the Data Source of the object
	// +kubebuilder:validation:Optional
	Task *catalog.MLTask `json:"task,omitempty" protobuf:"bytes,20,opt,name=task"`
	// The machine learning sub task relevant to the Dataset. This field *must* be the same as the Data Source of the object
	// +kubebuilder:default:=none
	// +kubebuilder:validation:Optional
	SubTask *catalog.MLSubtask `json:"subtask,omitempty" protobuf:"bytes,21,opt,name=subtask"`
	// The notification specification that determines which notifiers will receive Alerts generated by the object
	// +kubebuilder:validation:Optional
	Notification catalog.NotificationSpec `json:"notification,omitempty" protobuf:"bytes,22,opt,name=notification"`
	// The specification for how to find the correlations of the Dataset's features during the profiling phase.
	// Based on the specification, the data plane will compute the correlation between each feature and will store the highest-scoring
	// +kubebuilder:validation:Optional
	Correlation CorrelationSpec `json:"correlation,omitempty" protobuf:"bytes,23,opt,name=correlation"`
	// Indicates if the Dataset should be quickly processed.
	// If enabled, the validation, profiling, and reporting phases will be skipped.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Fast *bool `json:"fast,omitempty" protobuf:"varint,24,opt,name=fast"`
	// The reference to the Lab under which Jobs created by the Dataset will be executed
	// +kubebuilder:validation:Optional
	LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,25,opt,name=labRef"`
	// PredictionDatasetRef is a reference to the prediction Dataset (the dataset that contains the prediction logs).
	// +kubebuilder:validation:Optional
	PredictionDatasetRef v1.ObjectReference `json:"predictionDatasetRef,omitempty" protobuf:"bytes,26,opt,name=predictionDatasetRef"`
	// Used for prediction datasets; holds a reference to the Predictor resource that created this dataset
	// +kubebuilder:validation:Optional
	PredictorRef v1.ObjectReference `json:"predictorRef,omitempty" protobuf:"bytes,27,opt,name=predictorRef"`
}

DatasetSpec defines the desired state of the Dataset
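
A minimal construction sketch, written as if inside this package and assuming Dataset embeds metav1.ObjectMeta like the other resources here; all names are illustrative:

func newTrainingDataset() *Dataset {
	owner := "no-one"
	version := "v0.0.1"     // illustrative DataProductVersion name
	source := "iris-source" // illustrative DataSource name
	role := DatasetRoleTraining
	reported := true

	dataset := &Dataset{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "iris-train",   // illustrative Dataset name
			Namespace: "iris-product", // the DataProduct namespace (illustrative)
		},
		Spec: DatasetSpec{
			Owner:          &owner,
			VersionName:    &version,
			DataSourceName: &source,
			Role:           &role,
			Reported:       &reported,
			// Origin and Location (DataLocation) would normally point at the
			// raw data; the DataLocation type is defined elsewhere in this package.
		},
	}
	dataset.Default() // apply the declared kubebuilder defaults
	return dataset
}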

func (*DatasetSpec) DeepCopy

func (in *DatasetSpec) DeepCopy() *DatasetSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpec.

func (*DatasetSpec) DeepCopyInto

func (in *DatasetSpec) DeepCopyInto(out *DatasetSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetSpec) Descriptor

func (*DatasetSpec) Descriptor() ([]byte, []int)

func (*DatasetSpec) Marshal

func (m *DatasetSpec) Marshal() (dAtA []byte, err error)

func (*DatasetSpec) MarshalTo

func (m *DatasetSpec) MarshalTo(dAtA []byte) (int, error)

func (*DatasetSpec) MarshalToSizedBuffer

func (m *DatasetSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetSpec) ProtoMessage

func (*DatasetSpec) ProtoMessage()

func (*DatasetSpec) Reset

func (m *DatasetSpec) Reset()

func (*DatasetSpec) Size

func (m *DatasetSpec) Size() (n int)

func (*DatasetSpec) String

func (this *DatasetSpec) String() string

func (*DatasetSpec) Unmarshal

func (m *DatasetSpec) Unmarshal(dAtA []byte) error

func (*DatasetSpec) XXX_DiscardUnknown

func (m *DatasetSpec) XXX_DiscardUnknown()

func (*DatasetSpec) XXX_Marshal

func (m *DatasetSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetSpec) XXX_Merge

func (m *DatasetSpec) XXX_Merge(src proto.Message)

func (*DatasetSpec) XXX_Size

func (m *DatasetSpec) XXX_Size() int

func (*DatasetSpec) XXX_Unmarshal

func (m *DatasetSpec) XXX_Unmarshal(b []byte) error

type DatasetStatistics

type DatasetStatistics struct {
	// Columns contains the collection of statistics for each feature
	// +kubebuilder:validation:Optional
	Columns []ColumnStatistics `json:"columns,omitempty" protobuf:"bytes,1,rep,name=columns"`
	// Number of rows observed from the data
	// +kubebuilder:validation:Optional
	Rows int32 `json:"rows,omitempty" protobuf:"varint,3,opt,name=rows"`
	// Number of columns observed from the data
	// +kubebuilder:validation:Optional
	Cols int32 `json:"cols,omitempty" protobuf:"varint,4,opt,name=cols"`
	// The file size of the data in bytes
	// +kubebuilder:validation:Optional
	FileSize int32 `json:"fileSize,omitempty" protobuf:"varint,5,opt,name=fileSize"`
	// The top correlations between all features and the target feature
	// +kubebuilder:validation:Optional
	CorrelationsWithTarget []Correlation `json:"correlationsWithTarget,omitempty" protobuf:"bytes,6,rep,name=correlationsWithTarget"`
	// The top correlations between features, computed per the CorrelationSpec of the parent Dataset
	// +kubebuilder:validation:Optional
	TopCorrelations []Correlation `json:"topCorrelations,omitempty" protobuf:"bytes,7,rep,name=topCorrelations"`
}

DatasetStatistics contains statistics about the Dataset's overall data, as well as every feature of the data. The data structure is populated with information during the `Profiling` phase of the parent Dataset.
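
A small sketch of reading the overall statistics once the profiling phase has populated them; it assumes the statistics live under the Dataset's Status field:

func summarizeStatistics(dataset *Dataset) string {
	stats := dataset.Status.Statistics
	return fmt.Sprintf("%d rows x %d cols, %d bytes", stats.Rows, stats.Cols, stats.FileSize)
}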

func (*DatasetStatistics) DeepCopy

func (in *DatasetStatistics) DeepCopy() *DatasetStatistics

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetStatistics.

func (*DatasetStatistics) DeepCopyInto

func (in *DatasetStatistics) DeepCopyInto(out *DatasetStatistics)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetStatistics) Descriptor

func (*DatasetStatistics) Descriptor() ([]byte, []int)

func (*DatasetStatistics) Marshal

func (m *DatasetStatistics) Marshal() (dAtA []byte, err error)

func (*DatasetStatistics) MarshalTo

func (m *DatasetStatistics) MarshalTo(dAtA []byte) (int, error)

func (*DatasetStatistics) MarshalToSizedBuffer

func (m *DatasetStatistics) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetStatistics) ProtoMessage

func (*DatasetStatistics) ProtoMessage()

func (*DatasetStatistics) Reset

func (m *DatasetStatistics) Reset()

func (*DatasetStatistics) Size

func (m *DatasetStatistics) Size() (n int)

func (*DatasetStatistics) String

func (this *DatasetStatistics) String() string

func (*DatasetStatistics) Unmarshal

func (m *DatasetStatistics) Unmarshal(dAtA []byte) error

func (*DatasetStatistics) XXX_DiscardUnknown

func (m *DatasetStatistics) XXX_DiscardUnknown()

func (*DatasetStatistics) XXX_Marshal

func (m *DatasetStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetStatistics) XXX_Merge

func (m *DatasetStatistics) XXX_Merge(src proto.Message)

func (*DatasetStatistics) XXX_Size

func (m *DatasetStatistics) XXX_Size() int

func (*DatasetStatistics) XXX_Unmarshal

func (m *DatasetStatistics) XXX_Unmarshal(b []byte) error

type DatasetStatus

type DatasetStatus struct {
	// Statistics for each column of the Dataset, which are generated during the profiling phase.
	// +kubebuilder:validation:Optional
	Statistics DatasetStatistics `json:"statistics,omitempty" protobuf:"bytes,1,opt,name=statistics"`
	// The current phase of the Dataset progress
	// +kubebuilder:default:="Pending"
	// +kubebuilder:validation:Optional
	Phase DatasetPhase `json:"phase,omitempty" protobuf:"bytes,2,opt,name=phase"`
	// Reference to the report object that was generated for the dataset, which exists in the same Data Product namespace
	// as the object
	// +kubebuilder:validation:Optional
	ReportName string `json:"reportName,omitempty" protobuf:"bytes,3,opt,name=reportName"`
	// The location of the report generated during the reporting phase. This field is intended for internal use
	// +kubebuilder:validation:Optional
	ReportUri string `json:"reportUri,omitempty" protobuf:"bytes,4,opt,name=reportUri"`
	// The location of raw profile data. This field is intended for internal use
	// +kubebuilder:validation:Optional
	ProfileUri string `json:"profileUri" protobuf:"bytes,5,opt,name=profileUri"`
	// Whether or not the data was detected as imbalanced
	//+kubebuilder:validation:Optional
	Imbalanced bool `json:"imbalanced,omitempty" protobuf:"bytes,6,opt,name=imbalanced"`
	// The location of the anomaly file, which contains the list of rows marked as anomalies by an isolation forest algorithm
	// +kubebuilder:validation:Optional
	AnomaliesUri string `json:"anomaliesUri" protobuf:"bytes,7,opt,name=anomaliesUri"`
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,8,opt,name=observedGeneration"`
	// List of validation results which are generated for every validation rule associated with the Dataset's Data Source
	//+kubebuilder:validation:Optional
	TestResults DatasetTestSuiteResult `json:"testResults,omitempty" protobuf:"bytes,9,rep,name=testResults"`
	// Last time the Dataset was used with a Study
	//+kubebuilder:validation:Optional
	LastStudyTime *metav1.Time `json:"lastStudyTime,omitempty" protobuf:"bytes,10,opt,name=lastStudyTime"`
	// In the case of failure, the Dataset resource controller will set this field with a failure reason
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,12,opt,name=failureReason"`
	// In the case of failure, the Dataset resource controller will set this field with a failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,13,opt,name=failureMessage"`
	// The current progress of the Dataset, with a maximum of 100, that is associated with the current phase
	//+kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	Progress int32 `json:"progress,omitempty" protobuf:"varint,14,opt,name=progress"`
	// Sha256 signature of the raw data. Intended for internal use
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Hash string `json:"hash,omitempty" protobuf:"bytes,15,opt,name=hash"`
	// The log file specification that determines the location of all logs produced by the object
	Logs catalog.Logs `json:"logs" protobuf:"bytes,16,opt,name=logs"`
	// If the dataset is derived, the name of the Dataset that the object is derived from
	// +kubebuilder:validation:Optional
	DerivedFromDataset *string `json:"derivedFromDataset,omitempty" protobuf:"bytes,17,opt,name=derivedFromDataset"`
	// The last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,18,opt,name=lastUpdated"`
	// The Docker images used during the analysis of the Dataset
	// +kubebuilder:validation:Optional
	Images catalog.Images `json:"images,omitempty" protobuf:"bytes,19,opt,name=images"`
	// The time that the system started processing the Dataset, usually after the creation of the object
	// +kubebuilder:validation:Optional
	StartTime *metav1.Time `json:"startTime,omitempty" protobuf:"bytes,20,opt,name=startTime"`
	// The time that the Dataset finished processing, either due to completion or failure
	// +kubebuilder:validation:Optional
	EndTime *metav1.Time `json:"endTime,omitempty" protobuf:"bytes,21,opt,name=endTime"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []DatasetCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,22,rep,name=conditions"`
}

DatasetStatus defines the observed state of a Dataset object
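
A sketch of surfacing a failure from the status, using the IsFailed helper documented above:

func failureSummary(dataset *Dataset) (string, bool) {
	if !dataset.IsFailed() {
		return "", false
	}
	msg := "dataset processing failed"
	if dataset.Status.FailureMessage != nil {
		msg = *dataset.Status.FailureMessage
	}
	return msg, true
}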

func (*DatasetStatus) DeepCopy

func (in *DatasetStatus) DeepCopy() *DatasetStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetStatus.

func (*DatasetStatus) DeepCopyInto

func (in *DatasetStatus) DeepCopyInto(out *DatasetStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetStatus) Descriptor

func (*DatasetStatus) Descriptor() ([]byte, []int)

func (*DatasetStatus) Marshal

func (m *DatasetStatus) Marshal() (dAtA []byte, err error)

func (*DatasetStatus) MarshalTo

func (m *DatasetStatus) MarshalTo(dAtA []byte) (int, error)

func (*DatasetStatus) MarshalToSizedBuffer

func (m *DatasetStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetStatus) ProtoMessage

func (*DatasetStatus) ProtoMessage()

func (*DatasetStatus) Reset

func (m *DatasetStatus) Reset()

func (*DatasetStatus) Size

func (m *DatasetStatus) Size() (n int)

func (*DatasetStatus) String

func (this *DatasetStatus) String() string

func (*DatasetStatus) Unmarshal

func (m *DatasetStatus) Unmarshal(dAtA []byte) error

func (*DatasetStatus) XXX_DiscardUnknown

func (m *DatasetStatus) XXX_DiscardUnknown()

func (*DatasetStatus) XXX_Marshal

func (m *DatasetStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetStatus) XXX_Merge

func (m *DatasetStatus) XXX_Merge(src proto.Message)

func (*DatasetStatus) XXX_Size

func (m *DatasetStatus) XXX_Size() int

func (*DatasetStatus) XXX_Unmarshal

func (m *DatasetStatus) XXX_Unmarshal(b []byte) error

type DatasetTemplate

type DatasetTemplate struct {
	// Standard object's metadata.
	// More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
	// +kubebuilder:validation:Optional
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              DatasetSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
}

DatasetTemplate is used to generate new datasets
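
A sketch of how such a template might be stamped into a concrete Dataset; the helper name is illustrative and the code assumes Dataset embeds metav1.ObjectMeta:

func datasetFromTemplate(tmpl DatasetTemplate, name string) *Dataset {
	meta := tmpl.ObjectMeta.DeepCopy()
	meta.Name = name
	return &Dataset{
		ObjectMeta: *meta,
		Spec:       *tmpl.Spec.DeepCopy(),
	}
}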

func (*DatasetTemplate) DeepCopy

func (in *DatasetTemplate) DeepCopy() *DatasetTemplate

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetTemplate.

func (*DatasetTemplate) DeepCopyInto

func (in *DatasetTemplate) DeepCopyInto(out *DatasetTemplate)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetTemplate) Descriptor

func (*DatasetTemplate) Descriptor() ([]byte, []int)

func (*DatasetTemplate) Marshal

func (m *DatasetTemplate) Marshal() (dAtA []byte, err error)

func (*DatasetTemplate) MarshalTo

func (m *DatasetTemplate) MarshalTo(dAtA []byte) (int, error)

func (*DatasetTemplate) MarshalToSizedBuffer

func (m *DatasetTemplate) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetTemplate) ProtoMessage

func (*DatasetTemplate) ProtoMessage()

func (*DatasetTemplate) Reset

func (m *DatasetTemplate) Reset()

func (*DatasetTemplate) Size

func (m *DatasetTemplate) Size() (n int)

func (*DatasetTemplate) String

func (this *DatasetTemplate) String() string

func (*DatasetTemplate) Unmarshal

func (m *DatasetTemplate) Unmarshal(dAtA []byte) error

func (*DatasetTemplate) XXX_DiscardUnknown

func (m *DatasetTemplate) XXX_DiscardUnknown()

func (*DatasetTemplate) XXX_Marshal

func (m *DatasetTemplate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetTemplate) XXX_Merge

func (m *DatasetTemplate) XXX_Merge(src proto.Message)

func (*DatasetTemplate) XXX_Size

func (m *DatasetTemplate) XXX_Size() int

func (*DatasetTemplate) XXX_Unmarshal

func (m *DatasetTemplate) XXX_Unmarshal(b []byte) error

type DatasetTestSuite added in v0.4.939

type DatasetTestSuite struct {
	// MultiDatasetTest contains validations for multiple datasets
	// +kubebuilder:validation:Optional
	MultiDatasetSuite catalog.TestSuite `json:"multiDatasetSuite,omitempty" protobuf:"bytes,1,opt,name=multiDatasetSuite"`
	// DatasetTests contains validations for the whole dataset
	// +kubebuilder:validation:Optional
	DatasetSuite catalog.TestSuite `json:"datasetSuite,omitempty" protobuf:"bytes,2,opt,name=datasetSuite"`
	// MultiColumnTests defines validations for multiple columns from the dataset
	// +kubebuilder:validation:Optional
	MultiColumnSuite catalog.TestSuite `json:"multiColumnSuite,omitempty" protobuf:"bytes,3,opt,name=multiColumnSuite"`
	// ColumnTests defines assertions for columns from the dataset
	// +kubebuilder:validation:Optional
	ColumnSuite catalog.TestSuite `json:"columnSuite,omitempty" protobuf:"bytes,4,opt,name=columnSuite"`
	// FileTests defines assertions for the contents of the data file
	// +kubebuilder:validation:Optional
	FileSuite catalog.TestSuite `json:"fileSuite,omitempty" protobuf:"bytes,5,opt,name=fileSuite"`
}

func (*DatasetTestSuite) DeepCopy added in v0.4.939

func (in *DatasetTestSuite) DeepCopy() *DatasetTestSuite

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetTestSuite.

func (*DatasetTestSuite) DeepCopyInto added in v0.4.939

func (in *DatasetTestSuite) DeepCopyInto(out *DatasetTestSuite)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetTestSuite) Descriptor added in v0.4.939

func (*DatasetTestSuite) Descriptor() ([]byte, []int)

func (*DatasetTestSuite) Marshal added in v0.4.939

func (m *DatasetTestSuite) Marshal() (dAtA []byte, err error)

func (*DatasetTestSuite) MarshalTo added in v0.4.939

func (m *DatasetTestSuite) MarshalTo(dAtA []byte) (int, error)

func (*DatasetTestSuite) MarshalToSizedBuffer added in v0.4.939

func (m *DatasetTestSuite) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetTestSuite) ProtoMessage added in v0.4.939

func (*DatasetTestSuite) ProtoMessage()

func (*DatasetTestSuite) Reset added in v0.4.939

func (m *DatasetTestSuite) Reset()

func (*DatasetTestSuite) Size added in v0.4.939

func (m *DatasetTestSuite) Size() (n int)

func (*DatasetTestSuite) String added in v0.4.939

func (this *DatasetTestSuite) String() string

func (*DatasetTestSuite) Unmarshal added in v0.4.939

func (m *DatasetTestSuite) Unmarshal(dAtA []byte) error

func (*DatasetTestSuite) XXX_DiscardUnknown added in v0.4.939

func (m *DatasetTestSuite) XXX_DiscardUnknown()

func (*DatasetTestSuite) XXX_Marshal added in v0.4.939

func (m *DatasetTestSuite) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetTestSuite) XXX_Merge added in v0.4.939

func (m *DatasetTestSuite) XXX_Merge(src proto.Message)

func (*DatasetTestSuite) XXX_Size added in v0.4.939

func (m *DatasetTestSuite) XXX_Size() int

func (*DatasetTestSuite) XXX_Unmarshal added in v0.4.939

func (m *DatasetTestSuite) XXX_Unmarshal(b []byte) error

type DatasetTestSuiteResult added in v0.4.948

type DatasetTestSuiteResult struct {
	// MultiDatasetTest contains validations for multiple datasets
	// +kubebuilder:validation:Optional
	MultiDatasetSuite catalog.TestSuiteResult `json:"multiDatasetSuite,omitempty" protobuf:"bytes,1,opt,name=multiDatasetSuite"`
	// DatasetTests contains validations for the whole dataset
	// +kubebuilder:validation:Optional
	DatasetSuite catalog.TestSuiteResult `json:"datasetSuite,omitempty" protobuf:"bytes,2,opt,name=datasetSuite"`
	// MultiColumnTests defines validations for multiple columns from the dataset
	// +kubebuilder:validation:Optional
	MultiColumnSuite catalog.TestSuiteResult `json:"multiColumnSuite,omitempty" protobuf:"bytes,3,opt,name=multiColumnSuite"`
	// ColumnTests defines assertions for columns from the dataset
	// +kubebuilder:validation:Optional
	ColumnSuite catalog.TestSuiteResult `json:"columnSuite,omitempty" protobuf:"bytes,4,opt,name=columnSuite"`
	// FileTests defines assertions for the contents of the data file
	// +kubebuilder:validation:Optional
	FileSuite catalog.TestSuiteResult `json:"fileSuite,omitempty" protobuf:"bytes,5,opt,name=fileSuite"`
}

Holds the dataset test suite result

func (*DatasetTestSuiteResult) DeepCopy added in v0.4.949

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetTestSuiteResult.

func (*DatasetTestSuiteResult) DeepCopyInto added in v0.4.949

func (in *DatasetTestSuiteResult) DeepCopyInto(out *DatasetTestSuiteResult)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*DatasetTestSuiteResult) Descriptor added in v0.4.948

func (*DatasetTestSuiteResult) Descriptor() ([]byte, []int)

func (*DatasetTestSuiteResult) Marshal added in v0.4.948

func (m *DatasetTestSuiteResult) Marshal() (dAtA []byte, err error)

func (*DatasetTestSuiteResult) MarshalTo added in v0.4.948

func (m *DatasetTestSuiteResult) MarshalTo(dAtA []byte) (int, error)

func (*DatasetTestSuiteResult) MarshalToSizedBuffer added in v0.4.948

func (m *DatasetTestSuiteResult) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*DatasetTestSuiteResult) ProtoMessage added in v0.4.948

func (*DatasetTestSuiteResult) ProtoMessage()

func (*DatasetTestSuiteResult) Reset added in v0.4.948

func (m *DatasetTestSuiteResult) Reset()

func (*DatasetTestSuiteResult) Size added in v0.4.948

func (m *DatasetTestSuiteResult) Size() (n int)

func (*DatasetTestSuiteResult) String added in v0.4.948

func (this *DatasetTestSuiteResult) String() string

func (*DatasetTestSuiteResult) Unmarshal added in v0.4.948

func (m *DatasetTestSuiteResult) Unmarshal(dAtA []byte) error

func (*DatasetTestSuiteResult) XXX_DiscardUnknown added in v0.4.948

func (m *DatasetTestSuiteResult) XXX_DiscardUnknown()

func (*DatasetTestSuiteResult) XXX_Marshal added in v0.4.948

func (m *DatasetTestSuiteResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*DatasetTestSuiteResult) XXX_Merge added in v0.4.948

func (m *DatasetTestSuiteResult) XXX_Merge(src proto.Message)

func (*DatasetTestSuiteResult) XXX_Size added in v0.4.948

func (m *DatasetTestSuiteResult) XXX_Size() int

func (*DatasetTestSuiteResult) XXX_Unmarshal added in v0.4.948

func (m *DatasetTestSuiteResult) XXX_Unmarshal(b []byte) error

type Delimiter

type Delimiter string

Delimiter specifies the character, or group of characters, used to separate fields +kubebuilder:validation:Enum="crlf";"cr";"lf";"semicolon";"colon";"comma";"tab";"space";"pipe"

const (
	DelimiterCRLF      Delimiter = "crlf"
	DelimiterCR        Delimiter = "cr"
	DelimiterLF        Delimiter = "lf"
	DelimiterSemicolon Delimiter = "semicolon"
	DelimiterColon     Delimiter = "colon"
	DelimiterComma     Delimiter = "comma"
	DelimiterTab       Delimiter = "tab"
	DelimiterSpace     Delimiter = "space"
	DelimiterPipe      Delimiter = "pipe"
)

type Entity

type Entity struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              EntitySpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status EntityStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

Entity represents an entity object +kubebuilder:object:root=true +kubebuilder:subresource:status +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=entities,singular=entity,shortName=et,categories={data,modela}

func ParseEntity

func ParseEntity(content string) (*Entity, error)

Parses an Entity from its serialized content.

func (*Entity) AddFinalizer

func (entity *Entity) AddFinalizer()

func (*Entity) Age

func (entity *Entity) Age() string

func (*Entity) Archived

func (entity *Entity) Archived() bool

func (*Entity) CreateOrUpdateCond

func (entity *Entity) CreateOrUpdateCond(cond EntityCondition)

Merge or update condition

func (*Entity) DeepCopy

func (in *Entity) DeepCopy() *Entity

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Entity.

func (*Entity) DeepCopyInto

func (in *Entity) DeepCopyInto(out *Entity)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Entity) DeepCopyObject

func (in *Entity) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Entity) Default

func (e *Entity) Default()

No defaults are applied in the current release.

func (*Entity) Descriptor

func (*Entity) Descriptor() ([]byte, []int)

func (*Entity) GetCond

func (entity *Entity) GetCond(t EntityConditionType) EntityCondition

func (*Entity) GetCondIdx

func (entity *Entity) GetCondIdx(t EntityConditionType) int

func (*Entity) HasFinalizer

func (entity *Entity) HasFinalizer() bool

func (*Entity) IsGitObj

func (entity *Entity) IsGitObj() bool

func (*Entity) IsReady

func (entity *Entity) IsReady() bool

func (*Entity) Key

func (entity *Entity) Key() string

func (*Entity) LabelWithCommit

func (entity *Entity) LabelWithCommit(commit string, uname string, branch string)

func (*Entity) MarkArchived

func (entity *Entity) MarkArchived()

func (*Entity) MarkReady

func (entity *Entity) MarkReady()

func (*Entity) Marshal

func (m *Entity) Marshal() (dAtA []byte, err error)

func (*Entity) MarshalTo

func (m *Entity) MarshalTo(dAtA []byte) (int, error)

func (*Entity) MarshalToSizedBuffer

func (m *Entity) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Entity) ProtoMessage

func (*Entity) ProtoMessage()

func (*Entity) RemoveFinalizer

func (entity *Entity) RemoveFinalizer()

func (*Entity) RepEntry

func (entity *Entity) RepEntry() (string, error)

func (*Entity) RepPath

func (entity *Entity) RepPath(root string) (string, error)

Returns the on-disk repository location.

func (*Entity) Reset

func (m *Entity) Reset()

func (*Entity) SetChanged

func (entity *Entity) SetChanged()

func (*Entity) SetupWebhookWithManager

func (r *Entity) SetupWebhookWithManager(mgr ctrl.Manager) error

Set up the webhook with the manager.

func (*Entity) Size

func (m *Entity) Size() (n int)

func (*Entity) String

func (this *Entity) String() string

func (*Entity) ToYamlFile

func (entity *Entity) ToYamlFile() ([]byte, error)

func (*Entity) Unmarshal

func (m *Entity) Unmarshal(dAtA []byte) error

func (*Entity) ValidateCreate

func (entity *Entity) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Entity) ValidateDelete

func (r *Entity) ValidateDelete() error

func (*Entity) ValidateUpdate

func (entity *Entity) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*Entity) XXX_DiscardUnknown

func (m *Entity) XXX_DiscardUnknown()

func (*Entity) XXX_Marshal

func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Entity) XXX_Merge

func (m *Entity) XXX_Merge(src proto.Message)

func (*Entity) XXX_Size

func (m *Entity) XXX_Size() int

func (*Entity) XXX_Unmarshal

func (m *Entity) XXX_Unmarshal(b []byte) error

type EntityCondition

type EntityCondition struct {
	// Type of entity condition.
	Type EntityConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=EntityConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

EntityCondition describes the state of an Entity at a certain point.

func (*EntityCondition) DeepCopy

func (in *EntityCondition) DeepCopy() *EntityCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntityCondition.

func (*EntityCondition) DeepCopyInto

func (in *EntityCondition) DeepCopyInto(out *EntityCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*EntityCondition) Descriptor

func (*EntityCondition) Descriptor() ([]byte, []int)

func (*EntityCondition) Marshal

func (m *EntityCondition) Marshal() (dAtA []byte, err error)

func (*EntityCondition) MarshalTo

func (m *EntityCondition) MarshalTo(dAtA []byte) (int, error)

func (*EntityCondition) MarshalToSizedBuffer

func (m *EntityCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*EntityCondition) ProtoMessage

func (*EntityCondition) ProtoMessage()

func (*EntityCondition) Reset

func (m *EntityCondition) Reset()

func (*EntityCondition) Size

func (m *EntityCondition) Size() (n int)

func (*EntityCondition) String

func (this *EntityCondition) String() string

func (*EntityCondition) Unmarshal

func (m *EntityCondition) Unmarshal(dAtA []byte) error

func (*EntityCondition) XXX_DiscardUnknown

func (m *EntityCondition) XXX_DiscardUnknown()

func (*EntityCondition) XXX_Marshal

func (m *EntityCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*EntityCondition) XXX_Merge

func (m *EntityCondition) XXX_Merge(src proto.Message)

func (*EntityCondition) XXX_Size

func (m *EntityCondition) XXX_Size() int

func (*EntityCondition) XXX_Unmarshal

func (m *EntityCondition) XXX_Unmarshal(b []byte) error

type EntityConditionType

type EntityConditionType string

Condition on the entity

const (
	EntityReady EntityConditionType = "Ready"
	EntitySaved EntityConditionType = "Saved"
)

type EntityList

type EntityList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []Entity `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true EntityList contains a list of Entities

func (*EntityList) DeepCopy

func (in *EntityList) DeepCopy() *EntityList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntityList.

func (*EntityList) DeepCopyInto

func (in *EntityList) DeepCopyInto(out *EntityList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*EntityList) DeepCopyObject

func (in *EntityList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*EntityList) Descriptor

func (*EntityList) Descriptor() ([]byte, []int)

func (*EntityList) Marshal

func (m *EntityList) Marshal() (dAtA []byte, err error)

func (*EntityList) MarshalTo

func (m *EntityList) MarshalTo(dAtA []byte) (int, error)

func (*EntityList) MarshalToSizedBuffer

func (m *EntityList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*EntityList) ProtoMessage

func (*EntityList) ProtoMessage()

func (*EntityList) Reset

func (m *EntityList) Reset()

func (*EntityList) Size

func (m *EntityList) Size() (n int)

func (*EntityList) String

func (this *EntityList) String() string

func (*EntityList) Unmarshal

func (m *EntityList) Unmarshal(dAtA []byte) error

func (*EntityList) XXX_DiscardUnknown

func (m *EntityList) XXX_DiscardUnknown()

func (*EntityList) XXX_Marshal

func (m *EntityList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*EntityList) XXX_Merge

func (m *EntityList) XXX_Merge(src proto.Message)

func (*EntityList) XXX_Size

func (m *EntityList) XXX_Size() int

func (*EntityList) XXX_Unmarshal

func (m *EntityList) XXX_Unmarshal(b []byte) error

type EntitySpec

type EntitySpec struct {
	// The product version of the entity
	// +kubebuilder:default:=""
	VersionName *string `json:"versionName" protobuf:"bytes,1,opt,name=versionName"`
	// Description of the entity
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,2,opt,name=description"`
	// Keys are the features that form a unique key for the entity.
	// +kubebuilder:validation:Optional
	Keys []string `json:"keys,omitempty" protobuf:"bytes,3,rep,name=keys"`
	// Owner of this Entity
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,4,opt,name=owner"`
}

EntitySpec contains the desired state of an Entity.
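
A minimal construction sketch, written as if inside this package; the entity name, namespace, and key column are illustrative:

func newCustomerEntity() *Entity {
	version := "v0.0.1" // illustrative DataProductVersion name
	description := "customer entity keyed by customer id"
	owner := "no-one"

	return &Entity{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "customer",
			Namespace: "iris-product", // illustrative DataProduct namespace
		},
		Spec: EntitySpec{
			VersionName: &version,
			Description: &description,
			Keys:        []string{"customer_id"},
			Owner:       &owner,
		},
	}
}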

func (*EntitySpec) DeepCopy

func (in *EntitySpec) DeepCopy() *EntitySpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntitySpec.

func (*EntitySpec) DeepCopyInto

func (in *EntitySpec) DeepCopyInto(out *EntitySpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*EntitySpec) Descriptor

func (*EntitySpec) Descriptor() ([]byte, []int)

func (*EntitySpec) Marshal

func (m *EntitySpec) Marshal() (dAtA []byte, err error)

func (*EntitySpec) MarshalTo

func (m *EntitySpec) MarshalTo(dAtA []byte) (int, error)

func (*EntitySpec) MarshalToSizedBuffer

func (m *EntitySpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*EntitySpec) ProtoMessage

func (*EntitySpec) ProtoMessage()

func (*EntitySpec) Reset

func (m *EntitySpec) Reset()

func (*EntitySpec) Size

func (m *EntitySpec) Size() (n int)

func (*EntitySpec) String

func (this *EntitySpec) String() string

func (*EntitySpec) Unmarshal

func (m *EntitySpec) Unmarshal(dAtA []byte) error

func (*EntitySpec) XXX_DiscardUnknown

func (m *EntitySpec) XXX_DiscardUnknown()

func (*EntitySpec) XXX_Marshal

func (m *EntitySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*EntitySpec) XXX_Merge

func (m *EntitySpec) XXX_Merge(src proto.Message)

func (*EntitySpec) XXX_Size

func (m *EntitySpec) XXX_Size() int

func (*EntitySpec) XXX_Unmarshal

func (m *EntitySpec) XXX_Unmarshal(b []byte) error

type EntityStatus

type EntityStatus struct {
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`

	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,2,opt,name=lastUpdated"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []EntityCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,3,rep,name=conditions"`
}

EntityStatus defines the observed state of Entity

func (*EntityStatus) DeepCopy

func (in *EntityStatus) DeepCopy() *EntityStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EntityStatus.

func (*EntityStatus) DeepCopyInto

func (in *EntityStatus) DeepCopyInto(out *EntityStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*EntityStatus) Descriptor

func (*EntityStatus) Descriptor() ([]byte, []int)

func (*EntityStatus) Marshal

func (m *EntityStatus) Marshal() (dAtA []byte, err error)

func (*EntityStatus) MarshalTo

func (m *EntityStatus) MarshalTo(dAtA []byte) (int, error)

func (*EntityStatus) MarshalToSizedBuffer

func (m *EntityStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*EntityStatus) ProtoMessage

func (*EntityStatus) ProtoMessage()

func (*EntityStatus) Reset

func (m *EntityStatus) Reset()

func (*EntityStatus) Size

func (m *EntityStatus) Size() (n int)

func (*EntityStatus) String

func (this *EntityStatus) String() string

func (*EntityStatus) Unmarshal

func (m *EntityStatus) Unmarshal(dAtA []byte) error

func (*EntityStatus) XXX_DiscardUnknown

func (m *EntityStatus) XXX_DiscardUnknown()

func (*EntityStatus) XXX_Marshal

func (m *EntityStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*EntityStatus) XXX_Merge

func (m *EntityStatus) XXX_Merge(src proto.Message)

func (*EntityStatus) XXX_Size

func (m *EntityStatus) XXX_Size() int

func (*EntityStatus) XXX_Unmarshal

func (m *EntityStatus) XXX_Unmarshal(b []byte) error

type EscapeChar

type EscapeChar string

+kubebuilder:validation:Enum="single-quote";"double-quote";"tilda";"none"

const (
	SingleEscapeChar EscapeChar = "single-quote"
	DoubleEscapeChar EscapeChar = "double-quote"
	TildaEscapeChar  EscapeChar = "tilda"
	NoneEscapeChar   EscapeChar = "none"
)

type ExcelNotebookSpec

type ExcelNotebookSpec struct {
	// Indicates if the excel reader should use the first sheet that contains data
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	FirstSheetWithData *bool `json:"firstSheetWithData,omitempty" protobuf:"varint,1,opt,name=firstSheetWithData"`
	// The name of the sheet that exists in the excel file to read data from
	// +kubebuilder:validation:Optional
	SheetName *string `json:"sheetName,omitempty" protobuf:"bytes,2,opt,name=sheetName"`
	// The index of the sheet in the excel file to read data from
	// +kubebuilder:validation:Optional
	SheetIndex *int32 `json:"sheetIndex,omitempty" protobuf:"varint,3,opt,name=sheetIndex"`
	// The position of the row that contains the column names (i.e. the header)
	// +kubebuilder:validation:Optional
	ColumnNamesRow *int32 `json:"columnNameRow,omitempty" protobuf:"varint,4,opt,name=columnNameRow"`
	// The specification for the bounds of the data
	// +kubebuilder:validation:Optional
	Data ExcelSheetArea `json:"data,omitempty" protobuf:"bytes,5,opt,name=data"`
}

ExcelNotebookSpec specifies the format of an excel file
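
A sketch of describing the layout of an excel file; the sheet name and bounds are illustrative:

func salesSheetSpec() ExcelNotebookSpec {
	sheet := "sales-2021" // illustrative sheet name
	headerRow := int32(0) // the first row holds the column names
	entire := false
	fromCol, toCol := int32(0), int32(12)
	fromRow, toRow := int32(1), int32(5000)

	return ExcelNotebookSpec{
		SheetName:      &sheet,
		ColumnNamesRow: &headerRow,
		Data: ExcelSheetArea{
			EntireSheet: &entire,
			FromColumn:  &fromCol,
			ToColumn:    &toCol,
			FromRow:     &fromRow,
			ToRow:       &toRow,
		},
	}
}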

func (*ExcelNotebookSpec) DeepCopy

func (in *ExcelNotebookSpec) DeepCopy() *ExcelNotebookSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExcelNotebookSpec.

func (*ExcelNotebookSpec) DeepCopyInto

func (in *ExcelNotebookSpec) DeepCopyInto(out *ExcelNotebookSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ExcelNotebookSpec) Descriptor

func (*ExcelNotebookSpec) Descriptor() ([]byte, []int)

func (*ExcelNotebookSpec) Marshal

func (m *ExcelNotebookSpec) Marshal() (dAtA []byte, err error)

func (*ExcelNotebookSpec) MarshalTo

func (m *ExcelNotebookSpec) MarshalTo(dAtA []byte) (int, error)

func (*ExcelNotebookSpec) MarshalToSizedBuffer

func (m *ExcelNotebookSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ExcelNotebookSpec) ProtoMessage

func (*ExcelNotebookSpec) ProtoMessage()

func (*ExcelNotebookSpec) Reset

func (m *ExcelNotebookSpec) Reset()

func (*ExcelNotebookSpec) Size

func (m *ExcelNotebookSpec) Size() (n int)

func (*ExcelNotebookSpec) String

func (this *ExcelNotebookSpec) String() string

func (*ExcelNotebookSpec) Unmarshal

func (m *ExcelNotebookSpec) Unmarshal(dAtA []byte) error

func (*ExcelNotebookSpec) XXX_DiscardUnknown

func (m *ExcelNotebookSpec) XXX_DiscardUnknown()

func (*ExcelNotebookSpec) XXX_Marshal

func (m *ExcelNotebookSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ExcelNotebookSpec) XXX_Merge

func (m *ExcelNotebookSpec) XXX_Merge(src proto.Message)

func (*ExcelNotebookSpec) XXX_Size

func (m *ExcelNotebookSpec) XXX_Size() int

func (*ExcelNotebookSpec) XXX_Unmarshal

func (m *ExcelNotebookSpec) XXX_Unmarshal(b []byte) error

type ExcelSheetArea

type ExcelSheetArea struct {
	// Indicates if the excel reader should read the entire sheet; if false, it will read only within the bounds
	// specified by the `To` and `From` fields of the ExcelSheetArea
	// +kubebuilder:default:=false
	EntireSheet *bool `json:"entireSheet,omitempty" protobuf:"varint,1,opt,name=entireSheet"`
	// If reading part of the excel sheet, start with the column in this position
	FromColumn *int32 `json:"fromColumn,omitempty" protobuf:"varint,2,opt,name=fromColumn"`
	// If reading part of the excel sheet, end with the column in this position
	ToColumn *int32 `json:"toColumn,omitempty" protobuf:"varint,3,opt,name=toColumn"`
	// If reading part of the excel sheet, start with the row in this position
	FromRow *int32 `json:"fromRow,omitempty" protobuf:"varint,4,opt,name=fromRow"`
	// If reading part of the excel sheet, end with the row in this position
	ToRow *int32 `json:"toRow,omitempty" protobuf:"varint,5,opt,name=toRow"`
}

ExcelSheetArea specifies the bounds of the data within an excel sheet

func (*ExcelSheetArea) DeepCopy

func (in *ExcelSheetArea) DeepCopy() *ExcelSheetArea

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExcelSheetArea.

func (*ExcelSheetArea) DeepCopyInto

func (in *ExcelSheetArea) DeepCopyInto(out *ExcelSheetArea)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ExcelSheetArea) Descriptor

func (*ExcelSheetArea) Descriptor() ([]byte, []int)

func (*ExcelSheetArea) Marshal

func (m *ExcelSheetArea) Marshal() (dAtA []byte, err error)

func (*ExcelSheetArea) MarshalTo

func (m *ExcelSheetArea) MarshalTo(dAtA []byte) (int, error)

func (*ExcelSheetArea) MarshalToSizedBuffer

func (m *ExcelSheetArea) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ExcelSheetArea) ProtoMessage

func (*ExcelSheetArea) ProtoMessage()

func (*ExcelSheetArea) Reset

func (m *ExcelSheetArea) Reset()

func (*ExcelSheetArea) Size

func (m *ExcelSheetArea) Size() (n int)

func (*ExcelSheetArea) String

func (this *ExcelSheetArea) String() string

func (*ExcelSheetArea) Unmarshal

func (m *ExcelSheetArea) Unmarshal(dAtA []byte) error

func (*ExcelSheetArea) XXX_DiscardUnknown

func (m *ExcelSheetArea) XXX_DiscardUnknown()

func (*ExcelSheetArea) XXX_Marshal

func (m *ExcelSheetArea) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ExcelSheetArea) XXX_Merge

func (m *ExcelSheetArea) XXX_Merge(src proto.Message)

func (*ExcelSheetArea) XXX_Size

func (m *ExcelSheetArea) XXX_Size() int

func (*ExcelSheetArea) XXX_Unmarshal

func (m *ExcelSheetArea) XXX_Unmarshal(b []byte) error

type Feature

type Feature struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              FeatureSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status FeatureStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

Feature represents a single feature in the feature store. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=features,singular=feature,categories={data,modela} +kubebuilder:subresource:status

func ParseFeature

func ParseFeature(content string, user string, commit string) (*Feature, error)

Parse a Feature from its serialized content

func ParseFeatureYaml

func ParseFeatureYaml(content []byte) (*Feature, error)
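
A short usage sketch of ParseFeatureYaml, assuming the manifest is stored on disk as YAML; the file name and the import path are illustrative assumptions.

package main

import (
	"fmt"
	"os"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	// "feature.yaml" is a hypothetical manifest file.
	content, err := os.ReadFile("feature.yaml")
	if err != nil {
		panic(err)
	}
	feature, err := data.ParseFeatureYaml(content)
	if err != nil {
		panic(err)
	}
	fmt.Println(feature.Key(), feature.IsReady())
}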

func (*Feature) AddConfiditions

func (feature *Feature) AddConfiditions()

func (*Feature) AddFinalizer

func (feature *Feature) AddFinalizer()

func (*Feature) Age

func (feature *Feature) Age() string

func (*Feature) Archived

func (feature *Feature) Archived() bool

func (*Feature) CreateOrUpdateCond

func (feature *Feature) CreateOrUpdateCond(cond FeatureCondition)

Merge or update the condition

func (*Feature) DeepCopy

func (in *Feature) DeepCopy() *Feature

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Feature.

func (*Feature) DeepCopyInto

func (in *Feature) DeepCopyInto(out *Feature)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Feature) DeepCopyObject

func (in *Feature) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Feature) Default

func (feature *Feature) Default()

No defaults are applied in the current release

func (*Feature) Descriptor

func (*Feature) Descriptor() ([]byte, []int)

func (*Feature) GetCond

func (feature *Feature) GetCond(t FeatureConditionType) FeatureCondition

func (*Feature) GetCondIdx

func (feature *Feature) GetCondIdx(t FeatureConditionType) int

func (*Feature) HasFinalizer

func (feature *Feature) HasFinalizer() bool

func (*Feature) IsGitObj

func (feature *Feature) IsGitObj() bool

func (*Feature) IsReady

func (feature *Feature) IsReady() bool

func (*Feature) Key

func (feature *Feature) Key() string

func (*Feature) LabelWithCommit

func (feature *Feature) LabelWithCommit(commit string, uname string, branch string)

func (*Feature) MarkArchived

func (feature *Feature) MarkArchived()

func (*Feature) MarkReady

func (feature *Feature) MarkReady()

func (*Feature) Marshal

func (m *Feature) Marshal() (dAtA []byte, err error)

func (*Feature) MarshalTo

func (m *Feature) MarshalTo(dAtA []byte) (int, error)

func (*Feature) MarshalToSizedBuffer

func (m *Feature) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Feature) ProtoMessage

func (*Feature) ProtoMessage()

func (*Feature) RemoveFinalizer

func (feature *Feature) RemoveFinalizer()

func (*Feature) RepEntry

func (feature *Feature) RepEntry() (string, error)

func (*Feature) RepPath

func (feature *Feature) RepPath(root string) (string, error)

Return the on-disk location of the object's representation

func (*Feature) Reset

func (m *Feature) Reset()

func (*Feature) SetChanged

func (feature *Feature) SetChanged()

func (*Feature) SetupWebhookWithManager

func (in *Feature) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Feature) Size

func (m *Feature) Size() (n int)

func (*Feature) String

func (this *Feature) String() string

func (*Feature) ToYamlFile

func (feature *Feature) ToYamlFile() ([]byte, error)

func (*Feature) Unmarshal

func (m *Feature) Unmarshal(dAtA []byte) error

func (*Feature) ValidateCreate

func (feature *Feature) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Feature) ValidateDelete

func (r *Feature) ValidateDelete() error

func (*Feature) ValidateUpdate

func (feature *Feature) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*Feature) XXX_DiscardUnknown

func (m *Feature) XXX_DiscardUnknown()

func (*Feature) XXX_Marshal

func (m *Feature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Feature) XXX_Merge

func (m *Feature) XXX_Merge(src proto.Message)

func (*Feature) XXX_Size

func (m *Feature) XXX_Size() int

func (*Feature) XXX_Unmarshal

func (m *Feature) XXX_Unmarshal(b []byte) error

type FeatureAggrSpec

type FeatureAggrSpec struct {
	// The name of the column to aggregate
	Column string `json:"column,omitempty" protobuf:"bytes,1,opt,name=column"`
	// The aggregation functions to apply
	Functions []string `json:"functions,omitempty" protobuf:"bytes,2,rep,name=functions"`
	// The list of window functions
	Windows []string `json:"windows,omitempty" protobuf:"bytes,3,rep,name=windows"`
}
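
A hedged example of a FeatureAggrSpec literal; the specific function and window values are assumptions, since the accepted names are not documented in this section.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	// Aggregate the "amount" column; the function and window names are assumed.
	aggr := data.FeatureAggrSpec{
		Column:    "amount",
		Functions: []string{"sum", "avg"},
		Windows:   []string{"1h", "24h"},
	}
	fmt.Println(aggr.String())
}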

func (*FeatureAggrSpec) DeepCopy

func (in *FeatureAggrSpec) DeepCopy() *FeatureAggrSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureAggrSpec.

func (*FeatureAggrSpec) DeepCopyInto

func (in *FeatureAggrSpec) DeepCopyInto(out *FeatureAggrSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureAggrSpec) Descriptor

func (*FeatureAggrSpec) Descriptor() ([]byte, []int)

func (*FeatureAggrSpec) Marshal

func (m *FeatureAggrSpec) Marshal() (dAtA []byte, err error)

func (*FeatureAggrSpec) MarshalTo

func (m *FeatureAggrSpec) MarshalTo(dAtA []byte) (int, error)

func (*FeatureAggrSpec) MarshalToSizedBuffer

func (m *FeatureAggrSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureAggrSpec) ProtoMessage

func (*FeatureAggrSpec) ProtoMessage()

func (*FeatureAggrSpec) Reset

func (m *FeatureAggrSpec) Reset()

func (*FeatureAggrSpec) Size

func (m *FeatureAggrSpec) Size() (n int)

func (*FeatureAggrSpec) String

func (this *FeatureAggrSpec) String() string

func (*FeatureAggrSpec) Unmarshal

func (m *FeatureAggrSpec) Unmarshal(dAtA []byte) error

func (*FeatureAggrSpec) XXX_DiscardUnknown

func (m *FeatureAggrSpec) XXX_DiscardUnknown()

func (*FeatureAggrSpec) XXX_Marshal

func (m *FeatureAggrSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureAggrSpec) XXX_Merge

func (m *FeatureAggrSpec) XXX_Merge(src proto.Message)

func (*FeatureAggrSpec) XXX_Size

func (m *FeatureAggrSpec) XXX_Size() int

func (*FeatureAggrSpec) XXX_Unmarshal

func (m *FeatureAggrSpec) XXX_Unmarshal(b []byte) error

type FeatureCondition

type FeatureCondition struct {
	// Type of feature condition.
	Type FeatureConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=FeatureConditionType"`
	// Status of the condition, one of True, False, Unknown
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

FeatureCondition describes the state of a Feature at a certain point.

func (*FeatureCondition) DeepCopy

func (in *FeatureCondition) DeepCopy() *FeatureCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureCondition.

func (*FeatureCondition) DeepCopyInto

func (in *FeatureCondition) DeepCopyInto(out *FeatureCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureCondition) Descriptor

func (*FeatureCondition) Descriptor() ([]byte, []int)

func (*FeatureCondition) Marshal

func (m *FeatureCondition) Marshal() (dAtA []byte, err error)

func (*FeatureCondition) MarshalTo

func (m *FeatureCondition) MarshalTo(dAtA []byte) (int, error)

func (*FeatureCondition) MarshalToSizedBuffer

func (m *FeatureCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureCondition) ProtoMessage

func (*FeatureCondition) ProtoMessage()

func (*FeatureCondition) Reset

func (m *FeatureCondition) Reset()

func (*FeatureCondition) Size

func (m *FeatureCondition) Size() (n int)

func (*FeatureCondition) String

func (this *FeatureCondition) String() string

func (*FeatureCondition) Unmarshal

func (m *FeatureCondition) Unmarshal(dAtA []byte) error

func (*FeatureCondition) XXX_DiscardUnknown

func (m *FeatureCondition) XXX_DiscardUnknown()

func (*FeatureCondition) XXX_Marshal

func (m *FeatureCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureCondition) XXX_Merge

func (m *FeatureCondition) XXX_Merge(src proto.Message)

func (*FeatureCondition) XXX_Size

func (m *FeatureCondition) XXX_Size() int

func (*FeatureCondition) XXX_Unmarshal

func (m *FeatureCondition) XXX_Unmarshal(b []byte) error

type FeatureConditionType

type FeatureConditionType string

FeatureConditionType is the condition type of a Feature

const (
	FeatureReady FeatureConditionType = "Ready"
	FeatureSaved FeatureConditionType = "Saved"
)
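
A minimal sketch of the condition helpers documented above, assuming the same import path as the earlier sketches: merge a Ready condition into a Feature, then read it back.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
	v1 "k8s.io/api/core/v1"
)

func main() {
	var feature data.Feature

	// Merge a Ready condition into the feature, then query it back.
	feature.CreateOrUpdateCond(data.FeatureCondition{
		Type:   data.FeatureReady,
		Status: v1.ConditionTrue,
	})
	cond := feature.GetCond(data.FeatureReady)
	fmt.Println(cond.Status, feature.IsReady())
}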

type FeatureHistogram

type FeatureHistogram struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              FeatureHistogramSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status FeatureHistogramStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

FeatureHistogram represents the histograms of one or more columns from a dataset or predictor. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Column",type="string",JSONPath=".spec.column" +kubebuilder:printcolumn:name="Dataset",type="string",JSONPath=".spec.dataset" +kubebuilder:printcolumn:name="Bins",type="number",JSONPath=".spec.bins" +kubebuilder:printcolumn:name="Missing",type="number",JSONPath=".status.missing" +kubebuilder:printcolumn:name="Invalid",type="number",JSONPath=".status.invalid" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=featurehistograms,singular=featurehistogram,categories={data,modela} +kubebuilder:subresource:status

func ParseFeatureHistogram

func ParseFeatureHistogram(content string, user string, commit string) (*FeatureHistogram, error)

Parse a FeatureHistogram from its serialized content

func ParseFeatureHistogramYaml

func ParseFeatureHistogramYaml(content []byte) (*FeatureHistogram, error)

func (*FeatureHistogram) AddConditions added in v0.4.514

func (feature *FeatureHistogram) AddConditions()

func (*FeatureHistogram) AddFinalizer

func (feature *FeatureHistogram) AddFinalizer()

func (*FeatureHistogram) Age

func (feature *FeatureHistogram) Age() string

func (*FeatureHistogram) Archived

func (feature *FeatureHistogram) Archived() bool

func (*FeatureHistogram) CreateOrUpdateCond

func (feature *FeatureHistogram) CreateOrUpdateCond(cond FeatureHistogramCondition)

Merge or update the condition

func (*FeatureHistogram) DeepCopy

func (in *FeatureHistogram) DeepCopy() *FeatureHistogram

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogram.

func (*FeatureHistogram) DeepCopyInto

func (in *FeatureHistogram) DeepCopyInto(out *FeatureHistogram)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureHistogram) DeepCopyObject

func (in *FeatureHistogram) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeatureHistogram) Default

func (feature *FeatureHistogram) Default()

No defaults are applied in the current release

func (*FeatureHistogram) Descriptor

func (*FeatureHistogram) Descriptor() ([]byte, []int)

func (*FeatureHistogram) GetCond

func (*FeatureHistogram) GetCondIdx

func (feature *FeatureHistogram) GetCondIdx(t FeatureHistogramConditionType) int

func (*FeatureHistogram) HasFinalizer

func (feature *FeatureHistogram) HasFinalizer() bool

func (*FeatureHistogram) IsGitObj

func (feature *FeatureHistogram) IsGitObj() bool

func (*FeatureHistogram) IsReady

func (feature *FeatureHistogram) IsReady() bool

func (*FeatureHistogram) Key

func (feature *FeatureHistogram) Key() string

func (*FeatureHistogram) LabelWithCommit

func (feature *FeatureHistogram) LabelWithCommit(commit string, uname string, branch string)

func (*FeatureHistogram) MarkArchived

func (feature *FeatureHistogram) MarkArchived()

func (*FeatureHistogram) MarkReady

func (feature *FeatureHistogram) MarkReady()

func (*FeatureHistogram) Marshal

func (m *FeatureHistogram) Marshal() (dAtA []byte, err error)

func (*FeatureHistogram) MarshalTo

func (m *FeatureHistogram) MarshalTo(dAtA []byte) (int, error)

func (*FeatureHistogram) MarshalToSizedBuffer

func (m *FeatureHistogram) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureHistogram) ProtoMessage

func (*FeatureHistogram) ProtoMessage()

func (*FeatureHistogram) RemoveFinalizer

func (feature *FeatureHistogram) RemoveFinalizer()

func (*FeatureHistogram) RepEntry

func (feature *FeatureHistogram) RepEntry() (string, error)

func (*FeatureHistogram) RepPath

func (feature *FeatureHistogram) RepPath(root string) (string, error)

Return the on-disk location of the object's representation

func (*FeatureHistogram) Reset

func (m *FeatureHistogram) Reset()

func (*FeatureHistogram) SetChanged

func (feature *FeatureHistogram) SetChanged()

func (*FeatureHistogram) SetupWebhookWithManager

func (in *FeatureHistogram) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*FeatureHistogram) Size

func (m *FeatureHistogram) Size() (n int)

func (*FeatureHistogram) String

func (this *FeatureHistogram) String() string

func (*FeatureHistogram) ToYamlFile

func (feature *FeatureHistogram) ToYamlFile() ([]byte, error)

func (*FeatureHistogram) Unmarshal

func (m *FeatureHistogram) Unmarshal(dAtA []byte) error

func (*FeatureHistogram) ValidateCreate

func (feature *FeatureHistogram) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*FeatureHistogram) ValidateDelete

func (r *FeatureHistogram) ValidateDelete() error

func (*FeatureHistogram) ValidateUpdate

func (feature *FeatureHistogram) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*FeatureHistogram) XXX_DiscardUnknown

func (m *FeatureHistogram) XXX_DiscardUnknown()

func (*FeatureHistogram) XXX_Marshal

func (m *FeatureHistogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureHistogram) XXX_Merge

func (m *FeatureHistogram) XXX_Merge(src proto.Message)

func (*FeatureHistogram) XXX_Size

func (m *FeatureHistogram) XXX_Size() int

func (*FeatureHistogram) XXX_Unmarshal

func (m *FeatureHistogram) XXX_Unmarshal(b []byte) error

type FeatureHistogramCondition

type FeatureHistogramCondition struct {
	// Type of feature histogram condition.
	Type FeatureHistogramConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=FeatureHistogramConditionType"`
	// Status of the condition, one of True, False, Unknown
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

FeatureHistogramCondition describes the state of a FeatureHistogram at a certain point.

func (*FeatureHistogramCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramCondition.

func (*FeatureHistogramCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureHistogramCondition) Descriptor

func (*FeatureHistogramCondition) Descriptor() ([]byte, []int)

func (*FeatureHistogramCondition) Marshal

func (m *FeatureHistogramCondition) Marshal() (dAtA []byte, err error)

func (*FeatureHistogramCondition) MarshalTo

func (m *FeatureHistogramCondition) MarshalTo(dAtA []byte) (int, error)

func (*FeatureHistogramCondition) MarshalToSizedBuffer

func (m *FeatureHistogramCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureHistogramCondition) ProtoMessage

func (*FeatureHistogramCondition) ProtoMessage()

func (*FeatureHistogramCondition) Reset

func (m *FeatureHistogramCondition) Reset()

func (*FeatureHistogramCondition) Size

func (m *FeatureHistogramCondition) Size() (n int)

func (*FeatureHistogramCondition) String

func (this *FeatureHistogramCondition) String() string

func (*FeatureHistogramCondition) Unmarshal

func (m *FeatureHistogramCondition) Unmarshal(dAtA []byte) error

func (*FeatureHistogramCondition) XXX_DiscardUnknown

func (m *FeatureHistogramCondition) XXX_DiscardUnknown()

func (*FeatureHistogramCondition) XXX_Marshal

func (m *FeatureHistogramCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureHistogramCondition) XXX_Merge

func (m *FeatureHistogramCondition) XXX_Merge(src proto.Message)

func (*FeatureHistogramCondition) XXX_Size

func (m *FeatureHistogramCondition) XXX_Size() int

func (*FeatureHistogramCondition) XXX_Unmarshal

func (m *FeatureHistogramCondition) XXX_Unmarshal(b []byte) error

type FeatureHistogramConditionType

type FeatureHistogramConditionType string

FeatureHistogramConditionType is the condition type of a FeatureHistogram

const (
	FeatureHistogramReady FeatureHistogramConditionType = "Ready"
	FeatureHistogramSaved FeatureHistogramConditionType = "Saved"
)

type FeatureHistogramList

type FeatureHistogramList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []FeatureHistogram `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true FeatureHistogramList contains a list of FeatureHistogram objects

func (*FeatureHistogramList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramList.

func (*FeatureHistogramList) DeepCopyInto

func (in *FeatureHistogramList) DeepCopyInto(out *FeatureHistogramList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureHistogramList) DeepCopyObject

func (in *FeatureHistogramList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeatureHistogramList) Descriptor

func (*FeatureHistogramList) Descriptor() ([]byte, []int)

func (*FeatureHistogramList) Marshal

func (m *FeatureHistogramList) Marshal() (dAtA []byte, err error)

func (*FeatureHistogramList) MarshalTo

func (m *FeatureHistogramList) MarshalTo(dAtA []byte) (int, error)

func (*FeatureHistogramList) MarshalToSizedBuffer

func (m *FeatureHistogramList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureHistogramList) ProtoMessage

func (*FeatureHistogramList) ProtoMessage()

func (*FeatureHistogramList) Reset

func (m *FeatureHistogramList) Reset()

func (*FeatureHistogramList) Size

func (m *FeatureHistogramList) Size() (n int)

func (*FeatureHistogramList) String

func (this *FeatureHistogramList) String() string

func (*FeatureHistogramList) Unmarshal

func (m *FeatureHistogramList) Unmarshal(dAtA []byte) error

func (*FeatureHistogramList) XXX_DiscardUnknown

func (m *FeatureHistogramList) XXX_DiscardUnknown()

func (*FeatureHistogramList) XXX_Marshal

func (m *FeatureHistogramList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureHistogramList) XXX_Merge

func (m *FeatureHistogramList) XXX_Merge(src proto.Message)

func (*FeatureHistogramList) XXX_Size

func (m *FeatureHistogramList) XXX_Size() int

func (*FeatureHistogramList) XXX_Unmarshal

func (m *FeatureHistogramList) XXX_Unmarshal(b []byte) error

type FeatureHistogramSpec

type FeatureHistogramSpec struct {
	// The feature owner
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:="no-one"
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// The product version for the feature.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// A description of the feature histogram
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The list of columns for which to generate histograms.
	// +kubebuilder:validation:Optional
	Columns []string `json:"columns,omitempty" protobuf:"bytes,5,opt,name=columns"`
	// A reference to the dataset or predictor that contains the columns of this histogram
	// +kubebuilder:validation:Optional
	SourceRef *v1.ObjectReference `json:"sourceRef,omitempty" protobuf:"bytes,6,opt,name=sourceRef"`
	// If true, this is a training dataset feature histogram. If false, the histogram was generated during serving.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Training *bool `json:"training,omitempty" protobuf:"varint,7,opt,name=training"`
	// If true, this is a feature histogram of the target column
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Target *bool `json:"target,omitempty" protobuf:"varint,8,opt,name=target"`
	// If true, this is an active feature histogram that is live-updated by the predictorlet
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Active *bool `json:"active,omitempty" protobuf:"varint,9,opt,name=active"`
	// The start time of this feature histogram. For a training dataset histogram, this is set to the creation
	// time of the dataset
	Start *metav1.Time `json:"start,omitempty" protobuf:"bytes,10,opt,name=start"`
	// The end time of the feature histogram. If reached, the predictor will start a new feature histogram
	End *metav1.Time `json:"end,omitempty" protobuf:"bytes,11,opt,name=end"`
	// The histogram to compare against for the data drift calculation
	// +kubebuilder:validation:Optional
	BaseFeatureHistogram v1.ObjectReference `json:"baseFeatureHistogram,omitempty" protobuf:"bytes,12,opt,name=baseFeatureHistogram"`
}

FeatureHistogramSpec contains the desired state of a FeatureHistogram
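
A hedged sketch of a FeatureHistogramSpec literal for a training-dataset histogram; the owner, version, column names, and source reference are illustrative assumptions.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
	v1 "k8s.io/api/core/v1"
)

func strPtr(s string) *string { return &s }
func boolPtr(b bool) *bool    { return &b }

func main() {
	spec := data.FeatureHistogramSpec{
		Owner:       strPtr("account-name"),    // hypothetical owner
		VersionName: strPtr("v0.0.1"),          // hypothetical product version
		Columns:     []string{"age", "income"}, // columns to build histograms for
		SourceRef: &v1.ObjectReference{ // hypothetical dataset reference
			Kind: "Dataset",
			Name: "customer-churn",
		},
		Training: boolPtr(true), // histogram of the training dataset
	}
	fmt.Println(spec.String())
}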

func (*FeatureHistogramSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramSpec.

func (*FeatureHistogramSpec) DeepCopyInto

func (in *FeatureHistogramSpec) DeepCopyInto(out *FeatureHistogramSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureHistogramSpec) Descriptor

func (*FeatureHistogramSpec) Descriptor() ([]byte, []int)

func (*FeatureHistogramSpec) Marshal

func (m *FeatureHistogramSpec) Marshal() (dAtA []byte, err error)

func (*FeatureHistogramSpec) MarshalTo

func (m *FeatureHistogramSpec) MarshalTo(dAtA []byte) (int, error)

func (*FeatureHistogramSpec) MarshalToSizedBuffer

func (m *FeatureHistogramSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureHistogramSpec) ProtoMessage

func (*FeatureHistogramSpec) ProtoMessage()

func (*FeatureHistogramSpec) Reset

func (m *FeatureHistogramSpec) Reset()

func (*FeatureHistogramSpec) Size

func (m *FeatureHistogramSpec) Size() (n int)

func (*FeatureHistogramSpec) String

func (this *FeatureHistogramSpec) String() string

func (*FeatureHistogramSpec) Unmarshal

func (m *FeatureHistogramSpec) Unmarshal(dAtA []byte) error

func (*FeatureHistogramSpec) XXX_DiscardUnknown

func (m *FeatureHistogramSpec) XXX_DiscardUnknown()

func (*FeatureHistogramSpec) XXX_Marshal

func (m *FeatureHistogramSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureHistogramSpec) XXX_Merge

func (m *FeatureHistogramSpec) XXX_Merge(src proto.Message)

func (*FeatureHistogramSpec) XXX_Size

func (m *FeatureHistogramSpec) XXX_Size() int

func (*FeatureHistogramSpec) XXX_Unmarshal

func (m *FeatureHistogramSpec) XXX_Unmarshal(b []byte) error

type FeatureHistogramStatus

type FeatureHistogramStatus struct {
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// The histogram values, one ColumnHistogram per column
	Data []ColumnHistogram `json:"data,omitempty" protobuf:"bytes,2,opt,name=data"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,3,opt,name=lastUpdated"`
	// The calculation of the drift metrics for each column in the histogram
	//+kubebuilder:validation:Optional
	Drift []ColumnDrift `json:"drift,omitempty" protobuf:"bytes,4,opt,name=drift"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []FeatureHistogramCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,5,rep,name=conditions"`
}

FeatureHistogramStatus defines the observed state of FeatureHistogram
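
A small sketch of reading a FeatureHistogramStatus; since the fields of ColumnHistogram and ColumnDrift are not shown in this section, it reports only counts and the last update time.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// summarize prints how many columns have histograms and drift results,
// and when the status was last updated.
func summarize(status data.FeatureHistogramStatus) {
	fmt.Printf("histogram columns: %d, drift results: %d\n", len(status.Data), len(status.Drift))
	if status.LastUpdated != nil {
		fmt.Println("last updated:", status.LastUpdated.Time)
	}
}

func main() {
	summarize(data.FeatureHistogramStatus{})
}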

func (*FeatureHistogramStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureHistogramStatus.

func (*FeatureHistogramStatus) DeepCopyInto

func (in *FeatureHistogramStatus) DeepCopyInto(out *FeatureHistogramStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureHistogramStatus) Descriptor

func (*FeatureHistogramStatus) Descriptor() ([]byte, []int)

func (*FeatureHistogramStatus) Marshal

func (m *FeatureHistogramStatus) Marshal() (dAtA []byte, err error)

func (*FeatureHistogramStatus) MarshalTo

func (m *FeatureHistogramStatus) MarshalTo(dAtA []byte) (int, error)

func (*FeatureHistogramStatus) MarshalToSizedBuffer

func (m *FeatureHistogramStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureHistogramStatus) ProtoMessage

func (*FeatureHistogramStatus) ProtoMessage()

func (*FeatureHistogramStatus) Reset

func (m *FeatureHistogramStatus) Reset()

func (*FeatureHistogramStatus) Size

func (m *FeatureHistogramStatus) Size() (n int)

func (*FeatureHistogramStatus) String

func (this *FeatureHistogramStatus) String() string

func (*FeatureHistogramStatus) Unmarshal

func (m *FeatureHistogramStatus) Unmarshal(dAtA []byte) error

func (*FeatureHistogramStatus) XXX_DiscardUnknown

func (m *FeatureHistogramStatus) XXX_DiscardUnknown()

func (*FeatureHistogramStatus) XXX_Marshal

func (m *FeatureHistogramStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureHistogramStatus) XXX_Merge

func (m *FeatureHistogramStatus) XXX_Merge(src proto.Message)

func (*FeatureHistogramStatus) XXX_Size

func (m *FeatureHistogramStatus) XXX_Size() int

func (*FeatureHistogramStatus) XXX_Unmarshal

func (m *FeatureHistogramStatus) XXX_Unmarshal(b []byte) error

type FeatureList

type FeatureList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []Feature `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true FeatureList contains a list of Feature objects
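
A minimal sketch that filters a FeatureList down to ready items using the Feature.IsReady helper documented above.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// readyFeatures returns the items whose Ready condition is true.
func readyFeatures(list data.FeatureList) []data.Feature {
	var ready []data.Feature
	for i := range list.Items {
		if list.Items[i].IsReady() {
			ready = append(ready, list.Items[i])
		}
	}
	return ready
}

func main() {
	fmt.Println(len(readyFeatures(data.FeatureList{})))
}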

func (*FeatureList) DeepCopy

func (in *FeatureList) DeepCopy() *FeatureList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureList.

func (*FeatureList) DeepCopyInto

func (in *FeatureList) DeepCopyInto(out *FeatureList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureList) DeepCopyObject

func (in *FeatureList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeatureList) Descriptor

func (*FeatureList) Descriptor() ([]byte, []int)

func (*FeatureList) Marshal

func (m *FeatureList) Marshal() (dAtA []byte, err error)

func (*FeatureList) MarshalTo

func (m *FeatureList) MarshalTo(dAtA []byte) (int, error)

func (*FeatureList) MarshalToSizedBuffer

func (m *FeatureList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureList) ProtoMessage

func (*FeatureList) ProtoMessage()

func (*FeatureList) Reset

func (m *FeatureList) Reset()

func (*FeatureList) Size

func (m *FeatureList) Size() (n int)

func (*FeatureList) String

func (this *FeatureList) String() string

func (*FeatureList) Unmarshal

func (m *FeatureList) Unmarshal(dAtA []byte) error

func (*FeatureList) XXX_DiscardUnknown

func (m *FeatureList) XXX_DiscardUnknown()

func (*FeatureList) XXX_Marshal

func (m *FeatureList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureList) XXX_Merge

func (m *FeatureList) XXX_Merge(src proto.Message)

func (*FeatureList) XXX_Size

func (m *FeatureList) XXX_Size() int

func (*FeatureList) XXX_Unmarshal

func (m *FeatureList) XXX_Unmarshal(b []byte) error

type FeaturePipeline

type FeaturePipeline struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              FeaturePipelineSpec   `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	Status            FeaturePipelineStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

FeaturePipeline represents the processing of features in the feature store. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Schedule",type="string",JSONPath=".spec.schedule",description="" +kubebuilder:printcolumn:name="Last Run",type="date",JSONPath=".status.lastRun.at",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=featurepipelines,singular=featurepipeline,shortName="fp",categories={data,modela} +kubebuilder:subresource:status

func ParseFeaturePipeline

func ParseFeaturePipeline(content string, user string, commit string) (*FeaturePipeline, error)

Parse a FeaturePipeline from its serialized content

func ParseFeaturesetYaml

func ParseFeaturesetYaml(content []byte) (*FeaturePipeline, error)

func (*FeaturePipeline) AddConfiditions

func (feature *FeaturePipeline) AddConfiditions()

func (*FeaturePipeline) AddFinalizer

func (feature *FeaturePipeline) AddFinalizer()

func (*FeaturePipeline) Age

func (feature *FeaturePipeline) Age() string

func (*FeaturePipeline) Archived

func (pipeline *FeaturePipeline) Archived() bool

func (*FeaturePipeline) CreateOrUpdateCond

func (feature *FeaturePipeline) CreateOrUpdateCond(cond FeaturePipelineCondition)

Merge or update the condition

func (*FeaturePipeline) DeepCopy

func (in *FeaturePipeline) DeepCopy() *FeaturePipeline

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipeline.

func (*FeaturePipeline) DeepCopyInto

func (in *FeaturePipeline) DeepCopyInto(out *FeaturePipeline)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipeline) DeepCopyObject

func (in *FeaturePipeline) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeaturePipeline) Default

func (feature *FeaturePipeline) Default()

No defaults are applied in the current release

func (*FeaturePipeline) Descriptor

func (*FeaturePipeline) Descriptor() ([]byte, []int)

func (*FeaturePipeline) GetCond

func (*FeaturePipeline) GetCondIdx

func (feature *FeaturePipeline) GetCondIdx(t FeaturePipelineConditionType) int

func (*FeaturePipeline) HasFinalizer

func (feature *FeaturePipeline) HasFinalizer() bool

func (*FeaturePipeline) IsGitObj

func (feature *FeaturePipeline) IsGitObj() bool

func (*FeaturePipeline) IsReady

func (feature *FeaturePipeline) IsReady() bool

func (*FeaturePipeline) Key

func (feature *FeaturePipeline) Key() string

func (*FeaturePipeline) LabelWithCommit

func (feature *FeaturePipeline) LabelWithCommit(commit string, uname string, branch string)

func (*FeaturePipeline) MarkArchived

func (pipeline *FeaturePipeline) MarkArchived()

func (*FeaturePipeline) MarkReady

func (pipeline *FeaturePipeline) MarkReady()

func (*FeaturePipeline) Marshal

func (m *FeaturePipeline) Marshal() (dAtA []byte, err error)

func (*FeaturePipeline) MarshalTo

func (m *FeaturePipeline) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipeline) MarshalToSizedBuffer

func (m *FeaturePipeline) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipeline) ProtoMessage

func (*FeaturePipeline) ProtoMessage()

func (*FeaturePipeline) RemoveFinalizer

func (feature *FeaturePipeline) RemoveFinalizer()

func (*FeaturePipeline) RepEntry

func (feature *FeaturePipeline) RepEntry() (string, error)

func (*FeaturePipeline) RepPath

func (feature *FeaturePipeline) RepPath(root string) (string, error)

Return the on-disk location of the object's representation

func (*FeaturePipeline) Reset

func (m *FeaturePipeline) Reset()

func (*FeaturePipeline) SetChanged

func (feature *FeaturePipeline) SetChanged()

func (*FeaturePipeline) SetupWebhookWithManager

func (in *FeaturePipeline) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*FeaturePipeline) Size

func (m *FeaturePipeline) Size() (n int)

func (*FeaturePipeline) String

func (this *FeaturePipeline) String() string

func (*FeaturePipeline) ToYamlFile

func (feature *FeaturePipeline) ToYamlFile() ([]byte, error)

func (*FeaturePipeline) Unmarshal

func (m *FeaturePipeline) Unmarshal(dAtA []byte) error

func (*FeaturePipeline) UpdateRunStatus added in v0.4.612

func (in *FeaturePipeline) UpdateRunStatus(run FeaturePipelineRun)

func (*FeaturePipeline) ValidateCreate

func (feature *FeaturePipeline) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*FeaturePipeline) ValidateDelete

func (r *FeaturePipeline) ValidateDelete() error

func (*FeaturePipeline) ValidateUpdate

func (feature *FeaturePipeline) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*FeaturePipeline) XXX_DiscardUnknown

func (m *FeaturePipeline) XXX_DiscardUnknown()

func (*FeaturePipeline) XXX_Marshal

func (m *FeaturePipeline) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipeline) XXX_Merge

func (m *FeaturePipeline) XXX_Merge(src proto.Message)

func (*FeaturePipeline) XXX_Size

func (m *FeaturePipeline) XXX_Size() int

func (*FeaturePipeline) XXX_Unmarshal

func (m *FeaturePipeline) XXX_Unmarshal(b []byte) error

type FeaturePipelineCondition

type FeaturePipelineCondition struct {
	// Type of feature pipeline condition.
	Type FeaturePipelineConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=FeatureConditionType"`
	// Status of the condition, one of True, False, Unknown
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

FeaturePipelineCondition describes the state of a FeaturePipeline at a certain point.

func (*FeaturePipelineCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipelineCondition.

func (*FeaturePipelineCondition) DeepCopyInto

func (in *FeaturePipelineCondition) DeepCopyInto(out *FeaturePipelineCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipelineCondition) Descriptor

func (*FeaturePipelineCondition) Descriptor() ([]byte, []int)

func (*FeaturePipelineCondition) Marshal

func (m *FeaturePipelineCondition) Marshal() (dAtA []byte, err error)

func (*FeaturePipelineCondition) MarshalTo

func (m *FeaturePipelineCondition) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipelineCondition) MarshalToSizedBuffer

func (m *FeaturePipelineCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipelineCondition) ProtoMessage

func (*FeaturePipelineCondition) ProtoMessage()

func (*FeaturePipelineCondition) Reset

func (m *FeaturePipelineCondition) Reset()

func (*FeaturePipelineCondition) Size

func (m *FeaturePipelineCondition) Size() (n int)

func (*FeaturePipelineCondition) String

func (this *FeaturePipelineCondition) String() string

func (*FeaturePipelineCondition) Unmarshal

func (m *FeaturePipelineCondition) Unmarshal(dAtA []byte) error

func (*FeaturePipelineCondition) XXX_DiscardUnknown

func (m *FeaturePipelineCondition) XXX_DiscardUnknown()

func (*FeaturePipelineCondition) XXX_Marshal

func (m *FeaturePipelineCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipelineCondition) XXX_Merge

func (m *FeaturePipelineCondition) XXX_Merge(src proto.Message)

func (*FeaturePipelineCondition) XXX_Size

func (m *FeaturePipelineCondition) XXX_Size() int

func (*FeaturePipelineCondition) XXX_Unmarshal

func (m *FeaturePipelineCondition) XXX_Unmarshal(b []byte) error

type FeaturePipelineConditionType

type FeaturePipelineConditionType string

FeaturePipelineConditionType is the condition type of a FeaturePipeline

const (
	FeaturePipelineReady FeaturePipelineConditionType = "Ready"
	FeaturePipelineSaved FeaturePipelineConditionType = "Saved"
)

type FeaturePipelineList

type FeaturePipelineList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []FeaturePipeline `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true FeaturePipelineList contains a list of FeaturePipeline objects

func (*FeaturePipelineList) DeepCopy

func (in *FeaturePipelineList) DeepCopy() *FeaturePipelineList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipelineList.

func (*FeaturePipelineList) DeepCopyInto

func (in *FeaturePipelineList) DeepCopyInto(out *FeaturePipelineList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipelineList) DeepCopyObject

func (in *FeaturePipelineList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeaturePipelineList) Descriptor

func (*FeaturePipelineList) Descriptor() ([]byte, []int)

func (*FeaturePipelineList) Marshal

func (m *FeaturePipelineList) Marshal() (dAtA []byte, err error)

func (*FeaturePipelineList) MarshalTo

func (m *FeaturePipelineList) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipelineList) MarshalToSizedBuffer

func (m *FeaturePipelineList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipelineList) ProtoMessage

func (*FeaturePipelineList) ProtoMessage()

func (*FeaturePipelineList) Reset

func (m *FeaturePipelineList) Reset()

func (*FeaturePipelineList) Size

func (m *FeaturePipelineList) Size() (n int)

func (*FeaturePipelineList) String

func (this *FeaturePipelineList) String() string

func (*FeaturePipelineList) Unmarshal

func (m *FeaturePipelineList) Unmarshal(dAtA []byte) error

func (*FeaturePipelineList) XXX_DiscardUnknown

func (m *FeaturePipelineList) XXX_DiscardUnknown()

func (*FeaturePipelineList) XXX_Marshal

func (m *FeaturePipelineList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipelineList) XXX_Merge

func (m *FeaturePipelineList) XXX_Merge(src proto.Message)

func (*FeaturePipelineList) XXX_Size

func (m *FeaturePipelineList) XXX_Size() int

func (*FeaturePipelineList) XXX_Unmarshal

func (m *FeaturePipelineList) XXX_Unmarshal(b []byte) error

type FeaturePipelineRun

type FeaturePipelineRun struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              FeaturePipelineRunSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status FeaturePipelineRunStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

FeaturePipelineRun represents a single run of a FeaturePipeline. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Pipeline",type="string",JSONPath=".spec.pipelineName" +kubebuilder:printcolumn:name="StartTime",type="date",JSONPath=".status.startTime",priority=1 +kubebuilder:printcolumn:name="CompletionTime",type="date",JSONPath=".status.completionTime",priority=1 +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=featurepipelineruns,shortName=fpr,singular=featurepipelinerun,categories={data,modela} +kubebuilder:subresource:status
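
A minimal sketch exercising the lifecycle helpers documented below; in practice a controller drives these transitions, so this is illustrative only.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func main() {
	var run data.FeaturePipelineRun

	run.MarkRunning()
	fmt.Println("running:", run.IsRunning())

	run.MarkComplete()
	fmt.Println("completed:", run.IsCompleted(), "failed:", run.IsFailed())

	// Summarize the run as a catalog.LastRunStatus.
	fmt.Println(run.RunStatus())
}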

func ParseFeaturePipelineRun

func ParseFeaturePipelineRun(content string, user string, commit string) (*FeaturePipelineRun, error)

Parse a FeaturePipelineRun from its serialized content

func (*FeaturePipelineRun) AddConfiditions

func (run *FeaturePipelineRun) AddConfiditions()

func (*FeaturePipelineRun) AddFinalizer

func (run *FeaturePipelineRun) AddFinalizer()

func (*FeaturePipelineRun) Age

func (run *FeaturePipelineRun) Age() string

func (*FeaturePipelineRun) CreateOrUpdateCond

func (run *FeaturePipelineRun) CreateOrUpdateCond(cond FeaturePipelineRunCondition)

Merge or update the condition

func (*FeaturePipelineRun) DeepCopy

func (in *FeaturePipelineRun) DeepCopy() *FeaturePipelineRun

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipelineRun.

func (*FeaturePipelineRun) DeepCopyInto

func (in *FeaturePipelineRun) DeepCopyInto(out *FeaturePipelineRun)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipelineRun) DeepCopyObject

func (in *FeaturePipelineRun) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeaturePipelineRun) Default

func (run *FeaturePipelineRun) Default()

No defaults are applied in the current release

func (*FeaturePipelineRun) Descriptor

func (*FeaturePipelineRun) Descriptor() ([]byte, []int)

func (*FeaturePipelineRun) GetCondIdx

func (*FeaturePipelineRun) HasFinalizer

func (run *FeaturePipelineRun) HasFinalizer() bool

func (*FeaturePipelineRun) IsCompleted

func (run *FeaturePipelineRun) IsCompleted() bool

func (*FeaturePipelineRun) IsFailed

func (run *FeaturePipelineRun) IsFailed() bool

func (*FeaturePipelineRun) IsGitObj

func (run *FeaturePipelineRun) IsGitObj() bool

func (*FeaturePipelineRun) IsRunning

func (run *FeaturePipelineRun) IsRunning() bool

func (*FeaturePipelineRun) Key

func (run *FeaturePipelineRun) Key() string

func (*FeaturePipelineRun) LabelWithCommit

func (run *FeaturePipelineRun) LabelWithCommit(commit string, uname string, branch string)

func (*FeaturePipelineRun) MarkComplete

func (run *FeaturePipelineRun) MarkComplete()

func (*FeaturePipelineRun) MarkFailed

func (run *FeaturePipelineRun) MarkFailed(err error)

func (*FeaturePipelineRun) MarkReady

func (run *FeaturePipelineRun) MarkReady()

func (*FeaturePipelineRun) MarkRunning

func (run *FeaturePipelineRun) MarkRunning()

func (*FeaturePipelineRun) Marshal

func (m *FeaturePipelineRun) Marshal() (dAtA []byte, err error)

func (*FeaturePipelineRun) MarshalTo

func (m *FeaturePipelineRun) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipelineRun) MarshalToSizedBuffer

func (m *FeaturePipelineRun) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipelineRun) ProtoMessage

func (*FeaturePipelineRun) ProtoMessage()

func (*FeaturePipelineRun) RemoveFinalizer

func (run *FeaturePipelineRun) RemoveFinalizer()

func (*FeaturePipelineRun) Reset

func (m *FeaturePipelineRun) Reset()

func (*FeaturePipelineRun) RunStatus added in v0.4.614

func (run *FeaturePipelineRun) RunStatus() *catalog.LastRunStatus

Return the state of the run as RunStatus

func (*FeaturePipelineRun) SetChanged

func (run *FeaturePipelineRun) SetChanged()

func (*FeaturePipelineRun) SetupWebhookWithManager

func (run *FeaturePipelineRun) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*FeaturePipelineRun) Size

func (m *FeaturePipelineRun) Size() (n int)

func (*FeaturePipelineRun) String

func (this *FeaturePipelineRun) String() string

func (*FeaturePipelineRun) ToYamlFile

func (run *FeaturePipelineRun) ToYamlFile() ([]byte, error)

func (*FeaturePipelineRun) Unmarshal

func (m *FeaturePipelineRun) Unmarshal(dAtA []byte) error

func (*FeaturePipelineRun) ValidateCreate

func (run *FeaturePipelineRun) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*FeaturePipelineRun) ValidateDelete

func (run *FeaturePipelineRun) ValidateDelete() error

func (*FeaturePipelineRun) ValidateUpdate

func (run *FeaturePipelineRun) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*FeaturePipelineRun) XXX_DiscardUnknown

func (m *FeaturePipelineRun) XXX_DiscardUnknown()

func (*FeaturePipelineRun) XXX_Marshal

func (m *FeaturePipelineRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipelineRun) XXX_Merge

func (m *FeaturePipelineRun) XXX_Merge(src proto.Message)

func (*FeaturePipelineRun) XXX_Size

func (m *FeaturePipelineRun) XXX_Size() int

func (*FeaturePipelineRun) XXX_Unmarshal

func (m *FeaturePipelineRun) XXX_Unmarshal(b []byte) error

type FeaturePipelineRunCondition

type FeaturePipelineRunCondition struct {
	// Type of feature pipeline run condition.
	Type FeaturePipelineRunConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=FeatureConditionType"`
	// Status of the condition, one of True, False, Unknown
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

FeaturePipelineRunCondition describes the state of a FeaturePipelineRun at a certain point.

func (*FeaturePipelineRunCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipelineRunCondition.

func (*FeaturePipelineRunCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipelineRunCondition) Descriptor

func (*FeaturePipelineRunCondition) Descriptor() ([]byte, []int)

func (*FeaturePipelineRunCondition) Marshal

func (m *FeaturePipelineRunCondition) Marshal() (dAtA []byte, err error)

func (*FeaturePipelineRunCondition) MarshalTo

func (m *FeaturePipelineRunCondition) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipelineRunCondition) MarshalToSizedBuffer

func (m *FeaturePipelineRunCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipelineRunCondition) ProtoMessage

func (*FeaturePipelineRunCondition) ProtoMessage()

func (*FeaturePipelineRunCondition) Reset

func (m *FeaturePipelineRunCondition) Reset()

func (*FeaturePipelineRunCondition) Size

func (m *FeaturePipelineRunCondition) Size() (n int)

func (*FeaturePipelineRunCondition) String

func (this *FeaturePipelineRunCondition) String() string

func (*FeaturePipelineRunCondition) Unmarshal

func (m *FeaturePipelineRunCondition) Unmarshal(dAtA []byte) error

func (*FeaturePipelineRunCondition) XXX_DiscardUnknown

func (m *FeaturePipelineRunCondition) XXX_DiscardUnknown()

func (*FeaturePipelineRunCondition) XXX_Marshal

func (m *FeaturePipelineRunCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipelineRunCondition) XXX_Merge

func (m *FeaturePipelineRunCondition) XXX_Merge(src proto.Message)

func (*FeaturePipelineRunCondition) XXX_Size

func (m *FeaturePipelineRunCondition) XXX_Size() int

func (*FeaturePipelineRunCondition) XXX_Unmarshal

func (m *FeaturePipelineRunCondition) XXX_Unmarshal(b []byte) error

type FeaturePipelineRunConditionType

type FeaturePipelineRunConditionType string

FeaturePipelineRunConditionType is the condition type of a FeaturePipelineRun

const (
	FeaturePipelineRunCompleted FeaturePipelineRunConditionType = "Completed"
	FeaturePipelineRunSaved     FeaturePipelineRunConditionType = "Saved"
)

type FeaturePipelineRunList

type FeaturePipelineRunList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []FeaturePipelineRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}

FeaturePipelineRunList contains a list of FeaturePipelineRun objects +kubebuilder:object:root=true

func (*FeaturePipelineRunList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipelineRunList.

func (*FeaturePipelineRunList) DeepCopyInto

func (in *FeaturePipelineRunList) DeepCopyInto(out *FeaturePipelineRunList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipelineRunList) DeepCopyObject

func (in *FeaturePipelineRunList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeaturePipelineRunList) Descriptor

func (*FeaturePipelineRunList) Descriptor() ([]byte, []int)

func (*FeaturePipelineRunList) Marshal

func (m *FeaturePipelineRunList) Marshal() (dAtA []byte, err error)

func (*FeaturePipelineRunList) MarshalTo

func (m *FeaturePipelineRunList) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipelineRunList) MarshalToSizedBuffer

func (m *FeaturePipelineRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipelineRunList) ProtoMessage

func (*FeaturePipelineRunList) ProtoMessage()

func (*FeaturePipelineRunList) Reset

func (m *FeaturePipelineRunList) Reset()

func (*FeaturePipelineRunList) Size

func (m *FeaturePipelineRunList) Size() (n int)

func (*FeaturePipelineRunList) String

func (this *FeaturePipelineRunList) String() string

func (*FeaturePipelineRunList) Unmarshal

func (m *FeaturePipelineRunList) Unmarshal(dAtA []byte) error

func (*FeaturePipelineRunList) XXX_DiscardUnknown

func (m *FeaturePipelineRunList) XXX_DiscardUnknown()

func (*FeaturePipelineRunList) XXX_Marshal

func (m *FeaturePipelineRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipelineRunList) XXX_Merge

func (m *FeaturePipelineRunList) XXX_Merge(src proto.Message)

func (*FeaturePipelineRunList) XXX_Size

func (m *FeaturePipelineRunList) XXX_Size() int

func (*FeaturePipelineRunList) XXX_Unmarshal

func (m *FeaturePipelineRunList) XXX_Unmarshal(b []byte) error

type FeaturePipelineRunPhase added in v0.4.451

type FeaturePipelineRunPhase string
const (
	FeaturePipelineRunPhasePending   FeaturePipelineRunPhase = "Pending"
	FeaturePipelineRunPhaseRunning   FeaturePipelineRunPhase = "Running"
	FeaturePipelineRunPhaseCompleted FeaturePipelineRunPhase = "Completed"
	FeaturePipelineRunPhaseAborted   FeaturePipelineRunPhase = "Aborted"
	FeaturePipelineRunPhaseFailed    FeaturePipelineRunPhase = "Failed"
)
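
A hedged helper that maps a FeaturePipelineRunPhase to a human-readable label; the wording of the labels is an assumption.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

// describePhase maps a run phase to a short label.
func describePhase(phase data.FeaturePipelineRunPhase) string {
	switch phase {
	case data.FeaturePipelineRunPhasePending:
		return "waiting to be scheduled"
	case data.FeaturePipelineRunPhaseRunning:
		return "in progress"
	case data.FeaturePipelineRunPhaseCompleted:
		return "finished successfully"
	case data.FeaturePipelineRunPhaseAborted:
		return "aborted"
	case data.FeaturePipelineRunPhaseFailed:
		return "failed"
	default:
		return "unknown"
	}
}

func main() {
	var run data.FeaturePipelineRun
	fmt.Println(describePhase(run.Status.Phase))
}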

type FeaturePipelineRunSpec

type FeaturePipelineRunSpec struct {
	// The feature owner
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// The product version for the feature.
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// FeaturePipelineName specifies the name of the FeaturePipeline to run
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:MaxLength=64
	FeaturePipelineName *string `json:"featurePipelineName,omitempty" protobuf:"bytes,3,opt,name=featurePipelineName"`
	// A reference to the resource class.
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,4,opt,name=resources"`
	// The time-to-live (TTL) of the run.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,5,opt,name=ttl"`
}

FeaturePipelineRunSpec contains the desired state of a FeaturePipelineRun
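
A hedged sketch of a FeaturePipelineRunSpec literal; the owner, version, pipeline name, and the interpretation of TTL as seconds are assumptions.

package main

import (
	"fmt"

	data "github.com/metaprov/modelaapi/pkg/apis/data/v1alpha1" // assumed import path
)

func strPtr(s string) *string { return &s }
func int32Ptr(v int32) *int32 { return &v }

func main() {
	spec := data.FeaturePipelineRunSpec{
		Owner:               strPtr("account-name"),   // hypothetical owner
		VersionName:         strPtr("v0.0.1"),         // hypothetical product version
		FeaturePipelineName: strPtr("churn-features"), // hypothetical pipeline name
		TTL:                 int32Ptr(3600),           // assumed to be in seconds
	}
	fmt.Println(spec.String())
}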

func (*FeaturePipelineRunSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipelineRunSpec.

func (*FeaturePipelineRunSpec) DeepCopyInto

func (in *FeaturePipelineRunSpec) DeepCopyInto(out *FeaturePipelineRunSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipelineRunSpec) Descriptor

func (*FeaturePipelineRunSpec) Descriptor() ([]byte, []int)

func (*FeaturePipelineRunSpec) Marshal

func (m *FeaturePipelineRunSpec) Marshal() (dAtA []byte, err error)

func (*FeaturePipelineRunSpec) MarshalTo

func (m *FeaturePipelineRunSpec) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipelineRunSpec) MarshalToSizedBuffer

func (m *FeaturePipelineRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipelineRunSpec) ProtoMessage

func (*FeaturePipelineRunSpec) ProtoMessage()

func (*FeaturePipelineRunSpec) Reset

func (m *FeaturePipelineRunSpec) Reset()

func (*FeaturePipelineRunSpec) Size

func (m *FeaturePipelineRunSpec) Size() (n int)

func (*FeaturePipelineRunSpec) String

func (this *FeaturePipelineRunSpec) String() string

func (*FeaturePipelineRunSpec) Unmarshal

func (m *FeaturePipelineRunSpec) Unmarshal(dAtA []byte) error

func (*FeaturePipelineRunSpec) XXX_DiscardUnknown

func (m *FeaturePipelineRunSpec) XXX_DiscardUnknown()

func (*FeaturePipelineRunSpec) XXX_Marshal

func (m *FeaturePipelineRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipelineRunSpec) XXX_Merge

func (m *FeaturePipelineRunSpec) XXX_Merge(src proto.Message)

func (*FeaturePipelineRunSpec) XXX_Size

func (m *FeaturePipelineRunSpec) XXX_Size() int

func (*FeaturePipelineRunSpec) XXX_Unmarshal

func (m *FeaturePipelineRunSpec) XXX_Unmarshal(b []byte) error

type FeaturePipelineRunStatus

type FeaturePipelineRunStatus struct {
	// The Start time of the run
	//+option
	StartTime *metav1.Time `json:"startTime,omitempty" protobuf:"bytes,1,opt,name=startTime"`
	// The End time of the run
	//+option
	EndTime *metav1.Time `json:"endTime,omitempty" protobuf:"bytes,2,opt,name=endTime"`
	// The phase of the feature pipeline run
	// +kubebuilder:default:="Pending"
	// +kubebuilder:validation:Optional
	Phase FeaturePipelineRunPhase `json:"phase,omitempty" protobuf:"bytes,3,opt,name=phase"`
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,4,opt,name=observedGeneration"`
	// What triggered the run
	//+kubebuilder:validation:Optional
	TriggeredBy catalog.TriggerType `json:"triggeredBy,omitempty" protobuf:"bytes,5,opt,name=triggeredBy"`
	// Holds the location of log paths
	//+kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,6,opt,name=logs"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,7,opt,name=lastUpdated"`
	// Update in case of terminal failure
	// Borrowed from cluster api controller
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,8,opt,name=failureReason"`
	// Update in case of terminal failure message
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,9,opt,name=failureMessage"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []FeaturePipelineRunCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,10,rep,name=conditions"`
}

FeaturePipelineRunStatus defines the observed state of a FeaturePipelineRun

func (*FeaturePipelineRunStatus) DeepCopy

func (in *FeaturePipelineRunStatus) DeepCopy() *FeaturePipelineRunStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipelineRunStatus.

func (*FeaturePipelineRunStatus) DeepCopyInto

func (in *FeaturePipelineRunStatus) DeepCopyInto(out *FeaturePipelineRunStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipelineRunStatus) Descriptor

func (*FeaturePipelineRunStatus) Descriptor() ([]byte, []int)

func (*FeaturePipelineRunStatus) Marshal

func (m *FeaturePipelineRunStatus) Marshal() (dAtA []byte, err error)

func (*FeaturePipelineRunStatus) MarshalTo

func (m *FeaturePipelineRunStatus) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipelineRunStatus) MarshalToSizedBuffer

func (m *FeaturePipelineRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipelineRunStatus) ProtoMessage

func (*FeaturePipelineRunStatus) ProtoMessage()

func (*FeaturePipelineRunStatus) Reset

func (m *FeaturePipelineRunStatus) Reset()

func (*FeaturePipelineRunStatus) Size

func (m *FeaturePipelineRunStatus) Size() (n int)

func (*FeaturePipelineRunStatus) String

func (this *FeaturePipelineRunStatus) String() string

func (*FeaturePipelineRunStatus) Unmarshal

func (m *FeaturePipelineRunStatus) Unmarshal(dAtA []byte) error

func (*FeaturePipelineRunStatus) XXX_DiscardUnknown

func (m *FeaturePipelineRunStatus) XXX_DiscardUnknown()

func (*FeaturePipelineRunStatus) XXX_Marshal

func (m *FeaturePipelineRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipelineRunStatus) XXX_Merge

func (m *FeaturePipelineRunStatus) XXX_Merge(src proto.Message)

func (*FeaturePipelineRunStatus) XXX_Size

func (m *FeaturePipelineRunStatus) XXX_Size() int

func (*FeaturePipelineRunStatus) XXX_Unmarshal

func (m *FeaturePipelineRunStatus) XXX_Unmarshal(b []byte) error

type FeaturePipelineSpec

type FeaturePipelineSpec struct {
	// Owner is the owner of the feature pipeline
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Pattern="[a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// DatasetSelector is used to select datasets to process the features from.
	// +kubebuilder:validation:Optional
	DatasetSelector map[string]string `json:"datasetSelector,omitempty" protobuf:"bytes,2,opt,name=datasetSelector"`
	// VersionName is the product version for the feature.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,3,opt,name=versionName"`
	// Description of the feature pipeline
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	// +kubebuilder:validation:MaxLength=512
	Description string `json:"description,omitempty" protobuf:"bytes,4,opt,name=description"`
	// Aggregation is the aggregation spec
	// +kubebuilder:validation:Optional
	Aggregation *AggregationSpec `json:"aggregation,omitempty" protobuf:"bytes,5,opt,name=aggregation"`
	// Materialization
	Materialization *MaterializationSpec `json:"materialization,omitempty" protobuf:"bytes,6,opt,name=materialization"`
	// Family is the feature family
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Family *string `json:"family,omitempty" protobuf:"bytes,7,opt,name=family"`
	// Reference to the entity name
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	EntityName *string `json:"entityName,omitempty" protobuf:"bytes,8,opt,name=entityName"`
	// Resources is the hardware resource req.
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,9,opt,name=resources"`
	// Schedule for running the pipeline
	// +kubebuilder:validation:Optional
	Schedule catalog.RunSchedule `json:"schedule,omitempty" protobuf:"bytes,10,opt,name=schedule"`
	// ActiveDeadlineSeconds is the deadline set on jobs for this feature pipeline.
	// +kubebuilder:default:=600
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	ActiveDeadlineSeconds *int64 `json:"activeDeadlineSeconds,omitempty" protobuf:"varint,11,opt,name=activeDeadlineSeconds"`
	// Set to true to pause the data pipeline
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Paused *bool `json:"paused,omitempty" protobuf:"varint,12,opt,name=paused"`
	// TTL.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,13,opt,name=ttl"`
}

FeaturePipelineSpec contains the desired state of a FeaturePipeline

func (*FeaturePipelineSpec) DeepCopy

func (in *FeaturePipelineSpec) DeepCopy() *FeaturePipelineSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipelineSpec.
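
As with the other generated deepcopy functions in this package, DeepCopy is typically used so that a shared object (for example, one read from an informer cache) is never mutated in place. A minimal, hypothetical sketch:

// pauseCopy copies the spec and pauses the copy, leaving the original untouched.
func pauseCopy(in *FeaturePipelineSpec) *FeaturePipelineSpec {
	out := in.DeepCopy()
	paused := true
	out.Paused = &paused
	return out
}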

func (*FeaturePipelineSpec) DeepCopyInto

func (in *FeaturePipelineSpec) DeepCopyInto(out *FeaturePipelineSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipelineSpec) Descriptor

func (*FeaturePipelineSpec) Descriptor() ([]byte, []int)

func (*FeaturePipelineSpec) Marshal

func (m *FeaturePipelineSpec) Marshal() (dAtA []byte, err error)

func (*FeaturePipelineSpec) MarshalTo

func (m *FeaturePipelineSpec) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipelineSpec) MarshalToSizedBuffer

func (m *FeaturePipelineSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipelineSpec) ProtoMessage

func (*FeaturePipelineSpec) ProtoMessage()

func (*FeaturePipelineSpec) Reset

func (m *FeaturePipelineSpec) Reset()

func (*FeaturePipelineSpec) Size

func (m *FeaturePipelineSpec) Size() (n int)

func (*FeaturePipelineSpec) String

func (this *FeaturePipelineSpec) String() string

func (*FeaturePipelineSpec) Unmarshal

func (m *FeaturePipelineSpec) Unmarshal(dAtA []byte) error

func (*FeaturePipelineSpec) XXX_DiscardUnknown

func (m *FeaturePipelineSpec) XXX_DiscardUnknown()

func (*FeaturePipelineSpec) XXX_Marshal

func (m *FeaturePipelineSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipelineSpec) XXX_Merge

func (m *FeaturePipelineSpec) XXX_Merge(src proto.Message)

func (*FeaturePipelineSpec) XXX_Size

func (m *FeaturePipelineSpec) XXX_Size() int

func (*FeaturePipelineSpec) XXX_Unmarshal

func (m *FeaturePipelineSpec) XXX_Unmarshal(b []byte) error

type FeaturePipelineStatus

type FeaturePipelineStatus struct {
	// Last run is the last time a data pipeline run was created
	//+kubebuilder:validation:Optional
	LastRun catalog.LastRunStatus `json:"lastRun,omitempty" protobuf:"bytes,1,opt,name=lastRun"`
	// The time of the next schedule run
	//+kubebuilder:validation:Optional
	NextRun *metav1.Time `json:"nextRun,omitempty" protobuf:"bytes,2,opt,name=nextRun"`
	// Store the avg cost of running this pipeline
	AverageCost float64 `json:"averageCost,omitempty" protobuf:"bytes,3,opt,name=averageCost"`
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,4,opt,name=observedGeneration"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,5,opt,name=lastUpdated"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []FeaturePipelineCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,6,rep,name=conditions"`
}

FeaturePipelineStatus defines the observed state of a FeaturePipeline

func (*FeaturePipelineStatus) DeepCopy

func (in *FeaturePipelineStatus) DeepCopy() *FeaturePipelineStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturePipelineStatus.

func (*FeaturePipelineStatus) DeepCopyInto

func (in *FeaturePipelineStatus) DeepCopyInto(out *FeaturePipelineStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturePipelineStatus) Descriptor

func (*FeaturePipelineStatus) Descriptor() ([]byte, []int)

func (*FeaturePipelineStatus) Marshal

func (m *FeaturePipelineStatus) Marshal() (dAtA []byte, err error)

func (*FeaturePipelineStatus) MarshalTo

func (m *FeaturePipelineStatus) MarshalTo(dAtA []byte) (int, error)

func (*FeaturePipelineStatus) MarshalToSizedBuffer

func (m *FeaturePipelineStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturePipelineStatus) ProtoMessage

func (*FeaturePipelineStatus) ProtoMessage()

func (*FeaturePipelineStatus) Reset

func (m *FeaturePipelineStatus) Reset()

func (*FeaturePipelineStatus) Size

func (m *FeaturePipelineStatus) Size() (n int)

func (*FeaturePipelineStatus) String

func (this *FeaturePipelineStatus) String() string

func (*FeaturePipelineStatus) Unmarshal

func (m *FeaturePipelineStatus) Unmarshal(dAtA []byte) error

func (*FeaturePipelineStatus) XXX_DiscardUnknown

func (m *FeaturePipelineStatus) XXX_DiscardUnknown()

func (*FeaturePipelineStatus) XXX_Marshal

func (m *FeaturePipelineStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturePipelineStatus) XXX_Merge

func (m *FeaturePipelineStatus) XXX_Merge(src proto.Message)

func (*FeaturePipelineStatus) XXX_Size

func (m *FeaturePipelineStatus) XXX_Size() int

func (*FeaturePipelineStatus) XXX_Unmarshal

func (m *FeaturePipelineStatus) XXX_Unmarshal(b []byte) error

type FeatureSpec

type FeatureSpec struct {
	// The feature owner
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:="no-one"
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// The product version for the feature.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// Description is a user-provided description of the feature
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The name of the key column; this column is the key column in the entity.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	KeyColumn *string `json:"keyColumn,omitempty" protobuf:"bytes,5,opt,name=keyColumn"`
	// The name of the timestamp column
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	TimestampColumn *string `json:"timestampColumn,omitempty" protobuf:"bytes,6,opt,name=timestampColumn"`
	// The name of the feature column
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	FeatureColumn *string `json:"featureColumn,omitempty" protobuf:"bytes,7,opt,name=featureColumn"`
	// The name of the entity containing this feature
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	EntityName *string `json:"entityName,omitempty" protobuf:"bytes,8,opt,name=entityName"`
	// The name of the feature pipeline that is producing this feature
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	FeaturePipelineName *string `json:"featurePipelineName,omitempty" protobuf:"bytes,9,opt,name=featurePipelineName"`
}

FeatureSpec contains the desired state of a Feature

func (*FeatureSpec) DeepCopy

func (in *FeatureSpec) DeepCopy() *FeatureSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureSpec.

func (*FeatureSpec) DeepCopyInto

func (in *FeatureSpec) DeepCopyInto(out *FeatureSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureSpec) Descriptor

func (*FeatureSpec) Descriptor() ([]byte, []int)

func (*FeatureSpec) Marshal

func (m *FeatureSpec) Marshal() (dAtA []byte, err error)

func (*FeatureSpec) MarshalTo

func (m *FeatureSpec) MarshalTo(dAtA []byte) (int, error)

func (*FeatureSpec) MarshalToSizedBuffer

func (m *FeatureSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureSpec) ProtoMessage

func (*FeatureSpec) ProtoMessage()

func (*FeatureSpec) Reset

func (m *FeatureSpec) Reset()

func (*FeatureSpec) Size

func (m *FeatureSpec) Size() (n int)

func (*FeatureSpec) String

func (this *FeatureSpec) String() string

func (*FeatureSpec) Unmarshal

func (m *FeatureSpec) Unmarshal(dAtA []byte) error

func (*FeatureSpec) XXX_DiscardUnknown

func (m *FeatureSpec) XXX_DiscardUnknown()

func (*FeatureSpec) XXX_Marshal

func (m *FeatureSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureSpec) XXX_Merge

func (m *FeatureSpec) XXX_Merge(src proto.Message)

func (*FeatureSpec) XXX_Size

func (m *FeatureSpec) XXX_Size() int

func (*FeatureSpec) XXX_Unmarshal

func (m *FeatureSpec) XXX_Unmarshal(b []byte) error

type FeatureStatus

type FeatureStatus struct {
	// Min is the minimum value of the attribute
	Min float64 `json:"min,omitempty" protobuf:"bytes,2,opt,name=min"`
	// Max is the maximum value of the attribute
	Max float64 `json:"max,omitempty" protobuf:"bytes,3,opt,name=max"`
	// Mean is the mean value of the attribute
	Mean float64 `json:"mean,omitempty" protobuf:"bytes,4,opt,name=mean"`
	// StdDev is the standard deviation value of the attribute
	StdDev float64 `json:"stddev,omitempty" protobuf:"bytes,5,opt,name=stddev"`
	// Skewness is the skewness of the attribute's distribution
	Skewness float64 `json:"skewness,omitempty" protobuf:"bytes,6,opt,name=skewness"`
	// Kurtosis is the kurtosis of the attribute's distribution
	Kurtosis float64 `json:"kurtosis,omitempty" protobuf:"bytes,7,opt,name=kurtosis"`
	// Zeros is the number of zeros in the feature
	Zeros float64 `json:"zeros,omitempty" protobuf:"bytes,8,opt,name=zeros"`
	// P25 is the 25th percentile
	P25 float64 `json:"p25,omitempty" protobuf:"bytes,10,opt,name=p25"`
	// P50 is the median (50th percentile)
	P50 float64 `json:"p50,omitempty" protobuf:"bytes,11,opt,name=p50"`
	// P75 is the 75th percentile
	P75 float64 `json:"p75,omitempty" protobuf:"bytes,12,opt,name=p75"`
	// The number of missing values
	// +kubebuilder:validation:Minimum=0
	Missing int32 `json:"missing,omitempty" protobuf:"varint,14,opt,name=missing"`
	// The number of invalid values
	// +kubebuilder:validation:Minimum=0
	Invalid int32 `json:"invalid,omitempty" protobuf:"varint,15,opt,name=invalid"`
	// Is this the target attribute, the value is derived from the schema
	Target bool `json:"target,omitempty" protobuf:"varint,16,opt,name=target"`
	// The feature importance
	Importance float64 `json:"importance,omitempty" protobuf:"bytes,17,opt,name=importance"`
	// Distinct is the number of distinct values
	Distinct int32 `json:"distinc,omitempty" protobuf:"varint,18,opt,name=distinc"`
	// Should this column be ignored, as specified by the user.
	// This value is derived from the schema
	Ignored bool `json:"ignored,omitempty" protobuf:"varint,19,opt,name=ignored"`
	// Is this column nullable.
	// This value is derived from the schema.
	Nullable bool `json:"nullable,omitempty" protobuf:"varint,20,opt,name=nullable"`
	// This column has high cardinality and should be ignored.
	// The value is set during the profile process.
	HighCred bool `json:"highCred,omitempty" protobuf:"varint,21,opt,name=highCred"`
	// This column has high correlation with another feature and should be dropped.
	// The value is set during the profile process.
	HighCorr bool `json:"highCorr,omitempty" protobuf:"varint,22,opt,name=highCorr"`
	// Mark that this column is skewed and would require a power transform.
	// If skewness is less than -1 or greater than 1, the distribution is highly skewed.
	// If skewness is between -1 and -0.5 or between 0.5 and 1, the distribution is moderately skewed.
	// If skewness is between -0.5 and 0.5, the distribution is approximately symmetric.
	Skew bool `json:"skew,omitempty" protobuf:"varint,23,opt,name=skew"`
	// Completeness is the ratio of non-null to null values
	Completeness float64 `json:"completeness,omitempty" protobuf:"bytes,24,opt,name=completeness"`
	// The ratio of distinct values to the total
	DistinctValueCount float64 `json:"distinctValueCount,omitempty" protobuf:"bytes,25,opt,name=distinctValueCount"`
	// The ratio of the most frequent value to the total
	MostFreqValuesRatio float64 `json:"mostFreqValuesRatio,omitempty" protobuf:"bytes,26,opt,name=mostFreqValuesRatio"`
	// Used for text attributes
	IndexOfPeculiarity float64 `json:"indexOfPeculiarity,omitempty" protobuf:"bytes,27,opt,name=indexOfPeculiarity"`
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,28,opt,name=observedGeneration"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,29,opt,name=lastUpdated"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []FeatureCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,30,rep,name=conditions"`
}

FeatureStatus defines the observed state of Feature
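
The Skew flag follows the thresholds documented on the Skewness field. A sketch of that classification as a standalone helper (illustrative, not exported by this package):

// skewClass maps a skewness value to the categories described above.
func skewClass(skewness float64) string {
	abs := skewness
	if abs < 0 {
		abs = -abs
	}
	switch {
	case abs > 1:
		return "highly skewed"
	case abs > 0.5:
		return "moderately skewed"
	default:
		return "approximately symmetric"
	}
}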

func (*FeatureStatus) DeepCopy

func (in *FeatureStatus) DeepCopy() *FeatureStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureStatus.

func (*FeatureStatus) DeepCopyInto

func (in *FeatureStatus) DeepCopyInto(out *FeatureStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeatureStatus) Descriptor

func (*FeatureStatus) Descriptor() ([]byte, []int)

func (*FeatureStatus) Marshal

func (m *FeatureStatus) Marshal() (dAtA []byte, err error)

func (*FeatureStatus) MarshalTo

func (m *FeatureStatus) MarshalTo(dAtA []byte) (int, error)

func (*FeatureStatus) MarshalToSizedBuffer

func (m *FeatureStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeatureStatus) ProtoMessage

func (*FeatureStatus) ProtoMessage()

func (*FeatureStatus) Reset

func (m *FeatureStatus) Reset()

func (*FeatureStatus) Size

func (m *FeatureStatus) Size() (n int)

func (*FeatureStatus) String

func (this *FeatureStatus) String() string

func (*FeatureStatus) Unmarshal

func (m *FeatureStatus) Unmarshal(dAtA []byte) error

func (*FeatureStatus) XXX_DiscardUnknown

func (m *FeatureStatus) XXX_DiscardUnknown()

func (*FeatureStatus) XXX_Marshal

func (m *FeatureStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeatureStatus) XXX_Merge

func (m *FeatureStatus) XXX_Merge(src proto.Message)

func (*FeatureStatus) XXX_Size

func (m *FeatureStatus) XXX_Size() int

func (*FeatureStatus) XXX_Unmarshal

func (m *FeatureStatus) XXX_Unmarshal(b []byte) error

type Featureset

type Featureset struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              FeaturesetSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status FeaturesetStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

Featureset represents a featureset object +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=featuresets,singular=featureset,shortName=fset,categories={data,modela} +kubebuilder:subresource:status

func ParseFeatureSet

func ParseFeatureSet(content string, user string, commit string) (*Featureset, error)

ParseFeatureSet parses a Featureset from its serialized content.
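
A minimal usage sketch; the YAML content, user, and commit values are placeholders:

// loadFeatureset parses a serialized Featureset and returns its name.
func loadFeatureset(yamlContent string) (string, error) {
	fset, err := ParseFeatureSet(yamlContent, "jane@example.com", "abc123")
	if err != nil {
		return "", err
	}
	// ObjectMeta is embedded in Featureset, so Name is directly accessible.
	return fset.Name, nil
}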

func (*Featureset) AddFinalizer

func (entity *Featureset) AddFinalizer()

func (*Featureset) Age

func (entity *Featureset) Age() string

func (*Featureset) Archived

func (pipeline *Featureset) Archived() bool

func (*Featureset) CreateOrUpdateCond

func (entity *Featureset) CreateOrUpdateCond(cond FeaturesetCondition)

Merge or update condition

func (*Featureset) DeepCopy

func (in *Featureset) DeepCopy() *Featureset

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Featureset.

func (*Featureset) DeepCopyInto

func (in *Featureset) DeepCopyInto(out *Featureset)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Featureset) DeepCopyObject

func (in *Featureset) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Featureset) Default

func (feature *Featureset) Default()

No defaults are applied in the current release

func (*Featureset) Descriptor

func (*Featureset) Descriptor() ([]byte, []int)

func (*Featureset) GetCond

func (*Featureset) GetCondIdx

func (entity *Featureset) GetCondIdx(t FeaturesetConditionType) int

func (*Featureset) HasFinalizer

func (entity *Featureset) HasFinalizer() bool

func (*Featureset) IsGitObj

func (entity *Featureset) IsGitObj() bool

func (*Featureset) IsReady

func (entity *Featureset) IsReady() bool

func (*Featureset) Key

func (entity *Featureset) Key() string

func (*Featureset) LabelWithCommit

func (entity *Featureset) LabelWithCommit(commit string, uname string, branch string)

func (*Featureset) MarkArchived

func (fset *Featureset) MarkArchived()

func (*Featureset) MarkReady

func (fset *Featureset) MarkReady()

func (*Featureset) Marshal

func (m *Featureset) Marshal() (dAtA []byte, err error)

func (*Featureset) MarshalTo

func (m *Featureset) MarshalTo(dAtA []byte) (int, error)

func (*Featureset) MarshalToSizedBuffer

func (m *Featureset) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Featureset) ProtoMessage

func (*Featureset) ProtoMessage()

func (*Featureset) RemoveFinalizer

func (entity *Featureset) RemoveFinalizer()

func (*Featureset) RepEntry

func (entity *Featureset) RepEntry() (string, error)

func (*Featureset) RepPath

func (entity *Featureset) RepPath(root string) (string, error)

Return the on disk rep location

func (*Featureset) Reset

func (m *Featureset) Reset()

func (*Featureset) SetChanged

func (entity *Featureset) SetChanged()

func (*Featureset) SetupWebhookWithManager

func (in *Featureset) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Featureset) Size

func (m *Featureset) Size() (n int)

func (*Featureset) String

func (this *Featureset) String() string

func (*Featureset) ToYamlFile

func (entity *Featureset) ToYamlFile() ([]byte, error)

func (*Featureset) Unmarshal

func (m *Featureset) Unmarshal(dAtA []byte) error

func (*Featureset) ValidateCreate

func (feature *Featureset) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Featureset) ValidateDelete

func (r *Featureset) ValidateDelete() error

func (*Featureset) ValidateUpdate

func (feature *Featureset) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*Featureset) XXX_DiscardUnknown

func (m *Featureset) XXX_DiscardUnknown()

func (*Featureset) XXX_Marshal

func (m *Featureset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Featureset) XXX_Merge

func (m *Featureset) XXX_Merge(src proto.Message)

func (*Featureset) XXX_Size

func (m *Featureset) XXX_Size() int

func (*Featureset) XXX_Unmarshal

func (m *Featureset) XXX_Unmarshal(b []byte) error

type FeaturesetCondition

type FeaturesetCondition struct {
	// Type of account condition.
	Type FeaturesetConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=FeaturesetConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,3,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

FeaturesetCondition describes the state of a Featureset at a certain point.
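
A sketch of setting the Ready condition by hand with CreateOrUpdateCond; in practice MarkReady does this for you, so this only illustrates the condition shape. Here metav1 and v1 refer to k8s.io/apimachinery/pkg/apis/meta/v1 and k8s.io/api/core/v1, as in the struct above, and the reason and message are placeholders:

// markReadyByHand is illustrative; Featureset.MarkReady covers the common case.
func markReadyByHand(fset *Featureset) {
	now := metav1.Now()
	fset.CreateOrUpdateCond(FeaturesetCondition{
		Type:               FeaturesetReady,
		Status:             v1.ConditionTrue,
		LastTransitionTime: &now,
		Reason:             "FeaturesAvailable",
		Message:            "all referenced features are available",
	})
}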

func (*FeaturesetCondition) DeepCopy

func (in *FeaturesetCondition) DeepCopy() *FeaturesetCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturesetCondition.

func (*FeaturesetCondition) DeepCopyInto

func (in *FeaturesetCondition) DeepCopyInto(out *FeaturesetCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturesetCondition) Descriptor

func (*FeaturesetCondition) Descriptor() ([]byte, []int)

func (*FeaturesetCondition) Marshal

func (m *FeaturesetCondition) Marshal() (dAtA []byte, err error)

func (*FeaturesetCondition) MarshalTo

func (m *FeaturesetCondition) MarshalTo(dAtA []byte) (int, error)

func (*FeaturesetCondition) MarshalToSizedBuffer

func (m *FeaturesetCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturesetCondition) ProtoMessage

func (*FeaturesetCondition) ProtoMessage()

func (*FeaturesetCondition) Reset

func (m *FeaturesetCondition) Reset()

func (*FeaturesetCondition) Size

func (m *FeaturesetCondition) Size() (n int)

func (*FeaturesetCondition) String

func (this *FeaturesetCondition) String() string

func (*FeaturesetCondition) Unmarshal

func (m *FeaturesetCondition) Unmarshal(dAtA []byte) error

func (*FeaturesetCondition) XXX_DiscardUnknown

func (m *FeaturesetCondition) XXX_DiscardUnknown()

func (*FeaturesetCondition) XXX_Marshal

func (m *FeaturesetCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturesetCondition) XXX_Merge

func (m *FeaturesetCondition) XXX_Merge(src proto.Message)

func (*FeaturesetCondition) XXX_Size

func (m *FeaturesetCondition) XXX_Size() int

func (*FeaturesetCondition) XXX_Unmarshal

func (m *FeaturesetCondition) XXX_Unmarshal(b []byte) error

type FeaturesetConditionType

type FeaturesetConditionType string

Condition on the featureset

const (
	FeaturesetReady FeaturesetConditionType = "Ready"
	FeaturesetSaved FeaturesetConditionType = "Saved"
)

Featureset Condition

type FeaturesetList

type FeaturesetList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []Featureset `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true FeaturesetList contains a list of Featureset
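
A sketch of filtering a FeaturesetList down to the ready items, using the IsReady helper listed above:

// readyFeaturesets returns the names of list items whose Ready condition is true.
func readyFeaturesets(list *FeaturesetList) []string {
	var names []string
	for i := range list.Items {
		if list.Items[i].IsReady() {
			names = append(names, list.Items[i].Name)
		}
	}
	return names
}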

func (*FeaturesetList) DeepCopy

func (in *FeaturesetList) DeepCopy() *FeaturesetList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturesetList.

func (*FeaturesetList) DeepCopyInto

func (in *FeaturesetList) DeepCopyInto(out *FeaturesetList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturesetList) DeepCopyObject

func (in *FeaturesetList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*FeaturesetList) Descriptor

func (*FeaturesetList) Descriptor() ([]byte, []int)

func (*FeaturesetList) Marshal

func (m *FeaturesetList) Marshal() (dAtA []byte, err error)

func (*FeaturesetList) MarshalTo

func (m *FeaturesetList) MarshalTo(dAtA []byte) (int, error)

func (*FeaturesetList) MarshalToSizedBuffer

func (m *FeaturesetList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturesetList) ProtoMessage

func (*FeaturesetList) ProtoMessage()

func (*FeaturesetList) Reset

func (m *FeaturesetList) Reset()

func (*FeaturesetList) Size

func (m *FeaturesetList) Size() (n int)

func (*FeaturesetList) String

func (this *FeaturesetList) String() string

func (*FeaturesetList) Unmarshal

func (m *FeaturesetList) Unmarshal(dAtA []byte) error

func (*FeaturesetList) XXX_DiscardUnknown

func (m *FeaturesetList) XXX_DiscardUnknown()

func (*FeaturesetList) XXX_Marshal

func (m *FeaturesetList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturesetList) XXX_Merge

func (m *FeaturesetList) XXX_Merge(src proto.Message)

func (*FeaturesetList) XXX_Size

func (m *FeaturesetList) XXX_Size() int

func (*FeaturesetList) XXX_Unmarshal

func (m *FeaturesetList) XXX_Unmarshal(b []byte) error

type FeaturesetSpec

type FeaturesetSpec struct {
	// The product version of the featureset
	// +kubebuilder:default:=""
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
	// User provided description
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,2,opt,name=description"`
	// Reference to the feature names of this featureset
	Features []string `json:"features,omitempty" protobuf:"bytes,3,rep,name=features"`
	// The owner account name
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,4,opt,name=owner"`
}

FeaturesetSpec contains the desired state of a Featureset.

func (*FeaturesetSpec) DeepCopy

func (in *FeaturesetSpec) DeepCopy() *FeaturesetSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturesetSpec.

func (*FeaturesetSpec) DeepCopyInto

func (in *FeaturesetSpec) DeepCopyInto(out *FeaturesetSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturesetSpec) Descriptor

func (*FeaturesetSpec) Descriptor() ([]byte, []int)

func (*FeaturesetSpec) Marshal

func (m *FeaturesetSpec) Marshal() (dAtA []byte, err error)

func (*FeaturesetSpec) MarshalTo

func (m *FeaturesetSpec) MarshalTo(dAtA []byte) (int, error)

func (*FeaturesetSpec) MarshalToSizedBuffer

func (m *FeaturesetSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturesetSpec) ProtoMessage

func (*FeaturesetSpec) ProtoMessage()

func (*FeaturesetSpec) Reset

func (m *FeaturesetSpec) Reset()

func (*FeaturesetSpec) Size

func (m *FeaturesetSpec) Size() (n int)

func (*FeaturesetSpec) String

func (this *FeaturesetSpec) String() string

func (*FeaturesetSpec) Unmarshal

func (m *FeaturesetSpec) Unmarshal(dAtA []byte) error

func (*FeaturesetSpec) XXX_DiscardUnknown

func (m *FeaturesetSpec) XXX_DiscardUnknown()

func (*FeaturesetSpec) XXX_Marshal

func (m *FeaturesetSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturesetSpec) XXX_Merge

func (m *FeaturesetSpec) XXX_Merge(src proto.Message)

func (*FeaturesetSpec) XXX_Size

func (m *FeaturesetSpec) XXX_Size() int

func (*FeaturesetSpec) XXX_Unmarshal

func (m *FeaturesetSpec) XXX_Unmarshal(b []byte) error

type FeaturesetStatus

type FeaturesetStatus struct {
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,2,opt,name=lastUpdated"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []FeaturesetCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,3,rep,name=conditions"`
}

FeaturesetStatus defines the observed state of Featureset

func (*FeaturesetStatus) DeepCopy

func (in *FeaturesetStatus) DeepCopy() *FeaturesetStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeaturesetStatus.

func (*FeaturesetStatus) DeepCopyInto

func (in *FeaturesetStatus) DeepCopyInto(out *FeaturesetStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FeaturesetStatus) Descriptor

func (*FeaturesetStatus) Descriptor() ([]byte, []int)

func (*FeaturesetStatus) Marshal

func (m *FeaturesetStatus) Marshal() (dAtA []byte, err error)

func (*FeaturesetStatus) MarshalTo

func (m *FeaturesetStatus) MarshalTo(dAtA []byte) (int, error)

func (*FeaturesetStatus) MarshalToSizedBuffer

func (m *FeaturesetStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FeaturesetStatus) ProtoMessage

func (*FeaturesetStatus) ProtoMessage()

func (*FeaturesetStatus) Reset

func (m *FeaturesetStatus) Reset()

func (*FeaturesetStatus) Size

func (m *FeaturesetStatus) Size() (n int)

func (*FeaturesetStatus) String

func (this *FeaturesetStatus) String() string

func (*FeaturesetStatus) Unmarshal

func (m *FeaturesetStatus) Unmarshal(dAtA []byte) error

func (*FeaturesetStatus) XXX_DiscardUnknown

func (m *FeaturesetStatus) XXX_DiscardUnknown()

func (*FeaturesetStatus) XXX_Marshal

func (m *FeaturesetStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FeaturesetStatus) XXX_Merge

func (m *FeaturesetStatus) XXX_Merge(src proto.Message)

func (*FeaturesetStatus) XXX_Size

func (m *FeaturesetStatus) XXX_Size() int

func (*FeaturesetStatus) XXX_Unmarshal

func (m *FeaturesetStatus) XXX_Unmarshal(b []byte) error

type FlatFileFormatSpec added in v0.4.861

type FlatFileFormatSpec struct {
	// The file type of incoming data which uses the DataSource (by default, a CSV file)
	// +kubebuilder:default:="csv"
	// +kubebuilder:validation:Optional
	FileType *FlatFileType `json:"fileType,omitempty" protobuf:"bytes,1,opt,name=fileType"`
	// The file format for CSV files, if applicable
	// +kubebuilder:validation:Optional
	Csv CsvFileSpec `json:"csv,omitempty" protobuf:"bytes,2,opt,name=csv"`
	// The file format for Excel files, if applicable
	// +kubebuilder:validation:Optional
	Excel ExcelNotebookSpec `json:"excel,omitempty" protobuf:"bytes,3,opt,name=excel"`
	// The file format for Parquet files, if applicable
	// +kubebuilder:validation:Optional
	Parquet ParquetFileSpec `json:"parquet,omitempty" protobuf:"bytes,4,opt,name=parquet"`
}

FlatFileFormatSpec defines the format for incoming flat-files to be parsed
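
A sketch of a FlatFileFormatSpec for Parquet input; only FileType is set, and the per-format specs (CsvFileSpec, ExcelNotebookSpec, ParquetFileSpec) are left at their zero values:

// parquetFormat is illustrative; adjust the per-format specs as needed.
func parquetFormat() FlatFileFormatSpec {
	fileType := FlatFileTypeParquet
	return FlatFileFormatSpec{
		FileType: &fileType,
	}
}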

func (*FlatFileFormatSpec) DeepCopy added in v0.4.861

func (in *FlatFileFormatSpec) DeepCopy() *FlatFileFormatSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FlatFileFormatSpec.

func (*FlatFileFormatSpec) DeepCopyInto added in v0.4.861

func (in *FlatFileFormatSpec) DeepCopyInto(out *FlatFileFormatSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*FlatFileFormatSpec) Descriptor added in v0.4.861

func (*FlatFileFormatSpec) Descriptor() ([]byte, []int)

func (*FlatFileFormatSpec) Marshal added in v0.4.861

func (m *FlatFileFormatSpec) Marshal() (dAtA []byte, err error)

func (*FlatFileFormatSpec) MarshalTo added in v0.4.861

func (m *FlatFileFormatSpec) MarshalTo(dAtA []byte) (int, error)

func (*FlatFileFormatSpec) MarshalToSizedBuffer added in v0.4.861

func (m *FlatFileFormatSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*FlatFileFormatSpec) ProtoMessage added in v0.4.861

func (*FlatFileFormatSpec) ProtoMessage()

func (*FlatFileFormatSpec) Reset added in v0.4.861

func (m *FlatFileFormatSpec) Reset()

func (*FlatFileFormatSpec) Size added in v0.4.861

func (m *FlatFileFormatSpec) Size() (n int)

func (*FlatFileFormatSpec) String added in v0.4.861

func (this *FlatFileFormatSpec) String() string

func (*FlatFileFormatSpec) Unmarshal added in v0.4.861

func (m *FlatFileFormatSpec) Unmarshal(dAtA []byte) error

func (*FlatFileFormatSpec) XXX_DiscardUnknown added in v0.4.861

func (m *FlatFileFormatSpec) XXX_DiscardUnknown()

func (*FlatFileFormatSpec) XXX_Marshal added in v0.4.861

func (m *FlatFileFormatSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*FlatFileFormatSpec) XXX_Merge added in v0.4.861

func (m *FlatFileFormatSpec) XXX_Merge(src proto.Message)

func (*FlatFileFormatSpec) XXX_Size added in v0.4.861

func (m *FlatFileFormatSpec) XXX_Size() int

func (*FlatFileFormatSpec) XXX_Unmarshal added in v0.4.861

func (m *FlatFileFormatSpec) XXX_Unmarshal(b []byte) error

type FlatFileType

type FlatFileType string

FlatFileType represent the type of the file +kubebuilder:validation:Enum="csv";"tsv";"excel";"fwf";"hdf";"html";"json";"pickle";"sas";"stata";"feather";"parquet";

const (
	FlatFileTypeCsv     FlatFileType = "csv"
	FlatFileTypeTable   FlatFileType = "tsv"
	FlatFileTypeExcel   FlatFileType = "excel"
	FlatFileTypeFwf     FlatFileType = "fwf"
	FlatFileTypeHdf5    FlatFileType = "hdf"
	FlatFileTypeHtml    FlatFileType = "html"
	FlatFileTypeJson    FlatFileType = "json"
	FlatFileTypePickle  FlatFileType = "pickle"
	FlatFileTypeSas     FlatFileType = "sas"
	FlatFileTypeStata   FlatFileType = "stata"
	FlatFileTypeFeather FlatFileType = "feather"
	FlatFileTypeParquet FlatFileType = "parquet"
)
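
A hypothetical helper that maps common file extensions to these constants; the extension table is an assumption and not part of the package (it assumes the standard strings package is imported):

// flatFileTypeForExt guesses a FlatFileType from a file extension.
// Unknown extensions fall back to CSV, matching the documented default.
func flatFileTypeForExt(ext string) FlatFileType {
	switch strings.ToLower(strings.TrimPrefix(ext, ".")) {
	case "csv":
		return FlatFileTypeCsv
	case "tsv":
		return FlatFileTypeTable
	case "xls", "xlsx":
		return FlatFileTypeExcel
	case "json":
		return FlatFileTypeJson
	case "feather":
		return FlatFileTypeFeather
	case "parquet":
		return FlatFileTypeParquet
	default:
		return FlatFileTypeCsv
	}
}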

type GaugeSpec

type GaugeSpec struct {
	// Dataset is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// Column is the name of the column
	// +kubebuilder:validation:Optional
	Column *string `json:"column,omitempty" protobuf:"bytes,2,opt,name=column"`
	// Row is the row number
	// +kubebuilder:validation:Optional
	Row *int32 `json:"row,omitempty" protobuf:"varint,3,opt,name=row"`
	// Scalar is a constant value
	// +kubebuilder:validation:Optional
	Scalar *string `json:"scalar,omitempty" protobuf:"bytes,4,opt,name=scalar"`
}

func (*GaugeSpec) DeepCopy

func (in *GaugeSpec) DeepCopy() *GaugeSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GaugeSpec.

func (*GaugeSpec) DeepCopyInto

func (in *GaugeSpec) DeepCopyInto(out *GaugeSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*GaugeSpec) Descriptor

func (*GaugeSpec) Descriptor() ([]byte, []int)

func (*GaugeSpec) Marshal

func (m *GaugeSpec) Marshal() (dAtA []byte, err error)

func (*GaugeSpec) MarshalTo

func (m *GaugeSpec) MarshalTo(dAtA []byte) (int, error)

func (*GaugeSpec) MarshalToSizedBuffer

func (m *GaugeSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*GaugeSpec) ProtoMessage

func (*GaugeSpec) ProtoMessage()

func (*GaugeSpec) Reset

func (m *GaugeSpec) Reset()

func (*GaugeSpec) Size

func (m *GaugeSpec) Size() (n int)

func (*GaugeSpec) String

func (this *GaugeSpec) String() string

func (*GaugeSpec) Unmarshal

func (m *GaugeSpec) Unmarshal(dAtA []byte) error

func (*GaugeSpec) XXX_DiscardUnknown

func (m *GaugeSpec) XXX_DiscardUnknown()

func (*GaugeSpec) XXX_Marshal

func (m *GaugeSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GaugeSpec) XXX_Merge

func (m *GaugeSpec) XXX_Merge(src proto.Message)

func (*GaugeSpec) XXX_Size

func (m *GaugeSpec) XXX_Size() int

func (*GaugeSpec) XXX_Unmarshal

func (m *GaugeSpec) XXX_Unmarshal(b []byte) error

type GitLocation

type GitLocation struct {
	// The Git Connection resource which exists in the same tenant as the parent DataProduct
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	GitConnectionName *string `json:"gitConnectionName,omitempty" protobuf:"bytes,1,opt,name=gitConnectionName"`
	// The URL to the destination Git repository
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=256
	URL *string `json:"url,omitempty" protobuf:"bytes,2,opt,name=url"`
	// The branch inside the Git repository
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=256
	Branch *string `json:"branch,omitempty" protobuf:"bytes,3,opt,name=branch"`
	// Indicates if the repository is private
	// +kubebuilder:default:=true
	Private *bool `json:"private,omitempty" protobuf:"varint,4,opt,name=private"`
}

GitLocation specifies the Git location where Modela will track resources as YAML

func (*GitLocation) DeepCopy

func (in *GitLocation) DeepCopy() *GitLocation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GitLocation.

func (*GitLocation) DeepCopyInto

func (in *GitLocation) DeepCopyInto(out *GitLocation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*GitLocation) Descriptor

func (*GitLocation) Descriptor() ([]byte, []int)

func (*GitLocation) Marshal

func (m *GitLocation) Marshal() (dAtA []byte, err error)

func (*GitLocation) MarshalTo

func (m *GitLocation) MarshalTo(dAtA []byte) (int, error)

func (*GitLocation) MarshalToSizedBuffer

func (m *GitLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*GitLocation) ProtoMessage

func (*GitLocation) ProtoMessage()

func (*GitLocation) Reset

func (m *GitLocation) Reset()

func (*GitLocation) Size

func (m *GitLocation) Size() (n int)

func (*GitLocation) String

func (this *GitLocation) String() string

func (*GitLocation) Unmarshal

func (m *GitLocation) Unmarshal(dAtA []byte) error

func (*GitLocation) XXX_DiscardUnknown

func (m *GitLocation) XXX_DiscardUnknown()

func (*GitLocation) XXX_Marshal

func (m *GitLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GitLocation) XXX_Merge

func (m *GitLocation) XXX_Merge(src proto.Message)

func (*GitLocation) XXX_Size

func (m *GitLocation) XXX_Size() int

func (*GitLocation) XXX_Unmarshal

func (m *GitLocation) XXX_Unmarshal(b []byte) error

type GovernanceReviewStatus

type GovernanceReviewStatus struct {
	// The approval status, which can be approved or rejected
	Result ApprovalType `json:"result,omitempty" protobuf:"bytes,1,opt,name=result"`
	// The date of the approval
	ApprovalDate *metav1.Time `json:"approvalDate,omitempty" protobuf:"bytes,2,opt,name=approvalDate"`
	// Notes taken during the review
	Notes string `json:"notes,omitempty" protobuf:"bytes,3,opt,name=notes"`
}

func (*GovernanceReviewStatus) DeepCopy

func (in *GovernanceReviewStatus) DeepCopy() *GovernanceReviewStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GovernanceReviewStatus.

func (*GovernanceReviewStatus) DeepCopyInto

func (in *GovernanceReviewStatus) DeepCopyInto(out *GovernanceReviewStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*GovernanceReviewStatus) Descriptor

func (*GovernanceReviewStatus) Descriptor() ([]byte, []int)

func (*GovernanceReviewStatus) Marshal

func (m *GovernanceReviewStatus) Marshal() (dAtA []byte, err error)

func (*GovernanceReviewStatus) MarshalTo

func (m *GovernanceReviewStatus) MarshalTo(dAtA []byte) (int, error)

func (*GovernanceReviewStatus) MarshalToSizedBuffer

func (m *GovernanceReviewStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*GovernanceReviewStatus) ProtoMessage

func (*GovernanceReviewStatus) ProtoMessage()

func (*GovernanceReviewStatus) Reset

func (m *GovernanceReviewStatus) Reset()

func (*GovernanceReviewStatus) Size

func (m *GovernanceReviewStatus) Size() (n int)

func (*GovernanceReviewStatus) String

func (this *GovernanceReviewStatus) String() string

func (*GovernanceReviewStatus) Unmarshal

func (m *GovernanceReviewStatus) Unmarshal(dAtA []byte) error

func (*GovernanceReviewStatus) XXX_DiscardUnknown

func (m *GovernanceReviewStatus) XXX_DiscardUnknown()

func (*GovernanceReviewStatus) XXX_Marshal

func (m *GovernanceReviewStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GovernanceReviewStatus) XXX_Merge

func (m *GovernanceReviewStatus) XXX_Merge(src proto.Message)

func (*GovernanceReviewStatus) XXX_Size

func (m *GovernanceReviewStatus) XXX_Size() int

func (*GovernanceReviewStatus) XXX_Unmarshal

func (m *GovernanceReviewStatus) XXX_Unmarshal(b []byte) error

type GovernanceSpec

type GovernanceSpec struct {
	// Indicates if governance is enabled
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"`
	// The country whose regulations are under consideration
	// +kubebuilder:validation:Optional
	Country *string `json:"country,omitempty" protobuf:"bytes,2,opt,name=country"`
	// The account name of the IT reviewer
	// +kubebuilder:validation:Optional
	ITReviewer *string `json:"itReviewer,omitempty" protobuf:"bytes,3,opt,name=itReviewer"`
	// The account name of the compliance reviewer
	// +kubebuilder:validation:Optional
	ComplianceReviewer *string `json:"complianceReviewer,omitempty" protobuf:"bytes,4,opt,name=complianceReviewer"`
	// The account name of the business reviewer
	// +kubebuilder:validation:Optional
	BusinessReviewer *string `json:"businessReviewer,omitempty" protobuf:"bytes,5,opt,name=businessReviewer"`
}

GovernanceSpec describes the governance requirements for models produced under a DataProduct
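
A sketch of enabling governance with the three reviewer accounts; all values are placeholders:

// governanceExample is illustrative; account names and country are placeholders.
func governanceExample() GovernanceSpec {
	enabled := true
	country := "us"
	it, compliance, business := "it-lead", "compliance-lead", "business-lead"
	return GovernanceSpec{
		Enabled:            &enabled,
		Country:            &country,
		ITReviewer:         &it,
		ComplianceReviewer: &compliance,
		BusinessReviewer:   &business,
	}
}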

func (*GovernanceSpec) DeepCopy

func (in *GovernanceSpec) DeepCopy() *GovernanceSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GovernanceSpec.

func (*GovernanceSpec) DeepCopyInto

func (in *GovernanceSpec) DeepCopyInto(out *GovernanceSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*GovernanceSpec) Descriptor

func (*GovernanceSpec) Descriptor() ([]byte, []int)

func (*GovernanceSpec) Marshal

func (m *GovernanceSpec) Marshal() (dAtA []byte, err error)

func (*GovernanceSpec) MarshalTo

func (m *GovernanceSpec) MarshalTo(dAtA []byte) (int, error)

func (*GovernanceSpec) MarshalToSizedBuffer

func (m *GovernanceSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*GovernanceSpec) ProtoMessage

func (*GovernanceSpec) ProtoMessage()

func (*GovernanceSpec) Reset

func (m *GovernanceSpec) Reset()

func (*GovernanceSpec) Size

func (m *GovernanceSpec) Size() (n int)

func (*GovernanceSpec) String

func (this *GovernanceSpec) String() string

func (*GovernanceSpec) Unmarshal

func (m *GovernanceSpec) Unmarshal(dAtA []byte) error

func (*GovernanceSpec) XXX_DiscardUnknown

func (m *GovernanceSpec) XXX_DiscardUnknown()

func (*GovernanceSpec) XXX_Marshal

func (m *GovernanceSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GovernanceSpec) XXX_Merge

func (m *GovernanceSpec) XXX_Merge(src proto.Message)

func (*GovernanceSpec) XXX_Size

func (m *GovernanceSpec) XXX_Size() int

func (*GovernanceSpec) XXX_Unmarshal

func (m *GovernanceSpec) XXX_Unmarshal(b []byte) error

type GovernanceStatus

type GovernanceStatus struct {
	// The review status for the IT department
	// +kubebuilder:validation:Optional
	ITReviewStatus GovernanceReviewStatus `json:"ITReviewStatus,omitempty" protobuf:"bytes,1,opt,name=ITReviewStatus"`
	// The review status for the compliance department
	// +kubebuilder:validation:Optional
	ComplianceReviewStatus GovernanceReviewStatus `json:"complianceReviewStatus,omitempty" protobuf:"bytes,2,opt,name=complianceReviewStatus"`
	// The review status for the business department
	// +kubebuilder:validation:Optional
	BusinessReviewStatus GovernanceReviewStatus `json:"businessReviewStatus,omitempty" protobuf:"bytes,3,opt,name=businessReviewStatus"`
}

GovernanceStatus describes the current state of a governance review for a model

func (*GovernanceStatus) DeepCopy

func (in *GovernanceStatus) DeepCopy() *GovernanceStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GovernanceStatus.

func (*GovernanceStatus) DeepCopyInto

func (in *GovernanceStatus) DeepCopyInto(out *GovernanceStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*GovernanceStatus) Descriptor

func (*GovernanceStatus) Descriptor() ([]byte, []int)

func (*GovernanceStatus) Marshal

func (m *GovernanceStatus) Marshal() (dAtA []byte, err error)

func (*GovernanceStatus) MarshalTo

func (m *GovernanceStatus) MarshalTo(dAtA []byte) (int, error)

func (*GovernanceStatus) MarshalToSizedBuffer

func (m *GovernanceStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*GovernanceStatus) ProtoMessage

func (*GovernanceStatus) ProtoMessage()

func (*GovernanceStatus) Reset

func (m *GovernanceStatus) Reset()

func (*GovernanceStatus) Size

func (m *GovernanceStatus) Size() (n int)

func (*GovernanceStatus) String

func (this *GovernanceStatus) String() string

func (*GovernanceStatus) Unmarshal

func (m *GovernanceStatus) Unmarshal(dAtA []byte) error

func (*GovernanceStatus) XXX_DiscardUnknown

func (m *GovernanceStatus) XXX_DiscardUnknown()

func (*GovernanceStatus) XXX_Marshal

func (m *GovernanceStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*GovernanceStatus) XXX_Merge

func (m *GovernanceStatus) XXX_Merge(src proto.Message)

func (*GovernanceStatus) XXX_Size

func (m *GovernanceStatus) XXX_Size() int

func (*GovernanceStatus) XXX_Unmarshal

func (m *GovernanceStatus) XXX_Unmarshal(b []byte) error

type HistogramSpec

type HistogramSpec struct {
	// Dataset is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// The name of the X column
	// +kubebuilder:validation:Optional
	X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"`
	// The number of bins in the histogram
	// +kubebuilder:validation:Optional
	Bins *int32 `json:"bins,omitempty" protobuf:"varint,3,opt,name=bins"`
}

func (*HistogramSpec) DeepCopy

func (in *HistogramSpec) DeepCopy() *HistogramSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HistogramSpec.

func (*HistogramSpec) DeepCopyInto

func (in *HistogramSpec) DeepCopyInto(out *HistogramSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*HistogramSpec) Descriptor

func (*HistogramSpec) Descriptor() ([]byte, []int)

func (*HistogramSpec) Marshal

func (m *HistogramSpec) Marshal() (dAtA []byte, err error)

func (*HistogramSpec) MarshalTo

func (m *HistogramSpec) MarshalTo(dAtA []byte) (int, error)

func (*HistogramSpec) MarshalToSizedBuffer

func (m *HistogramSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*HistogramSpec) ProtoMessage

func (*HistogramSpec) ProtoMessage()

func (*HistogramSpec) Reset

func (m *HistogramSpec) Reset()

func (*HistogramSpec) Size

func (m *HistogramSpec) Size() (n int)

func (*HistogramSpec) String

func (this *HistogramSpec) String() string

func (*HistogramSpec) Unmarshal

func (m *HistogramSpec) Unmarshal(dAtA []byte) error

func (*HistogramSpec) XXX_DiscardUnknown

func (m *HistogramSpec) XXX_DiscardUnknown()

func (*HistogramSpec) XXX_Marshal

func (m *HistogramSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*HistogramSpec) XXX_Merge

func (m *HistogramSpec) XXX_Merge(src proto.Message)

func (*HistogramSpec) XXX_Size

func (m *HistogramSpec) XXX_Size() int

func (*HistogramSpec) XXX_Unmarshal

func (m *HistogramSpec) XXX_Unmarshal(b []byte) error

type ImageLocation

type ImageLocation struct {
	// The canonical name of the image repository. If not set, it will default to docker/{dataproduct_name}
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=256
	Name *string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// The image repository Connection resource which exists in the same tenant as the parent DataProduct. If the field
	// is not set, Modela will ignore the image location and not push images
	// +kubebuilder:default:=""
	RegistryConnectionName *string `json:"registryConnectionName,omitempty" protobuf:"bytes,2,opt,name=registryConnectionName"`
}

ImageLocation specifies the destination for all model images produced under a DataProduct

func (*ImageLocation) DeepCopy

func (in *ImageLocation) DeepCopy() *ImageLocation

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ImageLocation.

func (*ImageLocation) DeepCopyInto

func (in *ImageLocation) DeepCopyInto(out *ImageLocation)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ImageLocation) Descriptor

func (*ImageLocation) Descriptor() ([]byte, []int)

func (*ImageLocation) Marshal

func (m *ImageLocation) Marshal() (dAtA []byte, err error)

func (*ImageLocation) MarshalTo

func (m *ImageLocation) MarshalTo(dAtA []byte) (int, error)

func (*ImageLocation) MarshalToSizedBuffer

func (m *ImageLocation) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ImageLocation) ProtoMessage

func (*ImageLocation) ProtoMessage()

func (*ImageLocation) Reset

func (m *ImageLocation) Reset()

func (*ImageLocation) Size

func (m *ImageLocation) Size() (n int)

func (*ImageLocation) String

func (this *ImageLocation) String() string

func (*ImageLocation) Unmarshal

func (m *ImageLocation) Unmarshal(dAtA []byte) error

func (*ImageLocation) XXX_DiscardUnknown

func (m *ImageLocation) XXX_DiscardUnknown()

func (*ImageLocation) XXX_Marshal

func (m *ImageLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ImageLocation) XXX_Merge

func (m *ImageLocation) XXX_Merge(src proto.Message)

func (*ImageLocation) XXX_Size

func (m *ImageLocation) XXX_Size() int

func (*ImageLocation) XXX_Unmarshal

func (m *ImageLocation) XXX_Unmarshal(b []byte) error

type KPI

type KPI struct {
	// The name of the KPI
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Name *string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	// The value of the KPI
	Value *float64 `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"`
}

KPI specifies a key performance indicator for a DataProduct. Currently not implemented.

func (*KPI) DeepCopy

func (in *KPI) DeepCopy() *KPI

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KPI.

func (*KPI) DeepCopyInto

func (in *KPI) DeepCopyInto(out *KPI)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*KPI) Descriptor

func (*KPI) Descriptor() ([]byte, []int)

func (*KPI) Marshal

func (m *KPI) Marshal() (dAtA []byte, err error)

func (*KPI) MarshalTo

func (m *KPI) MarshalTo(dAtA []byte) (int, error)

func (*KPI) MarshalToSizedBuffer

func (m *KPI) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*KPI) ProtoMessage

func (*KPI) ProtoMessage()

func (*KPI) Reset

func (m *KPI) Reset()

func (*KPI) Size

func (m *KPI) Size() (n int)

func (*KPI) String

func (this *KPI) String() string

func (*KPI) Unmarshal

func (m *KPI) Unmarshal(dAtA []byte) error

func (*KPI) XXX_DiscardUnknown

func (m *KPI) XXX_DiscardUnknown()

func (*KPI) XXX_Marshal

func (m *KPI) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*KPI) XXX_Merge

func (m *KPI) XXX_Merge(src proto.Message)

func (*KPI) XXX_Size

func (m *KPI) XXX_Size() int

func (*KPI) XXX_Unmarshal

func (m *KPI) XXX_Unmarshal(b []byte) error

type LabelingPipeline

type LabelingPipeline struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              LabelingPipelineSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status LabelingPipelineStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

LabelingPipeline represents the ETL flow from the data sources to a processed dataset, ready for training. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:subresource:status +kubebuilder:resource:path=labelingpipelines,singular=labelingpipeline,shortName=lp,categories={data,modela}
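As an illustration only, the following sketch constructs an in-memory LabelingPipeline with a minimal spec. The names and selector labels are hypothetical; the spec fields used are those documented under LabelingPipelineSpec below.

func exampleLabelingPipeline() *LabelingPipeline {
	version := "v0.0.1"              // hypothetical DataProduct version name
	output := "labeled-transactions" // hypothetical output labelset name
	return &LabelingPipeline{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "transactions-labeling",
			Namespace: "modela-data",
		},
		Spec: LabelingPipelineSpec{
			VersionName:        &version,
			DatasetSelector:    map[string]string{"labeled": "false"},
			RecipeNames:        []string{"clean-transactions"},
			OutputLabelsetName: &output,
		},
	}
}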

func ParseLabelPipeline

func ParseLabelPipeline(content string, user string, commit string) (*LabelingPipeline, error)

ParseLabelPipeline parses a LabelingPipeline from its serialized content.

func (*LabelingPipeline) AddConfiditions

func (lp *LabelingPipeline) AddConfiditions()

func (*LabelingPipeline) AddFinalizer

func (lp *LabelingPipeline) AddFinalizer()

func (*LabelingPipeline) Age

func (lp *LabelingPipeline) Age() string

func (*LabelingPipeline) Archived

func (lp *LabelingPipeline) Archived() bool

func (*LabelingPipeline) CreateOrUpdateCond

func (lp *LabelingPipeline) CreateOrUpdateCond(cond LabelingPipelineCondition)

Merge or update the condition.

func (*LabelingPipeline) DeepCopy

func (in *LabelingPipeline) DeepCopy() *LabelingPipeline

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipeline.

func (*LabelingPipeline) DeepCopyInto

func (in *LabelingPipeline) DeepCopyInto(out *LabelingPipeline)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipeline) DeepCopyObject

func (in *LabelingPipeline) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*LabelingPipeline) Default

func (feature *LabelingPipeline) Default()

No defaults are applied in the current release.

func (*LabelingPipeline) Descriptor

func (*LabelingPipeline) Descriptor() ([]byte, []int)

func (*LabelingPipeline) GetCond

func (*LabelingPipeline) GetCondIdx

func (*LabelingPipeline) HasFinalizer

func (lp *LabelingPipeline) HasFinalizer() bool

func (*LabelingPipeline) IsGitObj

func (lp *LabelingPipeline) IsGitObj() bool

func (*LabelingPipeline) IsReady

func (lp *LabelingPipeline) IsReady() bool

func (*LabelingPipeline) Key

func (lp *LabelingPipeline) Key() string

func (*LabelingPipeline) LabelWithCommit

func (lp *LabelingPipeline) LabelWithCommit(commit string, uname string, branch string)

func (*LabelingPipeline) MarkArchived

func (lp *LabelingPipeline) MarkArchived()

func (*LabelingPipeline) MarkReady

func (lp *LabelingPipeline) MarkReady()

func (*LabelingPipeline) Marshal

func (m *LabelingPipeline) Marshal() (dAtA []byte, err error)

func (*LabelingPipeline) MarshalTo

func (m *LabelingPipeline) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipeline) MarshalToSizedBuffer

func (m *LabelingPipeline) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipeline) ProtoMessage

func (*LabelingPipeline) ProtoMessage()

func (*LabelingPipeline) RemoveFinalizer

func (lp *LabelingPipeline) RemoveFinalizer()

func (*LabelingPipeline) RepEntry

func (lp *LabelingPipeline) RepEntry() (string, error)

func (*LabelingPipeline) RepPath

func (lp *LabelingPipeline) RepPath(root string) (string, error)

Return the on-disk repository location.

func (*LabelingPipeline) Reset

func (m *LabelingPipeline) Reset()

func (*LabelingPipeline) SetChanged

func (lp *LabelingPipeline) SetChanged()

func (*LabelingPipeline) SetupWebhookWithManager

func (in *LabelingPipeline) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*LabelingPipeline) Size

func (m *LabelingPipeline) Size() (n int)

func (*LabelingPipeline) String

func (this *LabelingPipeline) String() string

func (*LabelingPipeline) ToYamlFile

func (lp *LabelingPipeline) ToYamlFile() ([]byte, error)

func (*LabelingPipeline) Unmarshal

func (m *LabelingPipeline) Unmarshal(dAtA []byte) error

func (*LabelingPipeline) UpdateRunStatus added in v0.4.612

func (in *LabelingPipeline) UpdateRunStatus(run LabelingPipelineRun)

func (*LabelingPipeline) ValidateCreate

func (feature *LabelingPipeline) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*LabelingPipeline) ValidateDelete

func (r *LabelingPipeline) ValidateDelete() error

func (*LabelingPipeline) ValidateUpdate

func (feature *LabelingPipeline) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*LabelingPipeline) XXX_DiscardUnknown

func (m *LabelingPipeline) XXX_DiscardUnknown()

func (*LabelingPipeline) XXX_Marshal

func (m *LabelingPipeline) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipeline) XXX_Merge

func (m *LabelingPipeline) XXX_Merge(src proto.Message)

func (*LabelingPipeline) XXX_Size

func (m *LabelingPipeline) XXX_Size() int

func (*LabelingPipeline) XXX_Unmarshal

func (m *LabelingPipeline) XXX_Unmarshal(b []byte) error

type LabelingPipelineCondition

type LabelingPipelineCondition struct {
	// Type of account condition.
	Type LabelingPipelineConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=LabelingPipelineConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,3,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

LabelingPipelineCondition describes the state of a LabelingPipeline at a certain point.
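For illustration, a minimal sketch of recording a Ready condition via CreateOrUpdateCond; the reason and message strings are hypothetical. MarkReady offers a shorthand for the common case.

func markPipelineReady(lp *LabelingPipeline) {
	now := metav1.Now()
	lp.CreateOrUpdateCond(LabelingPipelineCondition{
		Type:               LabelPipelineReady,
		Status:             v1.ConditionTrue,
		LastTransitionTime: &now,
		Reason:             "PipelineReady", // hypothetical reason
		Message:            "the labeling pipeline is ready to run",
	})
}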

func (*LabelingPipelineCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipelineCondition.

func (*LabelingPipelineCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipelineCondition) Descriptor

func (*LabelingPipelineCondition) Descriptor() ([]byte, []int)

func (*LabelingPipelineCondition) Marshal

func (m *LabelingPipelineCondition) Marshal() (dAtA []byte, err error)

func (*LabelingPipelineCondition) MarshalTo

func (m *LabelingPipelineCondition) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipelineCondition) MarshalToSizedBuffer

func (m *LabelingPipelineCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipelineCondition) ProtoMessage

func (*LabelingPipelineCondition) ProtoMessage()

func (*LabelingPipelineCondition) Reset

func (m *LabelingPipelineCondition) Reset()

func (*LabelingPipelineCondition) Size

func (m *LabelingPipelineCondition) Size() (n int)

func (*LabelingPipelineCondition) String

func (this *LabelingPipelineCondition) String() string

func (*LabelingPipelineCondition) Unmarshal

func (m *LabelingPipelineCondition) Unmarshal(dAtA []byte) error

func (*LabelingPipelineCondition) XXX_DiscardUnknown

func (m *LabelingPipelineCondition) XXX_DiscardUnknown()

func (*LabelingPipelineCondition) XXX_Marshal

func (m *LabelingPipelineCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipelineCondition) XXX_Merge

func (m *LabelingPipelineCondition) XXX_Merge(src proto.Message)

func (*LabelingPipelineCondition) XXX_Size

func (m *LabelingPipelineCondition) XXX_Size() int

func (*LabelingPipelineCondition) XXX_Unmarshal

func (m *LabelingPipelineCondition) XXX_Unmarshal(b []byte) error

type LabelingPipelineConditionType

type LabelingPipelineConditionType string

LabelingPipelineConditionType is the type of a condition on a LabelingPipeline.

const (
	LabelPipelineReady LabelingPipelineConditionType = "Ready"
	LabelPipelineSaved LabelingPipelineConditionType = "Saved"
)

LabelingPipeline condition types.

type LabelingPipelineList

type LabelingPipelineList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []LabelingPipeline `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true LabelingPipelineList contains a list of LabelingPipeline resources.

func (*LabelingPipelineList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipelineList.

func (*LabelingPipelineList) DeepCopyInto

func (in *LabelingPipelineList) DeepCopyInto(out *LabelingPipelineList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipelineList) DeepCopyObject

func (in *LabelingPipelineList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*LabelingPipelineList) Descriptor

func (*LabelingPipelineList) Descriptor() ([]byte, []int)

func (*LabelingPipelineList) Marshal

func (m *LabelingPipelineList) Marshal() (dAtA []byte, err error)

func (*LabelingPipelineList) MarshalTo

func (m *LabelingPipelineList) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipelineList) MarshalToSizedBuffer

func (m *LabelingPipelineList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipelineList) ProtoMessage

func (*LabelingPipelineList) ProtoMessage()

func (*LabelingPipelineList) Reset

func (m *LabelingPipelineList) Reset()

func (*LabelingPipelineList) Size

func (m *LabelingPipelineList) Size() (n int)

func (*LabelingPipelineList) String

func (this *LabelingPipelineList) String() string

func (*LabelingPipelineList) Unmarshal

func (m *LabelingPipelineList) Unmarshal(dAtA []byte) error

func (*LabelingPipelineList) XXX_DiscardUnknown

func (m *LabelingPipelineList) XXX_DiscardUnknown()

func (*LabelingPipelineList) XXX_Marshal

func (m *LabelingPipelineList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipelineList) XXX_Merge

func (m *LabelingPipelineList) XXX_Merge(src proto.Message)

func (*LabelingPipelineList) XXX_Size

func (m *LabelingPipelineList) XXX_Size() int

func (*LabelingPipelineList) XXX_Unmarshal

func (m *LabelingPipelineList) XXX_Unmarshal(b []byte) error

type LabelingPipelineRun

type LabelingPipelineRun struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              LabelingPipelineRunSpec   `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	Status            LabelingPipelineRunStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

LabelingPipelineRun represents a single run of a LabelingPipeline. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Pipeline",type="string",JSONPath=".spec.labelPipelineName" +kubebuilder:printcolumn:name="StartTime",type="date",JSONPath=".status.startTime",priority=1 +kubebuilder:printcolumn:name="CompletionTime",type="date",JSONPath=".status.completionTime",priority=1 +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=labelingpipelineruns,singular=labelingpipelinerun,shortName=lpr,categories={data,modela} +kubebuilder:subresource:status

func ParseLabelPipelineRun

func ParseLabelPipelineRun(content string, user string, commit string) (*LabelingPipelineRun, error)

ParseLabelPipelineRun parses a LabelingPipelineRun from its serialized content.

func ParseLabelPipelineRunYaml

func ParseLabelPipelineRunYaml(content []byte) (*LabelingPipelineRun, error)

func (*LabelingPipelineRun) AddConfiditions

func (lpr *LabelingPipelineRun) AddConfiditions()

func (*LabelingPipelineRun) AddFinalizer

func (lpr *LabelingPipelineRun) AddFinalizer()

func (*LabelingPipelineRun) Age

func (lpr *LabelingPipelineRun) Age() string

func (*LabelingPipelineRun) CreateOrUpdateCond

func (lpr *LabelingPipelineRun) CreateOrUpdateCond(cond LabelingPipelineRunCondition)

Merge or update the condition.

func (*LabelingPipelineRun) DeepCopy

func (in *LabelingPipelineRun) DeepCopy() *LabelingPipelineRun

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipelineRun.

func (*LabelingPipelineRun) DeepCopyInto

func (in *LabelingPipelineRun) DeepCopyInto(out *LabelingPipelineRun)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipelineRun) DeepCopyObject

func (in *LabelingPipelineRun) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*LabelingPipelineRun) Default

func (feature *LabelingPipelineRun) Default()

No defaults are applied in the current release.

func (*LabelingPipelineRun) Descriptor

func (*LabelingPipelineRun) Descriptor() ([]byte, []int)

func (*LabelingPipelineRun) GetCondIdx

func (*LabelingPipelineRun) HasFinalizer

func (lpr *LabelingPipelineRun) HasFinalizer() bool

func (*LabelingPipelineRun) IsFailed added in v0.4.614

func (in *LabelingPipelineRun) IsFailed() bool

func (*LabelingPipelineRun) IsGitObj

func (lpr *LabelingPipelineRun) IsGitObj() bool

func (*LabelingPipelineRun) IsReady

func (lpr *LabelingPipelineRun) IsReady() bool

func (*LabelingPipelineRun) Key

func (lpr *LabelingPipelineRun) Key() string

func (*LabelingPipelineRun) LabelWithCommit

func (lpr *LabelingPipelineRun) LabelWithCommit(commit string, uname string, branch string)

func (*LabelingPipelineRun) MarkComplete

func (run *LabelingPipelineRun) MarkComplete()

func (*LabelingPipelineRun) MarkFailed

func (run *LabelingPipelineRun) MarkFailed(err error)

func (*LabelingPipelineRun) MarkRunning

func (run *LabelingPipelineRun) MarkRunning()

func (*LabelingPipelineRun) Marshal

func (m *LabelingPipelineRun) Marshal() (dAtA []byte, err error)

func (*LabelingPipelineRun) MarshalTo

func (m *LabelingPipelineRun) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipelineRun) MarshalToSizedBuffer

func (m *LabelingPipelineRun) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipelineRun) ProtoMessage

func (*LabelingPipelineRun) ProtoMessage()

func (*LabelingPipelineRun) RemoveFinalizer

func (lpr *LabelingPipelineRun) RemoveFinalizer()

func (*LabelingPipelineRun) Reset

func (m *LabelingPipelineRun) Reset()

func (*LabelingPipelineRun) RunStatus added in v0.4.614

func (run *LabelingPipelineRun) RunStatus() *catalog.LastRunStatus

Return the state of the run as a catalog.LastRunStatus.

func (*LabelingPipelineRun) SetChanged

func (lpr *LabelingPipelineRun) SetChanged()

func (*LabelingPipelineRun) SetupWebhookWithManager

func (in *LabelingPipelineRun) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*LabelingPipelineRun) Size

func (m *LabelingPipelineRun) Size() (n int)

func (*LabelingPipelineRun) String

func (this *LabelingPipelineRun) String() string

func (*LabelingPipelineRun) ToYamlFile

func (lpr *LabelingPipelineRun) ToYamlFile() ([]byte, error)

func (*LabelingPipelineRun) Unmarshal

func (m *LabelingPipelineRun) Unmarshal(dAtA []byte) error

func (*LabelingPipelineRun) ValidateCreate

func (feature *LabelingPipelineRun) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*LabelingPipelineRun) ValidateDelete

func (r *LabelingPipelineRun) ValidateDelete() error

func (*LabelingPipelineRun) ValidateUpdate

func (feature *LabelingPipelineRun) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*LabelingPipelineRun) XXX_DiscardUnknown

func (m *LabelingPipelineRun) XXX_DiscardUnknown()

func (*LabelingPipelineRun) XXX_Marshal

func (m *LabelingPipelineRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipelineRun) XXX_Merge

func (m *LabelingPipelineRun) XXX_Merge(src proto.Message)

func (*LabelingPipelineRun) XXX_Size

func (m *LabelingPipelineRun) XXX_Size() int

func (*LabelingPipelineRun) XXX_Unmarshal

func (m *LabelingPipelineRun) XXX_Unmarshal(b []byte) error

type LabelingPipelineRunCondition

type LabelingPipelineRunCondition struct {
	// Type of account condition.
	Type LabelingPipelineRunConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=LabelConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,3,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

LabelingPipelineRunCondition describes the state of a LabelingPipelineRun at a certain point.

func (*LabelingPipelineRunCondition) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipelineRunCondition.

func (*LabelingPipelineRunCondition) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipelineRunCondition) Descriptor

func (*LabelingPipelineRunCondition) Descriptor() ([]byte, []int)

func (*LabelingPipelineRunCondition) Marshal

func (m *LabelingPipelineRunCondition) Marshal() (dAtA []byte, err error)

func (*LabelingPipelineRunCondition) MarshalTo

func (m *LabelingPipelineRunCondition) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipelineRunCondition) MarshalToSizedBuffer

func (m *LabelingPipelineRunCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipelineRunCondition) ProtoMessage

func (*LabelingPipelineRunCondition) ProtoMessage()

func (*LabelingPipelineRunCondition) Reset

func (m *LabelingPipelineRunCondition) Reset()

func (*LabelingPipelineRunCondition) Size

func (m *LabelingPipelineRunCondition) Size() (n int)

func (*LabelingPipelineRunCondition) String

func (this *LabelingPipelineRunCondition) String() string

func (*LabelingPipelineRunCondition) Unmarshal

func (m *LabelingPipelineRunCondition) Unmarshal(dAtA []byte) error

func (*LabelingPipelineRunCondition) XXX_DiscardUnknown

func (m *LabelingPipelineRunCondition) XXX_DiscardUnknown()

func (*LabelingPipelineRunCondition) XXX_Marshal

func (m *LabelingPipelineRunCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipelineRunCondition) XXX_Merge

func (m *LabelingPipelineRunCondition) XXX_Merge(src proto.Message)

func (*LabelingPipelineRunCondition) XXX_Size

func (m *LabelingPipelineRunCondition) XXX_Size() int

func (*LabelingPipelineRunCondition) XXX_Unmarshal

func (m *LabelingPipelineRunCondition) XXX_Unmarshal(b []byte) error

type LabelingPipelineRunConditionType

type LabelingPipelineRunConditionType string

LabelingPipelineRunConditionType is the type of a condition on a LabelingPipelineRun.

const (
	LabelingPipelineRunCompleted LabelingPipelineRunConditionType = "Completed"
	LabelingPipelineRunSaved     LabelingPipelineRunConditionType = "Saved"
)

LabelingPipelineRun condition types.

type LabelingPipelineRunList

type LabelingPipelineRunList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []LabelingPipelineRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true

func (*LabelingPipelineRunList) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipelineRunList.

func (*LabelingPipelineRunList) DeepCopyInto

func (in *LabelingPipelineRunList) DeepCopyInto(out *LabelingPipelineRunList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipelineRunList) DeepCopyObject

func (in *LabelingPipelineRunList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*LabelingPipelineRunList) Descriptor

func (*LabelingPipelineRunList) Descriptor() ([]byte, []int)

func (*LabelingPipelineRunList) Marshal

func (m *LabelingPipelineRunList) Marshal() (dAtA []byte, err error)

func (*LabelingPipelineRunList) MarshalTo

func (m *LabelingPipelineRunList) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipelineRunList) MarshalToSizedBuffer

func (m *LabelingPipelineRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipelineRunList) ProtoMessage

func (*LabelingPipelineRunList) ProtoMessage()

func (*LabelingPipelineRunList) Reset

func (m *LabelingPipelineRunList) Reset()

func (*LabelingPipelineRunList) Size

func (m *LabelingPipelineRunList) Size() (n int)

func (*LabelingPipelineRunList) String

func (this *LabelingPipelineRunList) String() string

func (*LabelingPipelineRunList) Unmarshal

func (m *LabelingPipelineRunList) Unmarshal(dAtA []byte) error

func (*LabelingPipelineRunList) XXX_DiscardUnknown

func (m *LabelingPipelineRunList) XXX_DiscardUnknown()

func (*LabelingPipelineRunList) XXX_Marshal

func (m *LabelingPipelineRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipelineRunList) XXX_Merge

func (m *LabelingPipelineRunList) XXX_Merge(src proto.Message)

func (*LabelingPipelineRunList) XXX_Size

func (m *LabelingPipelineRunList) XXX_Size() int

func (*LabelingPipelineRunList) XXX_Unmarshal

func (m *LabelingPipelineRunList) XXX_Unmarshal(b []byte) error

type LabelingPipelineRunPhase

type LabelingPipelineRunPhase string

const (
	LabelingPipelineRunPhasePending   LabelingPipelineRunPhase = "Pending"
	LabelingPipelineRunPhaseRunning   LabelingPipelineRunPhase = "Running"
	LabelingPipelineRunPhaseCompleted LabelingPipelineRunPhase = "Completed"
	LabelingPipelineRunPhaseFailed    LabelingPipelineRunPhase = "Failed"
)
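A small sketch, purely for illustration, of treating the phase as a terminal/non-terminal signal when watching runs:

func isTerminalPhase(phase LabelingPipelineRunPhase) bool {
	switch phase {
	case LabelingPipelineRunPhaseCompleted, LabelingPipelineRunPhaseFailed:
		return true // the run has finished, successfully or not
	default:
		return false // Pending and Running are still in progress
	}
}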

type LabelingPipelineRunSpec

type LabelingPipelineRunSpec struct {
	// The owner of the LabelingPipelineRunSpec
	// +kubebuilder:default:="no-one"
	Owner *string `json:"owner" protobuf:"bytes,1,opt,name=owner"`
	// The product version for the pipeline
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName" protobuf:"bytes,2,opt,name=versionName"`
	// LabelPipelineName specifies the name of the LabelingPipeline to run
	// +kubebuilder:validation:MaxLength=64
	// +kubebuilder:default:=""
	LabelPipelineName *string `json:"labelPipelineName" protobuf:"bytes,3,opt,name=labelPipelineName"`
	// Resources specifies the hardware resource requirements for running the labeling pipeline
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,4,opt,name=resources"`
	// TTL.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,5,opt,name=ttl"`
}

LabelingPipelineRunSpec defines the desired state of a LabelingPipelineRun.
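A minimal sketch of a run spec referencing an existing LabelingPipeline; all values shown are hypothetical.

func exampleRunSpec() LabelingPipelineRunSpec {
	owner := "no-one"                   // hypothetical owner account
	version := "v0.0.1"                 // hypothetical DataProduct version name
	pipeline := "transactions-labeling" // name of an existing LabelingPipeline
	return LabelingPipelineRunSpec{
		Owner:             &owner,
		VersionName:       &version,
		LabelPipelineName: &pipeline,
	}
}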

func (*LabelingPipelineRunSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipelineRunSpec.

func (*LabelingPipelineRunSpec) DeepCopyInto

func (in *LabelingPipelineRunSpec) DeepCopyInto(out *LabelingPipelineRunSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipelineRunSpec) Descriptor

func (*LabelingPipelineRunSpec) Descriptor() ([]byte, []int)

func (*LabelingPipelineRunSpec) Marshal

func (m *LabelingPipelineRunSpec) Marshal() (dAtA []byte, err error)

func (*LabelingPipelineRunSpec) MarshalTo

func (m *LabelingPipelineRunSpec) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipelineRunSpec) MarshalToSizedBuffer

func (m *LabelingPipelineRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipelineRunSpec) ProtoMessage

func (*LabelingPipelineRunSpec) ProtoMessage()

func (*LabelingPipelineRunSpec) Reset

func (m *LabelingPipelineRunSpec) Reset()

func (*LabelingPipelineRunSpec) Size

func (m *LabelingPipelineRunSpec) Size() (n int)

func (*LabelingPipelineRunSpec) String

func (this *LabelingPipelineRunSpec) String() string

func (*LabelingPipelineRunSpec) Unmarshal

func (m *LabelingPipelineRunSpec) Unmarshal(dAtA []byte) error

func (*LabelingPipelineRunSpec) XXX_DiscardUnknown

func (m *LabelingPipelineRunSpec) XXX_DiscardUnknown()

func (*LabelingPipelineRunSpec) XXX_Marshal

func (m *LabelingPipelineRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipelineRunSpec) XXX_Merge

func (m *LabelingPipelineRunSpec) XXX_Merge(src proto.Message)

func (*LabelingPipelineRunSpec) XXX_Size

func (m *LabelingPipelineRunSpec) XXX_Size() int

func (*LabelingPipelineRunSpec) XXX_Unmarshal

func (m *LabelingPipelineRunSpec) XXX_Unmarshal(b []byte) error

type LabelingPipelineRunStatus

type LabelingPipelineRunStatus struct {
	// The phase of the labeling pipeline run
	// +kubebuilder:default:="Pending"
	Phase LabelingPipelineRunPhase `json:"phase" protobuf:"bytes,1,opt,name=phase"`
	// The start time of the run
	StartTime *metav1.Time `json:"startTime" protobuf:"bytes,2,opt,name=startTime"`
	// The end time of the run
	EndTime *metav1.Time `json:"endTime" protobuf:"bytes,3,opt,name=endTime"`
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,4,opt,name=observedGeneration"`
	// What triggered the run
	//+kubebuilder:validation:Optional
	TriggeredBy catalog.TriggerType `json:"triggeredBy,omitempty" protobuf:"bytes,5,opt,name=triggeredBy"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,6,opt,name=lastUpdated"`
	// The reason for a terminal failure, if one occurred
	// (borrowed from the Cluster API controller)
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,7,opt,name=failureReason"`
	// The message for a terminal failure, if one occurred
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,8,opt,name=failureMessage"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []LabelingPipelineRunCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,9,rep,name=conditions"`
}

LabelingPipelineRunStatus defines the observed state of a LabelingPipelineRun.

func (*LabelingPipelineRunStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipelineRunStatus.

func (*LabelingPipelineRunStatus) DeepCopyInto

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipelineRunStatus) Descriptor

func (*LabelingPipelineRunStatus) Descriptor() ([]byte, []int)

func (*LabelingPipelineRunStatus) Marshal

func (m *LabelingPipelineRunStatus) Marshal() (dAtA []byte, err error)

func (*LabelingPipelineRunStatus) MarshalTo

func (m *LabelingPipelineRunStatus) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipelineRunStatus) MarshalToSizedBuffer

func (m *LabelingPipelineRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipelineRunStatus) ProtoMessage

func (*LabelingPipelineRunStatus) ProtoMessage()

func (*LabelingPipelineRunStatus) Reset

func (m *LabelingPipelineRunStatus) Reset()

func (*LabelingPipelineRunStatus) Size

func (m *LabelingPipelineRunStatus) Size() (n int)

func (*LabelingPipelineRunStatus) String

func (this *LabelingPipelineRunStatus) String() string

func (*LabelingPipelineRunStatus) Unmarshal

func (m *LabelingPipelineRunStatus) Unmarshal(dAtA []byte) error

func (*LabelingPipelineRunStatus) XXX_DiscardUnknown

func (m *LabelingPipelineRunStatus) XXX_DiscardUnknown()

func (*LabelingPipelineRunStatus) XXX_Marshal

func (m *LabelingPipelineRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipelineRunStatus) XXX_Merge

func (m *LabelingPipelineRunStatus) XXX_Merge(src proto.Message)

func (*LabelingPipelineRunStatus) XXX_Size

func (m *LabelingPipelineRunStatus) XXX_Size() int

func (*LabelingPipelineRunStatus) XXX_Unmarshal

func (m *LabelingPipelineRunStatus) XXX_Unmarshal(b []byte) error

type LabelingPipelineSpec

type LabelingPipelineSpec struct {
	// The product version for this pipeline
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
	// User provided description
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,2,opt,name=description"`
	// DatasetSelector is used to select the unlabeled datasets for labeling
	// +kubebuilder:validation:Optional
	DatasetSelector map[string]string `json:"datasetSelector,omitempty" protobuf:"bytes,3,opt,name=datasetSelector"`
	// The names of the recipes for this pipeline.
	// +kubebuilder:validation:Optional
	RecipeNames []string `json:"recipeNames,omitempty" protobuf:"bytes,4,rep,name=recipeNames"`
	// The name of the output labelset of the pipeline
	// +kubebuilder:validation:Optional
	OutputLabelsetName *string `json:"outputLabelset,omitempty" protobuf:"bytes,5,opt,name=outputLabelset"`
	// Schedule for running the pipeline
	// +kubebuilder:validation:Optional
	Schedule catalog.RunSchedule `json:"schedule,omitempty" protobuf:"bytes,6,opt,name=schedule"`
	// The owner account name
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,7,opt,name=owner"`
	// Resources specifies the hardware resource requirements for running the labeling pipeline
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,8,opt,name=resources"`
	// ActiveDeadlineSeconds is the deadline setup on jobs for this labeling pipeline.
	// +kubebuilder:default:=600
	// +kubebuilder:validation:Optional
	ActiveDeadlineSeconds *int64 `json:"activeDeadlineSeconds,omitempty" protobuf:"varint,9,opt,name=activeDeadlineSeconds"`
	// Paused requests that the labeling pipeline be suspended
	// +kubebuilder:default:=false
	Paused *bool `json:"paused,omitempty" protobuf:"varint,10,opt,name=paused"`
	// TTL.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,11,opt,name=ttl"`
}

LabelingPipelineSpec defines the desired state of a LabelingPipeline
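The sketch below, for illustration only, pauses a pipeline and tightens its job deadline by setting the corresponding spec fields; the deadline value is hypothetical.

func pauseLabelingPipeline(spec *LabelingPipelineSpec) {
	paused := true
	deadline := int64(1200) // hypothetical deadline, in seconds
	spec.Paused = &paused
	spec.ActiveDeadlineSeconds = &deadline
}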

func (*LabelingPipelineSpec) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipelineSpec.

func (*LabelingPipelineSpec) DeepCopyInto

func (in *LabelingPipelineSpec) DeepCopyInto(out *LabelingPipelineSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipelineSpec) Descriptor

func (*LabelingPipelineSpec) Descriptor() ([]byte, []int)

func (*LabelingPipelineSpec) Marshal

func (m *LabelingPipelineSpec) Marshal() (dAtA []byte, err error)

func (*LabelingPipelineSpec) MarshalTo

func (m *LabelingPipelineSpec) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipelineSpec) MarshalToSizedBuffer

func (m *LabelingPipelineSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipelineSpec) ProtoMessage

func (*LabelingPipelineSpec) ProtoMessage()

func (*LabelingPipelineSpec) Reset

func (m *LabelingPipelineSpec) Reset()

func (*LabelingPipelineSpec) Size

func (m *LabelingPipelineSpec) Size() (n int)

func (*LabelingPipelineSpec) String

func (this *LabelingPipelineSpec) String() string

func (*LabelingPipelineSpec) Unmarshal

func (m *LabelingPipelineSpec) Unmarshal(dAtA []byte) error

func (*LabelingPipelineSpec) XXX_DiscardUnknown

func (m *LabelingPipelineSpec) XXX_DiscardUnknown()

func (*LabelingPipelineSpec) XXX_Marshal

func (m *LabelingPipelineSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipelineSpec) XXX_Merge

func (m *LabelingPipelineSpec) XXX_Merge(src proto.Message)

func (*LabelingPipelineSpec) XXX_Size

func (m *LabelingPipelineSpec) XXX_Size() int

func (*LabelingPipelineSpec) XXX_Unmarshal

func (m *LabelingPipelineSpec) XXX_Unmarshal(b []byte) error

type LabelingPipelineStatus

type LabelingPipelineStatus struct {
	// LastRun is the status of the last labeling pipeline run
	//+kubebuilder:validation:Optional
	LastRun catalog.LastRunStatus `json:"lastRun,omitempty" protobuf:"bytes,1,opt,name=lastRun"`
	// The time of the next schedule run
	//+kubebuilder:validation:Optional
	NextRun *metav1.Time `json:"nextRun,omitempty" protobuf:"bytes,2,opt,name=nextRun"`
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,3,opt,name=observedGeneration"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,4,opt,name=lastUpdated"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []LabelingPipelineCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,5,rep,name=conditions"`
}

LabelingPipelineStatus is the observed state of the LabelingPipeline object.

func (*LabelingPipelineStatus) DeepCopy

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingPipelineStatus.

func (*LabelingPipelineStatus) DeepCopyInto

func (in *LabelingPipelineStatus) DeepCopyInto(out *LabelingPipelineStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingPipelineStatus) Descriptor

func (*LabelingPipelineStatus) Descriptor() ([]byte, []int)

func (*LabelingPipelineStatus) Marshal

func (m *LabelingPipelineStatus) Marshal() (dAtA []byte, err error)

func (*LabelingPipelineStatus) MarshalTo

func (m *LabelingPipelineStatus) MarshalTo(dAtA []byte) (int, error)

func (*LabelingPipelineStatus) MarshalToSizedBuffer

func (m *LabelingPipelineStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingPipelineStatus) ProtoMessage

func (*LabelingPipelineStatus) ProtoMessage()

func (*LabelingPipelineStatus) Reset

func (m *LabelingPipelineStatus) Reset()

func (*LabelingPipelineStatus) Size

func (m *LabelingPipelineStatus) Size() (n int)

func (*LabelingPipelineStatus) String

func (this *LabelingPipelineStatus) String() string

func (*LabelingPipelineStatus) Unmarshal

func (m *LabelingPipelineStatus) Unmarshal(dAtA []byte) error

func (*LabelingPipelineStatus) XXX_DiscardUnknown

func (m *LabelingPipelineStatus) XXX_DiscardUnknown()

func (*LabelingPipelineStatus) XXX_Marshal

func (m *LabelingPipelineStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingPipelineStatus) XXX_Merge

func (m *LabelingPipelineStatus) XXX_Merge(src proto.Message)

func (*LabelingPipelineStatus) XXX_Size

func (m *LabelingPipelineStatus) XXX_Size() int

func (*LabelingPipelineStatus) XXX_Unmarshal

func (m *LabelingPipelineStatus) XXX_Unmarshal(b []byte) error

type LabelingRule added in v0.4.821

type LabelingRule struct {
	Column   string     `json:"column,omitempty" protobuf:"bytes,1,opt,name=column"`
	Operator catalog.Op `json:"operator,omitempty" protobuf:"bytes,2,opt,name=operator"`
	Value    string     `json:"value,omitempty" protobuf:"bytes,3,opt,name=value"`
}

func (*LabelingRule) DeepCopy added in v0.4.821

func (in *LabelingRule) DeepCopy() *LabelingRule

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingRule.

func (*LabelingRule) DeepCopyInto added in v0.4.821

func (in *LabelingRule) DeepCopyInto(out *LabelingRule)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingRule) Descriptor added in v0.4.821

func (*LabelingRule) Descriptor() ([]byte, []int)

func (*LabelingRule) Marshal added in v0.4.821

func (m *LabelingRule) Marshal() (dAtA []byte, err error)

func (*LabelingRule) MarshalTo added in v0.4.821

func (m *LabelingRule) MarshalTo(dAtA []byte) (int, error)

func (*LabelingRule) MarshalToSizedBuffer added in v0.4.821

func (m *LabelingRule) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingRule) ProtoMessage added in v0.4.821

func (*LabelingRule) ProtoMessage()

func (*LabelingRule) Reset added in v0.4.821

func (m *LabelingRule) Reset()

func (*LabelingRule) Size added in v0.4.821

func (m *LabelingRule) Size() (n int)

func (*LabelingRule) String added in v0.4.821

func (this *LabelingRule) String() string

func (*LabelingRule) Unmarshal added in v0.4.821

func (m *LabelingRule) Unmarshal(dAtA []byte) error

func (*LabelingRule) XXX_DiscardUnknown added in v0.4.821

func (m *LabelingRule) XXX_DiscardUnknown()

func (*LabelingRule) XXX_Marshal added in v0.4.821

func (m *LabelingRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingRule) XXX_Merge added in v0.4.821

func (m *LabelingRule) XXX_Merge(src proto.Message)

func (*LabelingRule) XXX_Size added in v0.4.821

func (m *LabelingRule) XXX_Size() int

func (*LabelingRule) XXX_Unmarshal added in v0.4.821

func (m *LabelingRule) XXX_Unmarshal(b []byte) error

type LabelingSpec added in v0.4.821

type LabelingSpec struct {
	// If true, labeling is enabled.
	Enabled *bool `json:"enabled,omitempty" protobuf:"bytes,1,opt,name=enabled"`
	// The name of the column that will hold the result.
	ResultColumn string `json:"resultColumn,omitempty" protobuf:"bytes,2,opt,name=resultColumn"`
	// The list of positive labeling rules.
	Positive []LabelingRule `json:"positive,omitempty" protobuf:"bytes,3,rep,name=positive"`
	// The list of negative labeling rules.
	Negative []LabelingRule `json:"negative,omitempty" protobuf:"bytes,4,rep,name=negative"`
}
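A hypothetical sketch of a LabelingSpec with one positive and one negative rule. It assumes catalog.Op is a string-based operator type; the operator values, column names, and thresholds below are placeholders, not values defined by this package.

func exampleLabelingSpec() LabelingSpec {
	enabled := true
	return LabelingSpec{
		Enabled:      &enabled,
		ResultColumn: "is_fraud", // hypothetical result column
		Positive: []LabelingRule{
			// replace the operator literal with a real catalog.Op constant
			{Column: "amount", Operator: catalog.Op("gt"), Value: "10000"},
		},
		Negative: []LabelingRule{
			{Column: "amount", Operator: catalog.Op("lt"), Value: "10"},
		},
	}
}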

func (*LabelingSpec) DeepCopy added in v0.4.821

func (in *LabelingSpec) DeepCopy() *LabelingSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LabelingSpec.

func (*LabelingSpec) DeepCopyInto added in v0.4.821

func (in *LabelingSpec) DeepCopyInto(out *LabelingSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LabelingSpec) Descriptor added in v0.4.821

func (*LabelingSpec) Descriptor() ([]byte, []int)

func (*LabelingSpec) Marshal added in v0.4.821

func (m *LabelingSpec) Marshal() (dAtA []byte, err error)

func (*LabelingSpec) MarshalTo added in v0.4.821

func (m *LabelingSpec) MarshalTo(dAtA []byte) (int, error)

func (*LabelingSpec) MarshalToSizedBuffer added in v0.4.821

func (m *LabelingSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LabelingSpec) ProtoMessage added in v0.4.821

func (*LabelingSpec) ProtoMessage()

func (*LabelingSpec) Reset added in v0.4.821

func (m *LabelingSpec) Reset()

func (*LabelingSpec) Size added in v0.4.821

func (m *LabelingSpec) Size() (n int)

func (*LabelingSpec) String added in v0.4.821

func (this *LabelingSpec) String() string

func (*LabelingSpec) Unmarshal added in v0.4.821

func (m *LabelingSpec) Unmarshal(dAtA []byte) error

func (*LabelingSpec) XXX_DiscardUnknown added in v0.4.821

func (m *LabelingSpec) XXX_DiscardUnknown()

func (*LabelingSpec) XXX_Marshal added in v0.4.821

func (m *LabelingSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LabelingSpec) XXX_Merge added in v0.4.821

func (m *LabelingSpec) XXX_Merge(src proto.Message)

func (*LabelingSpec) XXX_Size added in v0.4.821

func (m *LabelingSpec) XXX_Size() int

func (*LabelingSpec) XXX_Unmarshal added in v0.4.821

func (m *LabelingSpec) XXX_Unmarshal(b []byte) error

type LineChartSpec

type LineChartSpec struct {
	// Dataset is the name of the dataset
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// The name of the X-axis column
	// +kubebuilder:validation:Optional
	X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"`
	// The name of the Y-axis column
	// +kubebuilder:validation:Optional
	Y *string `json:"y,omitempty" protobuf:"bytes,3,opt,name=y"`
	// If true, show the chart legend
	// +kubebuilder:validation:Optional
	Legend *bool `json:"legend,omitempty" protobuf:"varint,4,opt,name=legend"`
}
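For illustration, a minimal sketch of a line chart definition over a hypothetical dataset:

func exampleLineChart() LineChartSpec {
	dataset := "sales-2022" // hypothetical Dataset name
	x := "month"
	y := "revenue"
	legend := true
	return LineChartSpec{
		DatasetName: &dataset,
		X:           &x,
		Y:           &y,
		Legend:      &legend,
	}
}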

func (*LineChartSpec) DeepCopy

func (in *LineChartSpec) DeepCopy() *LineChartSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LineChartSpec.

func (*LineChartSpec) DeepCopyInto

func (in *LineChartSpec) DeepCopyInto(out *LineChartSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*LineChartSpec) Descriptor

func (*LineChartSpec) Descriptor() ([]byte, []int)

func (*LineChartSpec) Marshal

func (m *LineChartSpec) Marshal() (dAtA []byte, err error)

func (*LineChartSpec) MarshalTo

func (m *LineChartSpec) MarshalTo(dAtA []byte) (int, error)

func (*LineChartSpec) MarshalToSizedBuffer

func (m *LineChartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*LineChartSpec) ProtoMessage

func (*LineChartSpec) ProtoMessage()

func (*LineChartSpec) Reset

func (m *LineChartSpec) Reset()

func (*LineChartSpec) Size

func (m *LineChartSpec) Size() (n int)

func (*LineChartSpec) String

func (this *LineChartSpec) String() string

func (*LineChartSpec) Unmarshal

func (m *LineChartSpec) Unmarshal(dAtA []byte) error

func (*LineChartSpec) XXX_DiscardUnknown

func (m *LineChartSpec) XXX_DiscardUnknown()

func (*LineChartSpec) XXX_Marshal

func (m *LineChartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*LineChartSpec) XXX_Merge

func (m *LineChartSpec) XXX_Merge(src proto.Message)

func (*LineChartSpec) XXX_Size

func (m *LineChartSpec) XXX_Size() int

func (*LineChartSpec) XXX_Unmarshal

func (m *LineChartSpec) XXX_Unmarshal(b []byte) error

type MaterializationSpec

type MaterializationSpec struct {
	// If true, update the online store
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Online *bool `json:"online,omitempty" protobuf:"varint,1,opt,name=online"`
	// If true, update the offline store.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Offline *bool `json:"offline,omitempty" protobuf:"varint,2,opt,name=offline"`
	// +kubebuilder:validation:Optional
	StartDate *metav1.Time `json:"startDate,omitempty" protobuf:"bytes,3,opt,name=startDate"`
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=""
	ScheduleInterval *string `json:"scheduleInterval,omitempty" protobuf:"bytes,4,opt,name=scheduleInterval"`
	// +kubebuilder:validation:Optional
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:default:=0
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,5,opt,name=ttl"`
	// The number of days of historical data to backfill into the feature store.
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=21
	// +kubebuilder:validation:Minimum=0
	Backfill *int32 `json:"backfill,omitempty" protobuf:"varint,6,opt,name=backfill"`
}
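A hypothetical sketch enabling both the online and offline stores with an hourly schedule; the interval string format is an assumption, not prescribed by this package.

func exampleMaterialization() MaterializationSpec {
	online := true
	offline := true
	interval := "@hourly" // hypothetical schedule interval format
	backfill := int32(21) // matches the kubebuilder default
	return MaterializationSpec{
		Online:           &online,
		Offline:          &offline,
		ScheduleInterval: &interval,
		Backfill:         &backfill,
	}
}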

func (*MaterializationSpec) DeepCopy

func (in *MaterializationSpec) DeepCopy() *MaterializationSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializationSpec.

func (*MaterializationSpec) DeepCopyInto

func (in *MaterializationSpec) DeepCopyInto(out *MaterializationSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*MaterializationSpec) Descriptor

func (*MaterializationSpec) Descriptor() ([]byte, []int)

func (*MaterializationSpec) Marshal

func (m *MaterializationSpec) Marshal() (dAtA []byte, err error)

func (*MaterializationSpec) MarshalTo

func (m *MaterializationSpec) MarshalTo(dAtA []byte) (int, error)

func (*MaterializationSpec) MarshalToSizedBuffer

func (m *MaterializationSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*MaterializationSpec) ProtoMessage

func (*MaterializationSpec) ProtoMessage()

func (*MaterializationSpec) Reset

func (m *MaterializationSpec) Reset()

func (*MaterializationSpec) Size

func (m *MaterializationSpec) Size() (n int)

func (*MaterializationSpec) String

func (this *MaterializationSpec) String() string

func (*MaterializationSpec) Unmarshal

func (m *MaterializationSpec) Unmarshal(dAtA []byte) error

func (*MaterializationSpec) XXX_DiscardUnknown

func (m *MaterializationSpec) XXX_DiscardUnknown()

func (*MaterializationSpec) XXX_Marshal

func (m *MaterializationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*MaterializationSpec) XXX_Merge

func (m *MaterializationSpec) XXX_Merge(src proto.Message)

func (*MaterializationSpec) XXX_Size

func (m *MaterializationSpec) XXX_Size() int

func (*MaterializationSpec) XXX_Unmarshal

func (m *MaterializationSpec) XXX_Unmarshal(b []byte) error

type MetricSpec

type MetricSpec struct {
	// Dataset is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// Column is the name of the column
	// +kubebuilder:validation:Optional
	Column *string `json:"column,omitempty" protobuf:"bytes,2,opt,name=column"`
	// Row is the row number
	// +kubebuilder:validation:Optional
	Row *int32 `json:"row,omitempty" protobuf:"varint,3,opt,name=row"`
	// Scalar is a constant value
	// +kubebuilder:validation:Optional
	Scalar *string `json:"scalar,omitempty" protobuf:"bytes,4,opt,name=scalar"`
}
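For illustration, a metric can point either at a cell (dataset, column, row) or at a constant scalar; the sketch below shows the cell form with hypothetical names.

func exampleMetric() MetricSpec {
	dataset := "sales-2022" // hypothetical Dataset name
	column := "revenue"
	row := int32(0)
	return MetricSpec{
		DatasetName: &dataset,
		Column:      &column,
		Row:         &row,
	}
}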

func (*MetricSpec) DeepCopy

func (in *MetricSpec) DeepCopy() *MetricSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricSpec.

func (*MetricSpec) DeepCopyInto

func (in *MetricSpec) DeepCopyInto(out *MetricSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*MetricSpec) Descriptor

func (*MetricSpec) Descriptor() ([]byte, []int)

func (*MetricSpec) Marshal

func (m *MetricSpec) Marshal() (dAtA []byte, err error)

func (*MetricSpec) MarshalTo

func (m *MetricSpec) MarshalTo(dAtA []byte) (int, error)

func (*MetricSpec) MarshalToSizedBuffer

func (m *MetricSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*MetricSpec) ProtoMessage

func (*MetricSpec) ProtoMessage()

func (*MetricSpec) Reset

func (m *MetricSpec) Reset()

func (*MetricSpec) Size

func (m *MetricSpec) Size() (n int)

func (*MetricSpec) String

func (this *MetricSpec) String() string

func (*MetricSpec) Unmarshal

func (m *MetricSpec) Unmarshal(dAtA []byte) error

func (*MetricSpec) XXX_DiscardUnknown

func (m *MetricSpec) XXX_DiscardUnknown()

func (*MetricSpec) XXX_Marshal

func (m *MetricSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*MetricSpec) XXX_Merge

func (m *MetricSpec) XXX_Merge(src proto.Message)

func (*MetricSpec) XXX_Size

func (m *MetricSpec) XXX_Size() int

func (*MetricSpec) XXX_Unmarshal

func (m *MetricSpec) XXX_Unmarshal(b []byte) error

type OutlierStat added in v0.4.858

type OutlierStat struct {
	// The number of outliers below baseline
	Lower int32 `json:"lower,omitempty" protobuf:"varint,1,opt,name=lower"`
	// The number of outliers above baseline
	Upper int32 `json:"upper,omitempty" protobuf:"varint,2,opt,name=upper"`
	// Percentage of rows detected as outliers
	Percent float32 `json:"percent,omitempty" protobuf:"bytes,3,opt,name=percent"`
}
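The sketch below only illustrates the relationship between the fields (Percent as the share of outlier rows); it is not how Modela itself computes the statistic.

func newOutlierStat(lower, upper, totalRows int32) OutlierStat {
	stat := OutlierStat{Lower: lower, Upper: upper}
	if totalRows > 0 {
		stat.Percent = float32(lower+upper) / float32(totalRows) * 100
	}
	return stat
}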

func (*OutlierStat) DeepCopy added in v0.4.858

func (in *OutlierStat) DeepCopy() *OutlierStat

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new OutlierStat.

func (*OutlierStat) DeepCopyInto added in v0.4.858

func (in *OutlierStat) DeepCopyInto(out *OutlierStat)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*OutlierStat) Descriptor added in v0.4.858

func (*OutlierStat) Descriptor() ([]byte, []int)

func (*OutlierStat) Marshal added in v0.4.858

func (m *OutlierStat) Marshal() (dAtA []byte, err error)

func (*OutlierStat) MarshalTo added in v0.4.858

func (m *OutlierStat) MarshalTo(dAtA []byte) (int, error)

func (*OutlierStat) MarshalToSizedBuffer added in v0.4.858

func (m *OutlierStat) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*OutlierStat) ProtoMessage added in v0.4.858

func (*OutlierStat) ProtoMessage()

func (*OutlierStat) Reset added in v0.4.858

func (m *OutlierStat) Reset()

func (*OutlierStat) Size added in v0.4.858

func (m *OutlierStat) Size() (n int)

func (*OutlierStat) String added in v0.4.858

func (this *OutlierStat) String() string

func (*OutlierStat) Unmarshal added in v0.4.858

func (m *OutlierStat) Unmarshal(dAtA []byte) error

func (*OutlierStat) XXX_DiscardUnknown added in v0.4.858

func (m *OutlierStat) XXX_DiscardUnknown()

func (*OutlierStat) XXX_Marshal added in v0.4.858

func (m *OutlierStat) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*OutlierStat) XXX_Merge added in v0.4.858

func (m *OutlierStat) XXX_Merge(src proto.Message)

func (*OutlierStat) XXX_Size added in v0.4.858

func (m *OutlierStat) XXX_Size() int

func (*OutlierStat) XXX_Unmarshal added in v0.4.858

func (m *OutlierStat) XXX_Unmarshal(b []byte) error

type PageSpec

type PageSpec struct {
	Rows []RowSpec `json:"rows,omitempty" protobuf:"bytes,1,rep,name=rows"`
}

func (*PageSpec) DeepCopy

func (in *PageSpec) DeepCopy() *PageSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PageSpec.

func (*PageSpec) DeepCopyInto

func (in *PageSpec) DeepCopyInto(out *PageSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*PageSpec) Descriptor

func (*PageSpec) Descriptor() ([]byte, []int)

func (*PageSpec) Marshal

func (m *PageSpec) Marshal() (dAtA []byte, err error)

func (*PageSpec) MarshalTo

func (m *PageSpec) MarshalTo(dAtA []byte) (int, error)

func (*PageSpec) MarshalToSizedBuffer

func (m *PageSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*PageSpec) ProtoMessage

func (*PageSpec) ProtoMessage()

func (*PageSpec) Reset

func (m *PageSpec) Reset()

func (*PageSpec) Size

func (m *PageSpec) Size() (n int)

func (*PageSpec) String

func (this *PageSpec) String() string

func (*PageSpec) Unmarshal

func (m *PageSpec) Unmarshal(dAtA []byte) error

func (*PageSpec) XXX_DiscardUnknown

func (m *PageSpec) XXX_DiscardUnknown()

func (*PageSpec) XXX_Marshal

func (m *PageSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*PageSpec) XXX_Merge

func (m *PageSpec) XXX_Merge(src proto.Message)

func (*PageSpec) XXX_Size

func (m *PageSpec) XXX_Size() int

func (*PageSpec) XXX_Unmarshal

func (m *PageSpec) XXX_Unmarshal(b []byte) error

type ParquetFileSpec added in v0.4.861

type ParquetFileSpec struct {
	// The engine used to read the Parquet file (by default, selected automatically)
	// +kubebuilder:default:="auto"
	// +kubebuilder:validation:Optional
	Engine *string `json:"engine,omitempty" protobuf:"bytes,1,opt,name=engine"`
}

ParquetFileSpec specifies the format of a Parquet file.
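A minimal sketch; "auto" mirrors the kubebuilder default, and the set of other accepted engine names is not documented here.

func exampleParquetSpec() ParquetFileSpec {
	engine := "auto" // matches the kubebuilder default
	return ParquetFileSpec{Engine: &engine}
}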

func (*ParquetFileSpec) DeepCopy added in v0.4.861

func (in *ParquetFileSpec) DeepCopy() *ParquetFileSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParquetFileSpec.

func (*ParquetFileSpec) DeepCopyInto added in v0.4.861

func (in *ParquetFileSpec) DeepCopyInto(out *ParquetFileSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ParquetFileSpec) Descriptor added in v0.4.861

func (*ParquetFileSpec) Descriptor() ([]byte, []int)

func (*ParquetFileSpec) Marshal added in v0.4.861

func (m *ParquetFileSpec) Marshal() (dAtA []byte, err error)

func (*ParquetFileSpec) MarshalTo added in v0.4.861

func (m *ParquetFileSpec) MarshalTo(dAtA []byte) (int, error)

func (*ParquetFileSpec) MarshalToSizedBuffer added in v0.4.861

func (m *ParquetFileSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ParquetFileSpec) ProtoMessage added in v0.4.861

func (*ParquetFileSpec) ProtoMessage()

func (*ParquetFileSpec) Reset added in v0.4.861

func (m *ParquetFileSpec) Reset()

func (*ParquetFileSpec) Size added in v0.4.861

func (m *ParquetFileSpec) Size() (n int)

func (*ParquetFileSpec) String added in v0.4.861

func (this *ParquetFileSpec) String() string

func (*ParquetFileSpec) Unmarshal added in v0.4.861

func (m *ParquetFileSpec) Unmarshal(dAtA []byte) error

func (*ParquetFileSpec) XXX_DiscardUnknown added in v0.4.861

func (m *ParquetFileSpec) XXX_DiscardUnknown()

func (*ParquetFileSpec) XXX_Marshal added in v0.4.861

func (m *ParquetFileSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ParquetFileSpec) XXX_Merge added in v0.4.861

func (m *ParquetFileSpec) XXX_Merge(src proto.Message)

func (*ParquetFileSpec) XXX_Size added in v0.4.861

func (m *ParquetFileSpec) XXX_Size() int

func (*ParquetFileSpec) XXX_Unmarshal added in v0.4.861

func (m *ParquetFileSpec) XXX_Unmarshal(b []byte) error

type QuoteChar

type QuoteChar string

+kubebuilder:validation:Enum="double-quote";"single-quote";

const (
	SingleQuote QuoteChar = "single-quote"
	DoubleQuote QuoteChar = "double-quote"
)
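For illustration, a small helper mapping the enum to the literal rune it names:

func quoteRune(q QuoteChar) rune {
	if q == SingleQuote {
		return '\''
	}
	return '"' // DoubleQuote, the only other allowed value
}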

type Recipe

type Recipe struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              RecipeSpec   `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	Status            RecipeStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

Recipe represents a data preparation recipe that transforms datasets +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Last Run",type="date",JSONPath=".status.lastRun.at",description="" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:subresource:status +kubebuilder:resource:path=recipes,shortName=rc,singular=recipe,categories={data,modela,all}

func (*Recipe) AddFinalizer

func (r *Recipe) AddFinalizer()

func (*Recipe) CreateOrUpdateCond

func (r *Recipe) CreateOrUpdateCond(cond RecipeCondition)

Merge or update condition

func (*Recipe) DeepCopy

func (in *Recipe) DeepCopy() *Recipe

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Recipe.

func (*Recipe) DeepCopyInto

func (in *Recipe) DeepCopyInto(out *Recipe)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Recipe) DeepCopyObject

func (in *Recipe) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*Recipe) Default

func (r *Recipe) Default()

func (*Recipe) Deleted

func (r *Recipe) Deleted() bool

func (*Recipe) Descriptor

func (*Recipe) Descriptor() ([]byte, []int)

func (*Recipe) GetCond

func (*Recipe) GetCondIdx

func (r *Recipe) GetCondIdx(t RecipeConditionType) int

func (*Recipe) HasFinalizer

func (r *Recipe) HasFinalizer() bool

func (*Recipe) IsInCond

func (r *Recipe) IsInCond(ct RecipeConditionType) bool

func (*Recipe) IsReady

func (r *Recipe) IsReady() bool

func (*Recipe) IsSaved

func (r *Recipe) IsSaved() bool

func (*Recipe) MarkReady

func (r *Recipe) MarkReady()

func (*Recipe) MarkSaved

func (r *Recipe) MarkSaved()

func (*Recipe) Marshal

func (m *Recipe) Marshal() (dAtA []byte, err error)

func (*Recipe) MarshalTo

func (m *Recipe) MarshalTo(dAtA []byte) (int, error)

func (*Recipe) MarshalToSizedBuffer

func (m *Recipe) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Recipe) Populate

func (r *Recipe) Populate(name string)

func (*Recipe) PrintConditions

func (r *Recipe) PrintConditions()

func (*Recipe) ProtoMessage

func (*Recipe) ProtoMessage()

func (*Recipe) RemoveFinalizer

func (r *Recipe) RemoveFinalizer()

func (*Recipe) Reset

func (m *Recipe) Reset()

func (*Recipe) SetupWebhookWithManager

func (r *Recipe) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*Recipe) Size

func (m *Recipe) Size() (n int)

func (*Recipe) String

func (this *Recipe) String() string

func (*Recipe) ToYamlFile

func (r *Recipe) ToYamlFile() ([]byte, error)

func (*Recipe) Unmarshal

func (m *Recipe) Unmarshal(dAtA []byte) error

func (*Recipe) UpdateRunStatus added in v0.4.612

func (in *Recipe) UpdateRunStatus(run RecipeRun)

func (*Recipe) ValidateCreate

func (recipe *Recipe) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*Recipe) ValidateDelete

func (recipe *Recipe) ValidateDelete() error

func (*Recipe) ValidateUpdate

func (recipe *Recipe) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*Recipe) XXX_DiscardUnknown

func (m *Recipe) XXX_DiscardUnknown()

func (*Recipe) XXX_Marshal

func (m *Recipe) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Recipe) XXX_Merge

func (m *Recipe) XXX_Merge(src proto.Message)

func (*Recipe) XXX_Size

func (m *Recipe) XXX_Size() int

func (*Recipe) XXX_Unmarshal

func (m *Recipe) XXX_Unmarshal(b []byte) error

type RecipeCondition

type RecipeCondition struct {
	Type RecipeConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=RecipeConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	// +kubebuilder:validation:Optional
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,3,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	// +kubebuilder:validation:Optional
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	// +kubebuilder:validation:Optional
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

RecipeCondition describes the state of a Recipe at a certain point.
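
A sketch of recording a condition through CreateOrUpdateCond (listed above for Recipe); the reason and message strings are placeholders, and v1/metav1 are the core and meta Kubernetes packages already referenced by the struct fields:

func markRecipeSaved(recipe *Recipe) {
	now := metav1.Now()
	recipe.CreateOrUpdateCond(RecipeCondition{
		Type:               RecipeSaved, // declared with the RecipeConditionType constants below
		Status:             v1.ConditionTrue,
		LastTransitionTime: &now,
		Reason:             "ManifestArchived",                    // placeholder reason
		Message:            "recipe manifest saved to the bucket", // placeholder message
	})
}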

func (*RecipeCondition) DeepCopy

func (in *RecipeCondition) DeepCopy() *RecipeCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeCondition.

func (*RecipeCondition) DeepCopyInto

func (in *RecipeCondition) DeepCopyInto(out *RecipeCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeCondition) Descriptor

func (*RecipeCondition) Descriptor() ([]byte, []int)

func (*RecipeCondition) Marshal

func (m *RecipeCondition) Marshal() (dAtA []byte, err error)

func (*RecipeCondition) MarshalTo

func (m *RecipeCondition) MarshalTo(dAtA []byte) (int, error)

func (*RecipeCondition) MarshalToSizedBuffer

func (m *RecipeCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeCondition) ProtoMessage

func (*RecipeCondition) ProtoMessage()

func (*RecipeCondition) Reset

func (m *RecipeCondition) Reset()

func (*RecipeCondition) Size

func (m *RecipeCondition) Size() (n int)

func (*RecipeCondition) String

func (this *RecipeCondition) String() string

func (*RecipeCondition) Unmarshal

func (m *RecipeCondition) Unmarshal(dAtA []byte) error

func (*RecipeCondition) XXX_DiscardUnknown

func (m *RecipeCondition) XXX_DiscardUnknown()

func (*RecipeCondition) XXX_Marshal

func (m *RecipeCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeCondition) XXX_Merge

func (m *RecipeCondition) XXX_Merge(src proto.Message)

func (*RecipeCondition) XXX_Size

func (m *RecipeCondition) XXX_Size() int

func (*RecipeCondition) XXX_Unmarshal

func (m *RecipeCondition) XXX_Unmarshal(b []byte) error

type RecipeConditionType

type RecipeConditionType string

RecipeConditionType is the condition on the Recipe

const (
	RecipeReady RecipeConditionType = "Ready"
	RecipeSaved RecipeConditionType = "Saved"
)

Recipe condition types

const (
	RecipeRunCompleted RecipeConditionType = "Completed"
	RecipeRunSaved     RecipeConditionType = "Saved"
)

RecipeRun condition types

type RecipeInputSpec

type RecipeInputSpec struct {
	// DatasetName is the name of the dataset
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// Location is the folder where the actual data resides, when not using a dataset
	// +required.
	Location *DataLocation `json:"location,omitempty" protobuf:"bytes,2,opt,name=location"`
	// Format is the dataset format
	// +kubebuilder:default:=csv
	Format *catalog.DatastoreType `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"`
}

RecipeInputSpec specifies the input for a recipe
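
A small sketch of the dataset-backed input path; the dataset name is a placeholder, and Location and Format are left unset rather than guessing at DataLocation internals, which are documented elsewhere in this package:

func exampleRecipeInput() RecipeInputSpec {
	datasetName := "sales-2020" // placeholder dataset name
	return RecipeInputSpec{
		DatasetName: &datasetName,
		// Location and Format only matter when reading directly from storage
		// instead of from a named dataset, so they are omitted here.
	}
}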

func (*RecipeInputSpec) DeepCopy

func (in *RecipeInputSpec) DeepCopy() *RecipeInputSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeInputSpec.

func (*RecipeInputSpec) DeepCopyInto

func (in *RecipeInputSpec) DeepCopyInto(out *RecipeInputSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeInputSpec) Descriptor

func (*RecipeInputSpec) Descriptor() ([]byte, []int)

func (*RecipeInputSpec) Marshal

func (m *RecipeInputSpec) Marshal() (dAtA []byte, err error)

func (*RecipeInputSpec) MarshalTo

func (m *RecipeInputSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipeInputSpec) MarshalToSizedBuffer

func (m *RecipeInputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeInputSpec) ProtoMessage

func (*RecipeInputSpec) ProtoMessage()

func (*RecipeInputSpec) Reset

func (m *RecipeInputSpec) Reset()

func (*RecipeInputSpec) Size

func (m *RecipeInputSpec) Size() (n int)

func (*RecipeInputSpec) String

func (this *RecipeInputSpec) String() string

func (*RecipeInputSpec) Unmarshal

func (m *RecipeInputSpec) Unmarshal(dAtA []byte) error

func (*RecipeInputSpec) XXX_DiscardUnknown

func (m *RecipeInputSpec) XXX_DiscardUnknown()

func (*RecipeInputSpec) XXX_Marshal

func (m *RecipeInputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeInputSpec) XXX_Merge

func (m *RecipeInputSpec) XXX_Merge(src proto.Message)

func (*RecipeInputSpec) XXX_Size

func (m *RecipeInputSpec) XXX_Size() int

func (*RecipeInputSpec) XXX_Unmarshal

func (m *RecipeInputSpec) XXX_Unmarshal(b []byte) error

type RecipeList

type RecipeList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []Recipe `json:"items" protobuf:"bytes,2,rep,name=items"`
}

RecipeList contains a list of Recipes +kubebuilder:object:root=true

func (*RecipeList) DeepCopy

func (in *RecipeList) DeepCopy() *RecipeList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeList.

func (*RecipeList) DeepCopyInto

func (in *RecipeList) DeepCopyInto(out *RecipeList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeList) DeepCopyObject

func (in *RecipeList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*RecipeList) Descriptor

func (*RecipeList) Descriptor() ([]byte, []int)

func (*RecipeList) Marshal

func (m *RecipeList) Marshal() (dAtA []byte, err error)

func (*RecipeList) MarshalTo

func (m *RecipeList) MarshalTo(dAtA []byte) (int, error)

func (*RecipeList) MarshalToSizedBuffer

func (m *RecipeList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeList) ProtoMessage

func (*RecipeList) ProtoMessage()

func (*RecipeList) Reset

func (m *RecipeList) Reset()

func (*RecipeList) Size

func (m *RecipeList) Size() (n int)

func (*RecipeList) String

func (this *RecipeList) String() string

func (*RecipeList) Unmarshal

func (m *RecipeList) Unmarshal(dAtA []byte) error

func (*RecipeList) XXX_DiscardUnknown

func (m *RecipeList) XXX_DiscardUnknown()

func (*RecipeList) XXX_Marshal

func (m *RecipeList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeList) XXX_Merge

func (m *RecipeList) XXX_Merge(src proto.Message)

func (*RecipeList) XXX_Size

func (m *RecipeList) XXX_Size() int

func (*RecipeList) XXX_Unmarshal

func (m *RecipeList) XXX_Unmarshal(b []byte) error

type RecipeOutputSpec

type RecipeOutputSpec struct {
	// CreateDataset, if true, creates a new dataset when the recipe is done.
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	CreateDataset *bool `json:"createDataset,omitempty" protobuf:"varint,1,opt,name=createDataset"`
	// DatasetName is the name of the dataset output by the recipe
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,2,opt,name=datasetName"`
	// Location is the folder where the output data resides.
	// +kubebuilder:validation:Required
	// +required
	Location *DataLocation `json:"location,omitempty" protobuf:"bytes,3,opt,name=location"`
}

RecipeOutputSpec specifies the output of a recipe
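
A sketch of an output spec that materializes a new dataset when the recipe finishes; the name is a placeholder, and Location (required by the CRD) is omitted here for the same reason as in the input sketch above:

func exampleRecipeOutput() RecipeOutputSpec {
	createDataset := true
	outputName := "sales-2020-clean" // placeholder dataset name
	return RecipeOutputSpec{
		CreateDataset: &createDataset,
		DatasetName:   &outputName,
	}
}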

func (*RecipeOutputSpec) DeepCopy

func (in *RecipeOutputSpec) DeepCopy() *RecipeOutputSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeOutputSpec.

func (*RecipeOutputSpec) DeepCopyInto

func (in *RecipeOutputSpec) DeepCopyInto(out *RecipeOutputSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeOutputSpec) Descriptor

func (*RecipeOutputSpec) Descriptor() ([]byte, []int)

func (*RecipeOutputSpec) Marshal

func (m *RecipeOutputSpec) Marshal() (dAtA []byte, err error)

func (*RecipeOutputSpec) MarshalTo

func (m *RecipeOutputSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipeOutputSpec) MarshalToSizedBuffer

func (m *RecipeOutputSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeOutputSpec) ProtoMessage

func (*RecipeOutputSpec) ProtoMessage()

func (*RecipeOutputSpec) Reset

func (m *RecipeOutputSpec) Reset()

func (*RecipeOutputSpec) Size

func (m *RecipeOutputSpec) Size() (n int)

func (*RecipeOutputSpec) String

func (this *RecipeOutputSpec) String() string

func (*RecipeOutputSpec) Unmarshal

func (m *RecipeOutputSpec) Unmarshal(dAtA []byte) error

func (*RecipeOutputSpec) XXX_DiscardUnknown

func (m *RecipeOutputSpec) XXX_DiscardUnknown()

func (*RecipeOutputSpec) XXX_Marshal

func (m *RecipeOutputSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeOutputSpec) XXX_Merge

func (m *RecipeOutputSpec) XXX_Merge(src proto.Message)

func (*RecipeOutputSpec) XXX_Size

func (m *RecipeOutputSpec) XXX_Size() int

func (*RecipeOutputSpec) XXX_Unmarshal

func (m *RecipeOutputSpec) XXX_Unmarshal(b []byte) error

type RecipePartSpec

type RecipePartSpec struct {
	// RecipeName is the name of the recipe to run
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	RecipeName *string `json:"recipeName,omitempty" protobuf:"bytes,1,opt,name=recipeName"`
	// Dependents is the list of recipes that need to run after this recipe.
	Dependents []string `json:"dependents,omitempty" protobuf:"bytes,2,rep,name=dependents"`
}
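
RecipePartSpec describes one node in a recipe dependency graph. A hedged sketch of a two-recipe chain, with placeholder names, where the aggregation recipe runs only after the cleaning recipe:

func exampleRecipeGraph() []RecipePartSpec {
	clean := "clean-sales"         // placeholder recipe name
	aggregate := "aggregate-sales" // placeholder recipe name
	return []RecipePartSpec{
		{RecipeName: &clean, Dependents: []string{aggregate}}, // aggregate runs after clean
		{RecipeName: &aggregate},                              // leaf node with no dependents
	}
}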

func (*RecipePartSpec) DeepCopy

func (in *RecipePartSpec) DeepCopy() *RecipePartSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipePartSpec.

func (*RecipePartSpec) DeepCopyInto

func (in *RecipePartSpec) DeepCopyInto(out *RecipePartSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipePartSpec) Descriptor

func (*RecipePartSpec) Descriptor() ([]byte, []int)

func (*RecipePartSpec) Marshal

func (m *RecipePartSpec) Marshal() (dAtA []byte, err error)

func (*RecipePartSpec) MarshalTo

func (m *RecipePartSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipePartSpec) MarshalToSizedBuffer

func (m *RecipePartSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipePartSpec) ProtoMessage

func (*RecipePartSpec) ProtoMessage()

func (*RecipePartSpec) Reset

func (m *RecipePartSpec) Reset()

func (*RecipePartSpec) Size

func (m *RecipePartSpec) Size() (n int)

func (*RecipePartSpec) String

func (this *RecipePartSpec) String() string

func (*RecipePartSpec) Unmarshal

func (m *RecipePartSpec) Unmarshal(dAtA []byte) error

func (*RecipePartSpec) XXX_DiscardUnknown

func (m *RecipePartSpec) XXX_DiscardUnknown()

func (*RecipePartSpec) XXX_Marshal

func (m *RecipePartSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipePartSpec) XXX_Merge

func (m *RecipePartSpec) XXX_Merge(src proto.Message)

func (*RecipePartSpec) XXX_Size

func (m *RecipePartSpec) XXX_Size() int

func (*RecipePartSpec) XXX_Unmarshal

func (m *RecipePartSpec) XXX_Unmarshal(b []byte) error

type RecipeRun

type RecipeRun struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              RecipeRunSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status RecipeRunStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

RecipeRun represents one execution of a recipe. Execution is performed by creating a Kubernetes Job. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Status",type="string",JSONPath=".status.phase" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Recipe",type="string",JSONPath=".spec.recipeName" +kubebuilder:printcolumn:name="StartTime",type="date",JSONPath=".status.startTime",priority=1 +kubebuilder:printcolumn:name="CompletionTime",type="date",JSONPath=".status.completionTime",priority=1 +kubebuilder:subresource:status +kubebuilder:resource:path=reciperuns,shortName=rcr,singular=reciperun,categories={data,modela,all}
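
A sketch of creating a RecipeRun that executes a named recipe; the metadata and spec values are placeholders, and the controller is assumed to fill in the status as the underlying Job progresses:

func exampleRecipeRun() *RecipeRun {
	recipeName := "clean-sales" // placeholder: the Recipe to execute
	versionName := "v1"         // placeholder data product version
	return &RecipeRun{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "clean-sales-run-001", // placeholder run name
			Namespace: "default-tenant",      // placeholder namespace
		},
		Spec: RecipeRunSpec{
			RecipeName:  &recipeName,
			VersionName: &versionName,
		},
	}
}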

func (*RecipeRun) AddFinalizer

func (r *RecipeRun) AddFinalizer()

func (*RecipeRun) CompletionAlert added in v0.4.601

func (run *RecipeRun) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert

Generate a recipe run completion alert

func (*RecipeRun) CreateOrUpdateCond

func (r *RecipeRun) CreateOrUpdateCond(cond RecipeCondition)

Merge or update condition

func (*RecipeRun) DeepCopy

func (in *RecipeRun) DeepCopy() *RecipeRun

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRun.

func (*RecipeRun) DeepCopyInto

func (in *RecipeRun) DeepCopyInto(out *RecipeRun)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeRun) DeepCopyObject

func (in *RecipeRun) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*RecipeRun) Default

func (run *RecipeRun) Default()

func (*RecipeRun) Deleted

func (r *RecipeRun) Deleted() bool

func (*RecipeRun) Descriptor

func (*RecipeRun) Descriptor() ([]byte, []int)

func (*RecipeRun) ErrorAlert added in v0.4.601

func (run *RecipeRun) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert

func (*RecipeRun) GetCond

func (*RecipeRun) GetCondIdx

func (r *RecipeRun) GetCondIdx(t RecipeConditionType) int

func (*RecipeRun) HasFinalizer

func (r *RecipeRun) HasFinalizer() bool

func (*RecipeRun) IsFailed

func (in *RecipeRun) IsFailed() bool

func (*RecipeRun) IsInCond

func (r *RecipeRun) IsInCond(ct RecipeConditionType) bool

func (*RecipeRun) IsReady

func (r *RecipeRun) IsReady() bool

func (*RecipeRun) IsRunning

func (in *RecipeRun) IsRunning() bool

func (*RecipeRun) IsSaved

func (in *RecipeRun) IsSaved() bool

func (*RecipeRun) ManifestUri

func (r *RecipeRun) ManifestUri() string

func (*RecipeRun) MarkCompleted

func (r *RecipeRun) MarkCompleted()

func (*RecipeRun) MarkFailed

func (r *RecipeRun) MarkFailed(error string)

func (*RecipeRun) MarkRunning

func (r *RecipeRun) MarkRunning()

func (*RecipeRun) MarkSaved

func (r *RecipeRun) MarkSaved()

func (*RecipeRun) Marshal

func (m *RecipeRun) Marshal() (dAtA []byte, err error)

func (*RecipeRun) MarshalTo

func (m *RecipeRun) MarshalTo(dAtA []byte) (int, error)

func (*RecipeRun) MarshalToSizedBuffer

func (m *RecipeRun) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeRun) PrintConditions

func (r *RecipeRun) PrintConditions()

func (*RecipeRun) ProtoMessage

func (*RecipeRun) ProtoMessage()

func (*RecipeRun) RemoveFinalizer

func (r *RecipeRun) RemoveFinalizer()

func (*RecipeRun) ReportName

func (r *RecipeRun) ReportName() string

func (*RecipeRun) Reset

func (m *RecipeRun) Reset()

func (*RecipeRun) RootUri

func (r *RecipeRun) RootUri() string

func (*RecipeRun) RunStatus added in v0.4.614

func (run *RecipeRun) RunStatus() *catalog.LastRunStatus

Return the state of the run as a LastRunStatus

func (*RecipeRun) SetupWebhookWithManager

func (r *RecipeRun) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*RecipeRun) Size

func (m *RecipeRun) Size() (n int)

func (*RecipeRun) StatusString

func (r *RecipeRun) StatusString() string

func (*RecipeRun) String

func (this *RecipeRun) String() string

func (*RecipeRun) ToYamlFile

func (r *RecipeRun) ToYamlFile() ([]byte, error)

func (*RecipeRun) Unmarshal

func (m *RecipeRun) Unmarshal(dAtA []byte) error

func (*RecipeRun) ValidateCreate

func (recipe *RecipeRun) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*RecipeRun) ValidateDelete

func (recipe *RecipeRun) ValidateDelete() error

func (*RecipeRun) ValidateUpdate

func (recipe *RecipeRun) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*RecipeRun) XXX_DiscardUnknown

func (m *RecipeRun) XXX_DiscardUnknown()

func (*RecipeRun) XXX_Marshal

func (m *RecipeRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeRun) XXX_Merge

func (m *RecipeRun) XXX_Merge(src proto.Message)

func (*RecipeRun) XXX_Size

func (m *RecipeRun) XXX_Size() int

func (*RecipeRun) XXX_Unmarshal

func (m *RecipeRun) XXX_Unmarshal(b []byte) error

type RecipeRunCondition

type RecipeRunCondition struct {
	Type RecipeConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=RecipeConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	// +kubebuilder:validation:Optional
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,3,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	// +kubebuilder:validation:Optional
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	// +kubebuilder:validation:Optional
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

RecipeRunCondition describes the state of a RecipeRun at a certain point.

func (*RecipeRunCondition) DeepCopy

func (in *RecipeRunCondition) DeepCopy() *RecipeRunCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunCondition.

func (*RecipeRunCondition) DeepCopyInto

func (in *RecipeRunCondition) DeepCopyInto(out *RecipeRunCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeRunCondition) Descriptor

func (*RecipeRunCondition) Descriptor() ([]byte, []int)

func (*RecipeRunCondition) Marshal

func (m *RecipeRunCondition) Marshal() (dAtA []byte, err error)

func (*RecipeRunCondition) MarshalTo

func (m *RecipeRunCondition) MarshalTo(dAtA []byte) (int, error)

func (*RecipeRunCondition) MarshalToSizedBuffer

func (m *RecipeRunCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeRunCondition) ProtoMessage

func (*RecipeRunCondition) ProtoMessage()

func (*RecipeRunCondition) Reset

func (m *RecipeRunCondition) Reset()

func (*RecipeRunCondition) Size

func (m *RecipeRunCondition) Size() (n int)

func (*RecipeRunCondition) String

func (this *RecipeRunCondition) String() string

func (*RecipeRunCondition) Unmarshal

func (m *RecipeRunCondition) Unmarshal(dAtA []byte) error

func (*RecipeRunCondition) XXX_DiscardUnknown

func (m *RecipeRunCondition) XXX_DiscardUnknown()

func (*RecipeRunCondition) XXX_Marshal

func (m *RecipeRunCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeRunCondition) XXX_Merge

func (m *RecipeRunCondition) XXX_Merge(src proto.Message)

func (*RecipeRunCondition) XXX_Size

func (m *RecipeRunCondition) XXX_Size() int

func (*RecipeRunCondition) XXX_Unmarshal

func (m *RecipeRunCondition) XXX_Unmarshal(b []byte) error

type RecipeRunConditionType

type RecipeRunConditionType string

RecipeRunConditionType is the condition type on a RecipeRun

type RecipeRunList

type RecipeRunList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	Items           []RecipeRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}

RecipeRunList contains a list of RecipeRuns +kubebuilder:object:root=true

func (*RecipeRunList) DeepCopy

func (in *RecipeRunList) DeepCopy() *RecipeRunList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunList.

func (*RecipeRunList) DeepCopyInto

func (in *RecipeRunList) DeepCopyInto(out *RecipeRunList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeRunList) DeepCopyObject

func (in *RecipeRunList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*RecipeRunList) Descriptor

func (*RecipeRunList) Descriptor() ([]byte, []int)

func (*RecipeRunList) Marshal

func (m *RecipeRunList) Marshal() (dAtA []byte, err error)

func (*RecipeRunList) MarshalTo

func (m *RecipeRunList) MarshalTo(dAtA []byte) (int, error)

func (*RecipeRunList) MarshalToSizedBuffer

func (m *RecipeRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeRunList) ProtoMessage

func (*RecipeRunList) ProtoMessage()

func (*RecipeRunList) Reset

func (m *RecipeRunList) Reset()

func (*RecipeRunList) Size

func (m *RecipeRunList) Size() (n int)

func (*RecipeRunList) String

func (this *RecipeRunList) String() string

func (*RecipeRunList) Unmarshal

func (m *RecipeRunList) Unmarshal(dAtA []byte) error

func (*RecipeRunList) XXX_DiscardUnknown

func (m *RecipeRunList) XXX_DiscardUnknown()

func (*RecipeRunList) XXX_Marshal

func (m *RecipeRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeRunList) XXX_Merge

func (m *RecipeRunList) XXX_Merge(src proto.Message)

func (*RecipeRunList) XXX_Size

func (m *RecipeRunList) XXX_Size() int

func (*RecipeRunList) XXX_Unmarshal

func (m *RecipeRunList) XXX_Unmarshal(b []byte) error

type RecipeRunPhase

type RecipeRunPhase string

const (
	RecipeRunPhasePending RecipeRunPhase = "Pending"
	RecipeRunPhaseAborted RecipeRunPhase = "Aborted"
	RecipeRunPhaseRunning RecipeRunPhase = "Running"
	RecipeRunPhaseSucceed RecipeRunPhase = "Completed"
	RecipeRunPhaseFailed  RecipeRunPhase = "Failed"
)
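
A small sketch of branching on the phase, for example to decide whether a run has reached a terminal state; it relies only on the constants above:

func isTerminalPhase(phase RecipeRunPhase) bool {
	switch phase {
	case RecipeRunPhaseSucceed, RecipeRunPhaseFailed, RecipeRunPhaseAborted:
		return true // the run will not make further progress
	case RecipeRunPhasePending, RecipeRunPhaseRunning:
		return false
	default:
		return false // unknown phases are treated as non-terminal in this sketch
	}
}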

type RecipeRunSpec

type RecipeRunSpec struct {
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,1,opt,name=versionName"`
	// The name of the recipe that executes this run
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	RecipeName *string `json:"recipeName,omitempty" protobuf:"bytes,2,opt,name=recipeName"`
	// The execution environment (lab) of this recipe run
	// +kubebuilder:validation:Optional
	LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,3,opt,name=labRef"`
	// The location of the data output.
	// +required.
	Output DataLocation `json:"output,omitempty" protobuf:"bytes,4,opt,name=output"`
	// Resources specifies the hardware resource requirements for a recipe run.
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,5,opt,name=resources"`
	// TTL.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,6,opt,name=ttl"`
}

RecipeRunSpec defines the desired state of a RecipeRun

func (*RecipeRunSpec) DeepCopy

func (in *RecipeRunSpec) DeepCopy() *RecipeRunSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunSpec.

func (*RecipeRunSpec) DeepCopyInto

func (in *RecipeRunSpec) DeepCopyInto(out *RecipeRunSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeRunSpec) Descriptor

func (*RecipeRunSpec) Descriptor() ([]byte, []int)

func (*RecipeRunSpec) Marshal

func (m *RecipeRunSpec) Marshal() (dAtA []byte, err error)

func (*RecipeRunSpec) MarshalTo

func (m *RecipeRunSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipeRunSpec) MarshalToSizedBuffer

func (m *RecipeRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeRunSpec) ProtoMessage

func (*RecipeRunSpec) ProtoMessage()

func (*RecipeRunSpec) Reset

func (m *RecipeRunSpec) Reset()

func (*RecipeRunSpec) Size

func (m *RecipeRunSpec) Size() (n int)

func (*RecipeRunSpec) String

func (this *RecipeRunSpec) String() string

func (*RecipeRunSpec) Unmarshal

func (m *RecipeRunSpec) Unmarshal(dAtA []byte) error

func (*RecipeRunSpec) XXX_DiscardUnknown

func (m *RecipeRunSpec) XXX_DiscardUnknown()

func (*RecipeRunSpec) XXX_Marshal

func (m *RecipeRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeRunSpec) XXX_Merge

func (m *RecipeRunSpec) XXX_Merge(src proto.Message)

func (*RecipeRunSpec) XXX_Size

func (m *RecipeRunSpec) XXX_Size() int

func (*RecipeRunSpec) XXX_Unmarshal

func (m *RecipeRunSpec) XXX_Unmarshal(b []byte) error

type RecipeRunStatus

type RecipeRunStatus struct {
	// Represents the start time
	StartTime *metav1.Time `json:"startTime,omitempty" protobuf:"bytes,1,opt,name=startTime"`
	// Represents the end time
	// +kubebuilder:validation:Optional
	EndTime *metav1.Time `json:"endTime,omitempty" protobuf:"bytes,2,opt,name=endTime"`
	// The phase of the recipe run
	// +kubebuilder:default:="Pending"
	// +kubebuilder:validation:Optional
	Phase RecipeRunPhase `json:"phase,omitempty" protobuf:"bytes,3,opt,name=phase"`
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,4,opt,name=observedGeneration"`
	// Update in case of terminal failure
	// Borrowed from cluster api controller
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,5,opt,name=failureReason"`
	// Update in case of terminal failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,6,opt,name=failureMessage"`
	// What triggered the run
	//+kubebuilder:validation:Optional
	TriggeredBy catalog.TriggerType `json:"triggeredBy,omitempty" protobuf:"bytes,7,opt,name=triggeredBy"`
	// Holds the location of log paths
	//+kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,8,opt,name=logs"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,9,opt,name=lastUpdated"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []RecipeCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,10,rep,name=conditions"`
}

RecipeRunStatus defines the observed state of a RecipeRun
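
A sketch of summarizing a run from its status; it only reads fields shown in the struct above:

func describeRun(status RecipeRunStatus) string {
	if status.Phase == RecipeRunPhaseFailed && status.FailureMessage != nil {
		return "failed: " + *status.FailureMessage
	}
	return string(status.Phase)
}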

func (*RecipeRunStatus) DeepCopy

func (in *RecipeRunStatus) DeepCopy() *RecipeRunStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeRunStatus.

func (*RecipeRunStatus) DeepCopyInto

func (in *RecipeRunStatus) DeepCopyInto(out *RecipeRunStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeRunStatus) Descriptor

func (*RecipeRunStatus) Descriptor() ([]byte, []int)

func (*RecipeRunStatus) Marshal

func (m *RecipeRunStatus) Marshal() (dAtA []byte, err error)

func (*RecipeRunStatus) MarshalTo

func (m *RecipeRunStatus) MarshalTo(dAtA []byte) (int, error)

func (*RecipeRunStatus) MarshalToSizedBuffer

func (m *RecipeRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeRunStatus) ProtoMessage

func (*RecipeRunStatus) ProtoMessage()

func (*RecipeRunStatus) Reset

func (m *RecipeRunStatus) Reset()

func (*RecipeRunStatus) Size

func (m *RecipeRunStatus) Size() (n int)

func (*RecipeRunStatus) String

func (this *RecipeRunStatus) String() string

func (*RecipeRunStatus) Unmarshal

func (m *RecipeRunStatus) Unmarshal(dAtA []byte) error

func (*RecipeRunStatus) XXX_DiscardUnknown

func (m *RecipeRunStatus) XXX_DiscardUnknown()

func (*RecipeRunStatus) XXX_Marshal

func (m *RecipeRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeRunStatus) XXX_Merge

func (m *RecipeRunStatus) XXX_Merge(src proto.Message)

func (*RecipeRunStatus) XXX_Size

func (m *RecipeRunStatus) XXX_Size() int

func (*RecipeRunStatus) XXX_Unmarshal

func (m *RecipeRunStatus) XXX_Unmarshal(b []byte) error

type RecipeSpec

type RecipeSpec struct {
	// Owner is the owner of the recipe
	// +kubebuilder:default:="no-one"
	// +kubebuilder:validation:Optional
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// VersionName is the data product version of the recipe
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=63
	// required.
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// Description is the user provided description
	// +kubebuilder:validation:MaxLength=512
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// Input is the input recipe spec
	Input RecipeInputSpec `json:"input,omitempty" protobuf:"bytes,4,opt,name=input"`
	// Steps are the list of recipe steps
	Steps []RecipeStep `json:"steps,omitempty" protobuf:"bytes,5,rep,name=steps"`
	// Output is the desired output
	Output RecipeOutputSpec `json:"output,omitempty" protobuf:"bytes,6,opt,name=output"`
	// Sample specifies the sampling parameters used when viewing the recipe
	// +kubebuilder:validation:Optional
	Sample SampleSpec `json:"sample,omitempty" protobuf:"bytes,7,opt,name=sample"`
	// Resources defines the resource requirements to run the recipe
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,8,opt,name=resources"`
	// ActiveDeadlineSeconds is the deadline (in seconds) set on the jobs created for this recipe.
	// +kubebuilder:default:=600
	// +kubebuilder:validation:Optional
	ActiveDeadlineSeconds *int64 `json:"activeDeadlineSeconds,omitempty" protobuf:"varint,10,opt,name=activeDeadlineSeconds"`
	// TTL.
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	TTL *int32 `json:"ttl,omitempty" protobuf:"varint,11,opt,name=ttl"`
}

RecipeSpec defines the desired state of a Recipe
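
A hedged sketch of a complete spec that wires the input, a single step, and the output together; it reuses the input and output sketches shown earlier, the step operation is one of the RecipeStepOperation constants listed later, and step parameters are omitted because their keys are operation-specific and not documented here:

func exampleRecipeSpec() RecipeSpec {
	owner := "analytics-team"                  // placeholder owner
	description := "drop duplicate sales rows" // placeholder description
	return RecipeSpec{
		Owner:       &owner,
		Description: &description,
		Input:       exampleRecipeInput(), // see the RecipeInputSpec sketch above
		Steps: []RecipeStep{
			{Op: RemoveDuplicatesStepOp}, // parameters omitted; keys are operation-specific
		},
		Output: exampleRecipeOutput(), // see the RecipeOutputSpec sketch above
	}
}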

func (*RecipeSpec) DeepCopy

func (in *RecipeSpec) DeepCopy() *RecipeSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeSpec.

func (*RecipeSpec) DeepCopyInto

func (in *RecipeSpec) DeepCopyInto(out *RecipeSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeSpec) Descriptor

func (*RecipeSpec) Descriptor() ([]byte, []int)

func (*RecipeSpec) Marshal

func (m *RecipeSpec) Marshal() (dAtA []byte, err error)

func (*RecipeSpec) MarshalTo

func (m *RecipeSpec) MarshalTo(dAtA []byte) (int, error)

func (*RecipeSpec) MarshalToSizedBuffer

func (m *RecipeSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeSpec) ProtoMessage

func (*RecipeSpec) ProtoMessage()

func (*RecipeSpec) Reset

func (m *RecipeSpec) Reset()

func (*RecipeSpec) Size

func (m *RecipeSpec) Size() (n int)

func (*RecipeSpec) String

func (this *RecipeSpec) String() string

func (*RecipeSpec) Unmarshal

func (m *RecipeSpec) Unmarshal(dAtA []byte) error

func (*RecipeSpec) XXX_DiscardUnknown

func (m *RecipeSpec) XXX_DiscardUnknown()

func (*RecipeSpec) XXX_Marshal

func (m *RecipeSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeSpec) XXX_Merge

func (m *RecipeSpec) XXX_Merge(src proto.Message)

func (*RecipeSpec) XXX_Size

func (m *RecipeSpec) XXX_Size() int

func (*RecipeSpec) XXX_Unmarshal

func (m *RecipeSpec) XXX_Unmarshal(b []byte) error

type RecipeStatus

type RecipeStatus struct {
	// ObservedGeneration is the last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`

	// LastRun is the last time a recipe run was created
	//+kubebuilder:validation:Optional
	LastRun catalog.LastRunStatus `json:"lastRun,omitempty" protobuf:"bytes,2,opt,name=lastRun"`
	// The time of the next schedule run
	//+kubebuilder:validation:Optional
	NextRun *metav1.Time `json:"nextRun,omitempty" protobuf:"bytes,3,opt,name=nextRun"`

	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,4,opt,name=lastUpdated"`
	// Update in case of terminal failure
	// Borrowed from cluster api controller
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,5,opt,name=failureReason"`
	// Update in case of terminal failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,6,opt,name=failureMessage"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []RecipeCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,7,rep,name=conditions"`
}

RecipeStatus defines the observed state of a Recipe

func (*RecipeStatus) DeepCopy

func (in *RecipeStatus) DeepCopy() *RecipeStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeStatus.

func (*RecipeStatus) DeepCopyInto

func (in *RecipeStatus) DeepCopyInto(out *RecipeStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeStatus) Descriptor

func (*RecipeStatus) Descriptor() ([]byte, []int)

func (*RecipeStatus) Marshal

func (m *RecipeStatus) Marshal() (dAtA []byte, err error)

func (*RecipeStatus) MarshalTo

func (m *RecipeStatus) MarshalTo(dAtA []byte) (int, error)

func (*RecipeStatus) MarshalToSizedBuffer

func (m *RecipeStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeStatus) ProtoMessage

func (*RecipeStatus) ProtoMessage()

func (*RecipeStatus) Reset

func (m *RecipeStatus) Reset()

func (*RecipeStatus) Size

func (m *RecipeStatus) Size() (n int)

func (*RecipeStatus) String

func (this *RecipeStatus) String() string

func (*RecipeStatus) Unmarshal

func (m *RecipeStatus) Unmarshal(dAtA []byte) error

func (*RecipeStatus) XXX_DiscardUnknown

func (m *RecipeStatus) XXX_DiscardUnknown()

func (*RecipeStatus) XXX_Marshal

func (m *RecipeStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeStatus) XXX_Merge

func (m *RecipeStatus) XXX_Merge(src proto.Message)

func (*RecipeStatus) XXX_Size

func (m *RecipeStatus) XXX_Size() int

func (*RecipeStatus) XXX_Unmarshal

func (m *RecipeStatus) XXX_Unmarshal(b []byte) error

type RecipeStep

type RecipeStep struct {
	Op         RecipeStepOperation `json:"op,omitempty" protobuf:"bytes,1,opt,name=op"`
	Parameters []*RecipeStepParam  `json:"parameters,omitempty" protobuf:"bytes,2,rep,name=parameters"`
}

RecipeStep defines one step in the recipe

func (*RecipeStep) DeepCopy

func (in *RecipeStep) DeepCopy() *RecipeStep

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeStep.

func (*RecipeStep) DeepCopyInto

func (in *RecipeStep) DeepCopyInto(out *RecipeStep)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeStep) Descriptor

func (*RecipeStep) Descriptor() ([]byte, []int)

func (*RecipeStep) Marshal

func (m *RecipeStep) Marshal() (dAtA []byte, err error)

func (*RecipeStep) MarshalTo

func (m *RecipeStep) MarshalTo(dAtA []byte) (int, error)

func (*RecipeStep) MarshalToSizedBuffer

func (m *RecipeStep) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeStep) ProtoMessage

func (*RecipeStep) ProtoMessage()

func (*RecipeStep) Reset

func (m *RecipeStep) Reset()

func (*RecipeStep) Size

func (m *RecipeStep) Size() (n int)

func (*RecipeStep) String

func (this *RecipeStep) String() string

func (*RecipeStep) Unmarshal

func (m *RecipeStep) Unmarshal(dAtA []byte) error

func (*RecipeStep) XXX_DiscardUnknown

func (m *RecipeStep) XXX_DiscardUnknown()

func (*RecipeStep) XXX_Marshal

func (m *RecipeStep) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeStep) XXX_Merge

func (m *RecipeStep) XXX_Merge(src proto.Message)

func (*RecipeStep) XXX_Size

func (m *RecipeStep) XXX_Size() int

func (*RecipeStep) XXX_Unmarshal

func (m *RecipeStep) XXX_Unmarshal(b []byte) error

type RecipeStepOperation

type RecipeStepOperation string

RecipeStepOperation is the operation name of one step in the recipe

const (

	// Aggregate functions
	AnyStepOp                   RecipeStepOperation = "any"
	AnyIfStepOp                 RecipeStepOperation = "any-if"
	ApproximateMedianStepOp     RecipeStepOperation = "approximate-median"
	ApproximatePercentileStepOp RecipeStepOperation = "approximate-percentile"
	ApproximateQuartileStepOp   RecipeStepOperation = "approximate-quartile"
	AverageStepOp               RecipeStepOperation = "average"
	AverageIfStepOp             RecipeStepOperation = "average-if"
	CorrelStepOp                RecipeStepOperation = "correl"
	CountAStepOp                RecipeStepOperation = "counta"
	CountAIfStepOp              RecipeStepOperation = "counta-if"
	CountDistinctStepOp         RecipeStepOperation = "counta-distinct"
	CountDistinctIfStepOp       RecipeStepOperation = "counta-distinct-if"
	CountStepOp                 RecipeStepOperation = "count"
	CountIfStepOp               RecipeStepOperation = "count-if"
	CovarStepOp                 RecipeStepOperation = "covar"
	KthLargestStepOp            RecipeStepOperation = "kth-largest"
	KthLargestIfStepOp          RecipeStepOperation = "kth-largest-if"
	KthLargestUniqueStepOp      RecipeStepOperation = "kth-largest-unique"
	KthLargestUniqueIfStepOp    RecipeStepOperation = "kth-largest-unique-if"
	ListStepOp                  RecipeStepOperation = "list"
	ListIfStepOp                RecipeStepOperation = "list-if"
	MaxStepOp                   RecipeStepOperation = "max"
	MaxIfStepOp                 RecipeStepOperation = "max-if"
	MedianStepOp                RecipeStepOperation = "median"
	MinStepOp                   RecipeStepOperation = "min"
	MinIfStepOp                 RecipeStepOperation = "min-if"
	ModeStepOp                  RecipeStepOperation = "mode"
	ModeIfStepOp                RecipeStepOperation = "mode-if"
	PercentileStepOp            RecipeStepOperation = "percentile"
	QuartileStepOp              RecipeStepOperation = "quartile"
	StdDevStepOp                RecipeStepOperation = "stddev"
	StdDevIfStepOp              RecipeStepOperation = "stddev-if"
	SumStepOp                   RecipeStepOperation = "sum"
	SumIfStepOp                 RecipeStepOperation = "sum-if"
	UniqueStepOp                RecipeStepOperation = "unique"
	VarStepOp                   RecipeStepOperation = "var"
	VarIfStepOp                 RecipeStepOperation = "var-if"

	// Logical functions
	OrStepOp  RecipeStepOperation = "or"
	AndStepOp RecipeStepOperation = "and"
	NotStepOp RecipeStepOperation = "not"

	// Comparison
	ComparisonStepOp       RecipeStepOperation = "comparison"
	IsEvenStepOp           RecipeStepOperation = "is-even"
	IsOddStepOp            RecipeStepOperation = "is-odd"
	InStepOp               RecipeStepOperation = "in"
	MatchesStepOp          RecipeStepOperation = "matches"
	EqualStepOp            RecipeStepOperation = "equal"
	NotEqualStepOp         RecipeStepOperation = "not-equal"
	GreaterThanStepOp      RecipeStepOperation = "greater-than"
	GreaterThanEqualStepOp RecipeStepOperation = "greater-than-equal"
	LessThanStepOp         RecipeStepOperation = "less-than"
	LessThanEqualOp        RecipeStepOperation = "less-than-equal"

	// Math
	AddStepOp           RecipeStepOperation = "add"
	SubstractStepOp     RecipeStepOperation = "subtract"
	MultiplyStepOp      RecipeStepOperation = "multiply"
	DivideOp            RecipeStepOperation = "divide"
	ModStepOp           RecipeStepOperation = "mod"
	NegateStepOp        RecipeStepOperation = "negate"
	SignStepOp          RecipeStepOperation = "sign"
	LcmStepOp           RecipeStepOperation = "lcm"
	AbsoluteStepOp      RecipeStepOperation = "absolute"
	ExponentStepOp      RecipeStepOperation = "exponent"
	LogStepOp           RecipeStepOperation = "log"
	LnStepOp            RecipeStepOperation = "ln"
	PowerStepOp         RecipeStepOperation = "power"
	SquareRootStepOp    RecipeStepOperation = "sqr"
	CeilingStepOp       RecipeStepOperation = "ceiling"
	FloorStepOp         RecipeStepOperation = "floor"
	RoundStepOp         RecipeStepOperation = "round"
	TruncStepOp         RecipeStepOperation = "trunc"
	PIStepOp            RecipeStepOperation = "pi"
	RandomStepOp        RecipeStepOperation = "random"
	RandomBetweenStepOp RecipeStepOperation = "random-between"

	// Trig
	SinStepOp     RecipeStepOperation = "sin"
	CosStepOp     RecipeStepOperation = "cos"
	TanStepOp     RecipeStepOperation = "tan"
	ASinStepOp    RecipeStepOperation = "asin"
	ACosStepOp    RecipeStepOperation = "acos"
	ATanStepOp    RecipeStepOperation = "atan"
	SinHStepOp    RecipeStepOperation = "sinh"
	CosHStepOp    RecipeStepOperation = "cosh"
	TanHStepOp    RecipeStepOperation = "tanh"
	ASinHStepOp   RecipeStepOperation = "asinh"
	ACosHStepOp   RecipeStepOperation = "acosh"
	ATanHStepOp   RecipeStepOperation = "atanh"
	DegreesOp     RecipeStepOperation = "degrees"
	RadiansStepOp RecipeStepOperation = "radians"

	// Date and time functions
	DateStepOp                   RecipeStepOperation = "date"
	TimeStepOp                   RecipeStepOperation = "time"
	DateTimeStepOp               RecipeStepOperation = "date-time"
	DateAddStepOp                RecipeStepOperation = "date-add"
	DateDiffStepOp               RecipeStepOperation = "date-diff"
	DateFormatStepOp             RecipeStepOperation = "date-format"
	UnixTimeFormatStepOp         RecipeStepOperation = "unix-time-format"
	MonthStepOp                  RecipeStepOperation = "month"
	MonthNameStepOp              RecipeStepOperation = "month-name"
	YearStepOp                   RecipeStepOperation = "year"
	DayStepOp                    RecipeStepOperation = "day"
	WeekNumberStepOp             RecipeStepOperation = "week-number"
	WeekDayStepOp                RecipeStepOperation = "week-day"
	HourStepOp                   RecipeStepOperation = "hour"
	MinuteStepOp                 RecipeStepOperation = "minute"
	SecondStepOp                 RecipeStepOperation = "second"
	UnixTimeStepOp               RecipeStepOperation = "unix-time"
	NowStepOp                    RecipeStepOperation = "now"
	TodayStepOp                  RecipeStepOperation = "today"
	ParseDateStepOp              RecipeStepOperation = "parse-date"
	NetworkDaysStepOp            RecipeStepOperation = "network-days"
	NetworkDaysIntlStepOp        RecipeStepOperation = "network-days-intl"
	MinDateStepOp                RecipeStepOperation = "min-date"
	MaxDateStepOp                RecipeStepOperation = "max-date"
	ModeDateStepOp               RecipeStepOperation = "mode-date"
	WorkdayStepOp                RecipeStepOperation = "workday"
	WorkDayIntlStepOp            RecipeStepOperation = "workday-intl"
	ConvertFromUtcStepOp         RecipeStepOperation = "convert-from-utc"
	ConvertToUtcStepOp           RecipeStepOperation = "convert-to-utc"
	ConvertTimeZoneStepOp        RecipeStepOperation = "convert-time-zone"
	MinDateIfStepOp              RecipeStepOperation = "min-date-if"
	MaxDateIfStepOp              RecipeStepOperation = "max-date-if"
	ModeDateIfStepOp             RecipeStepOperation = "model-date-if"
	KthLargestDateStepOp         RecipeStepOperation = "kth-largest-date"
	KthLargestUniqueDateStepOp   RecipeStepOperation = "kth-largest-unique-date"
	KthLargestUniqueDateIfStepOp RecipeStepOperation = "kth-largest-unique-date-step"
	KthLargestDateIfStepOp       RecipeStepOperation = "kth-largest-date-if"
	WeekDayNameStepOp            RecipeStepOperation = "week-day-name"

	// String
	CharStepOp                   RecipeStepOperation = "char"
	UnicodeStepOp                RecipeStepOperation = "unicode"
	UpperStepOp                  RecipeStepOperation = "upper"
	LowerStepOp                  RecipeStepOperation = "lower"
	ProperStepOp                 RecipeStepOperation = "proper"
	TrimStepOp                   RecipeStepOperation = "trim"
	RemoveWhitespaceStepOp       RecipeStepOperation = "remove-white-spaces"
	RemoveSymbolsStepOp          RecipeStepOperation = "remove-symbols"
	LenStepOp                    RecipeStepOperation = "len"
	FindStepOp                   RecipeStepOperation = "find"
	RightFindStepOp              RecipeStepOperation = "right-find"
	SubstringStepOp              RecipeStepOperation = "substring"
	SubstitueStepOp              RecipeStepOperation = "substitute"
	LeftStepOp                   RecipeStepOperation = "left"
	RightStepOp                  RecipeStepOperation = "right"
	PadStepOp                    RecipeStepOperation = "pad"
	MergeStringStepOp            RecipeStepOperation = "merge-string"
	StartsWithStepOp             RecipeStepOperation = "starts-with"
	EndsWithStepOp               RecipeStepOperation = "ends-with"
	RepeatStepOp                 RecipeStepOperation = "repeat"
	ExactStepOp                  RecipeStepOperation = "exact"
	StringGreaterThanStepOp      RecipeStepOperation = "string-greater-than"
	StringGreaterThanEqualStepOp RecipeStepOperation = "string-greater-equal"
	StringLessThanStepOp         RecipeStepOperation = "string-less-than"
	StringLessThanEqualStepOp    RecipeStepOperation = "string-less-than-equal"
	DoubleMetaphoneStepOp        RecipeStepOperation = "double-metaphone"
	DoubleMetaphoneEqualsStepOp  RecipeStepOperation = "double-metaphone-equals"
	TransliterateStepOp          RecipeStepOperation = "transliterate"
	TrimQuotesStepOp             RecipeStepOperation = "trim-quotes"
	Base64EncodeStepOp           RecipeStepOperation = "base64-encode"
	Base64DecodeStepOp           RecipeStepOperation = "base64-decode"

	// Type functions
	IfMissingStepOp  RecipeStepOperation = "if-missing"
	IsMissingStepOp  RecipeStepOperation = "is-missing"
	ParseIntStepOp   RecipeStepOperation = "parse-int"
	ParseBoolStepOp  RecipeStepOperation = "parse-bool"
	ParseFloatStepOp RecipeStepOperation = "parse-float"

	// Window functions
	PrevStepOp                    RecipeStepOperation = "prev"
	NextStepOp                    RecipeStepOperation = "next"
	FillStepOp                    RecipeStepOperation = "fill"
	RankStepOp                    RecipeStepOperation = "rank"
	DenseRankStepOp               RecipeStepOperation = "dense-rank"
	RollingAvgStepOp              RecipeStepOperation = "rolling-avg"
	RollingModeStepOp             RecipeStepOperation = "rolling-mode"
	RollingMaxStepOp              RecipeStepOperation = "rolling-max"
	RollingMinStepOp              RecipeStepOperation = "rolling-min"
	RollingSumStepOp              RecipeStepOperation = "rolling-sum"
	RollingStdDevStepOp           RecipeStepOperation = "rolling-std-dev"
	RollingStdDevSampStepOp       RecipeStepOperation = "rolling-std-dev-samp"
	RollingVarianceStepOp         RecipeStepOperation = "rolling-variance"
	RollingVarianceSampStepOp     RecipeStepOperation = "rolling-variance-samp"
	RollingCountAStepOp           RecipeStepOperation = "rolling-counta"
	RollingKthLargestStepOp       RecipeStepOperation = "rolling-k-largest"
	RollingKthLargestUniqueStepOp RecipeStepOperation = "rolling-k-largest-unique"
	RollingListStepOp             RecipeStepOperation = "rolling-list"
	RowNumberStepOp               RecipeStepOperation = "row-number"
	SessionStepOp                 RecipeStepOperation = "session"

	// Other functions
	IpToIntStepOp         RecipeStepOperation = "ip-to-int"
	IntToIpStepOp         RecipeStepOperation = "int-to-ip"
	UrlParamsStepOp       RecipeStepOperation = "url-params"
	COALESCEStepOp        RecipeStepOperation = "coalesce"
	SourceRowNumberStepOp RecipeStepOperation = "source-row-number"
	IfStepOp              RecipeStepOperation = "if"
	CaseStepOp            RecipeStepOperation = "case"
	RangeStepOp           RecipeStepOperation = "range"
	HostStepOp            RecipeStepOperation = "host"
	DomainStepOp          RecipeStepOperation = "domain"
	SubDomainStepOp       RecipeStepOperation = "subdomain"

	// Basic cleaning
	DeleteStep         RecipeStepOperation = "delete"
	DuplicateStep      RecipeStepOperation = "duplicate"
	MoveAfterStep      RecipeStepOperation = "move-after"
	MoveBeforeStep     RecipeStepOperation = "move-before"
	MoveToEndStep      RecipeStepOperation = "move-to-end"
	MoveToIndexStep    RecipeStepOperation = "move-to-index"
	MoveToStartStep    RecipeStepOperation = "move-to-start"
	RenameStep         RecipeStepOperation = "rename"
	ToBoolColumnStep   RecipeStepOperation = "to-boolean-column"
	ToDoubleColumnStep RecipeStepOperation = "to-double-column"
	ToNumberColumnStep RecipeStepOperation = "to-number-column"
	ToStringColumnStep RecipeStepOperation = "to-string-column"

	// Data cleaning
	CapitalCaseStepOp              RecipeStepOperation = "capital-case"
	FormatDateStepOp               RecipeStepOperation = "format-date"
	AddDoubleQuotesStepOp          RecipeStepOperation = "add-double-quotes"
	AddPrefixStepOp                RecipeStepOperation = "add-prefix"
	AddSingleQuotesStepOp          RecipeStepOperation = "add-single-quotes"
	AddSuffixStepOp                RecipeStepOperation = "add-suffix"
	ExtractBetweenDelimitersStepOp RecipeStepOperation = "extract-between-delimiters"
	ExtractBetweenPositionsStepOp  RecipeStepOperation = "extract-between-position"
	ExtractPatternStepOp           RecipeStepOperation = "extract-pattern"
	RemoveCombinedStepOp           RecipeStepOperation = "remove-combined"
	ReplaceBetweenDelimitersStepOp RecipeStepOperation = "replace-between-delimiters"
	ReplaceBetweenPositionsStepOp  RecipeStepOperation = "replace-between-positions"
	ReplaceTextStepOp              RecipeStepOperation = "replace-text"

	// Data quality
	FillWithAvgStepOp       RecipeStepOperation = "fill-with-average"
	FillWithCustomStepOp    RecipeStepOperation = "fill-with-custom"
	FillWithEmptyStepOp     RecipeStepOperation = "fill-with-empty"
	FillWithLastValidStepOp RecipeStepOperation = "fill-with-last-valid"
	FillWithMedianStepOp    RecipeStepOperation = "fill-with-median"
	FillWithModeStepOp      RecipeStepOperation = "fill-with-mode"
	FillWithMostFreqStepOp  RecipeStepOperation = "fill-with-most-freq"
	FillWithNullStepOp      RecipeStepOperation = "fill-with-null"
	FillWithSumStepOp       RecipeStepOperation = "fill-with-sum"
	RemoveDuplicatesStepOp  RecipeStepOperation = "remove-duplicates"
	RemoveMissingStepOp     RecipeStepOperation = "remove-missing"

	FlagColumnFromNullStepOp           RecipeStepOperation = "flag-column-from-null"
	FlagColumnFromPatternStepOp        RecipeStepOperation = "flag-column-from-pattern"
	MergeStepOp                        RecipeStepOperation = "merge"
	SplitColumnsBetweenDelimiterStepOp RecipeStepOperation = "split-columns-between-delimiter"
	SplitColumnsBetweenPositionsStepOp RecipeStepOperation = "split-columns-between-positions"
	SplitColumnsFromEndStepOp          RecipeStepOperation = "split-columns-from-end"
	SplitColumnsFromStartStepOp        RecipeStepOperation = "split-columns-from-start"
	SplitColumnMultipleDelimiterStepOp RecipeStepOperation = "split-column-multiple-delimiter"
	SplitColumnSingleDelimiterStepOp   RecipeStepOperation = "split-column-single-delimiter"
	SplitColumnWithIntervalsStepOp     RecipeStepOperation = "split-column-with-intervals"

	// Data structure
	CatMappingStepOp   RecipeStepOperation = "category-mapping"
	GroupByStepOp      RecipeStepOperation = "group-by"
	JoinStepOp         RecipeStepOperation = "join"
	OneHotEncodeStepOp RecipeStepOperation = "one-hot-encode"
	PivotStepOp        RecipeStepOperation = "pivot"
	TokenizeStepOp     RecipeStepOperation = "tokenize"
	UnionStepOp        RecipeStepOperation = "union"
	UnpivotStepOp      RecipeStepOperation = "unpivot"
)

type RecipeStepParam

type RecipeStepParam struct {
	Name  string `json:"name,omitempty" protobuf:"bytes,1,opt,name=name"`
	Value string `json:"value,omitempty" protobuf:"bytes,2,opt,name=value"`
}

RecipeStepParam is a key-value parameter of a recipe step
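
A sketch of a parameterized step using the rename operation defined above; the parameter keys ("column", "newName") are hypothetical illustrations, not documented keys:

func exampleRenameStep() RecipeStep {
	return RecipeStep{
		Op: RenameStep,
		Parameters: []*RecipeStepParam{
			{Name: "column", Value: "cust_id"},   // hypothetical parameter key and value
			{Name: "newName", Value: "customer"}, // hypothetical parameter key and value
		},
	}
}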

func (*RecipeStepParam) DeepCopy

func (in *RecipeStepParam) DeepCopy() *RecipeStepParam

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecipeStepParam.

func (*RecipeStepParam) DeepCopyInto

func (in *RecipeStepParam) DeepCopyInto(out *RecipeStepParam)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecipeStepParam) Descriptor

func (*RecipeStepParam) Descriptor() ([]byte, []int)

func (*RecipeStepParam) Marshal

func (m *RecipeStepParam) Marshal() (dAtA []byte, err error)

func (*RecipeStepParam) MarshalTo

func (m *RecipeStepParam) MarshalTo(dAtA []byte) (int, error)

func (*RecipeStepParam) MarshalToSizedBuffer

func (m *RecipeStepParam) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecipeStepParam) ProtoMessage

func (*RecipeStepParam) ProtoMessage()

func (*RecipeStepParam) Reset

func (m *RecipeStepParam) Reset()

func (*RecipeStepParam) Size

func (m *RecipeStepParam) Size() (n int)

func (*RecipeStepParam) String

func (this *RecipeStepParam) String() string

func (*RecipeStepParam) Unmarshal

func (m *RecipeStepParam) Unmarshal(dAtA []byte) error

func (*RecipeStepParam) XXX_DiscardUnknown

func (m *RecipeStepParam) XXX_DiscardUnknown()

func (*RecipeStepParam) XXX_Marshal

func (m *RecipeStepParam) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecipeStepParam) XXX_Merge

func (m *RecipeStepParam) XXX_Merge(src proto.Message)

func (*RecipeStepParam) XXX_Size

func (m *RecipeStepParam) XXX_Size() int

func (*RecipeStepParam) XXX_Unmarshal

func (m *RecipeStepParam) XXX_Unmarshal(b []byte) error

type RecommendationSchema

type RecommendationSchema struct {
	// The name of the column that specifies user IDs (i.e. the primary key)
	// +kubebuilder:default:="user_id"
	// +kubebuilder:validation:Optional
	UserID *string `json:"userIDColumn,omitempty" protobuf:"bytes,1,opt,name=userIDColumn"`
	// The name of the column that specifies item IDs
	// +kubebuilder:default:="item_id"
	// +kubebuilder:validation:Optional
	ItemID *string `json:"itemIDColumn,omitempty" protobuf:"bytes,2,opt,name=itemIDColumn"`
	// The name of the column that specifies ratings
	// +kubebuilder:default:="rating"
	// +kubebuilder:validation:Optional
	Rating *string `json:"ratingColumn,omitempty" protobuf:"bytes,3,opt,name=ratingColumn"`
}
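
A minimal sketch of populating a RecommendationSchema, assuming the code compiles alongside the v1alpha1 types; the column names are illustrative only, and the kubebuilder defaults shown above apply when the fields are left nil.

// newRecommendationSchema is a hypothetical constructor that overrides
// the default column names of a RecommendationSchema.
func newRecommendationSchema() RecommendationSchema {
	user, item, rating := "customer_id", "product_id", "stars" // assumed column names
	return RecommendationSchema{
		UserID: &user,
		ItemID: &item,
		Rating: &rating,
	}
}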

func (*RecommendationSchema) DeepCopy

func (in *RecommendationSchema) DeepCopy() *RecommendationSchema

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RecommendationSchema.

func (*RecommendationSchema) DeepCopyInto

func (in *RecommendationSchema) DeepCopyInto(out *RecommendationSchema)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RecommendationSchema) Descriptor

func (*RecommendationSchema) Descriptor() ([]byte, []int)

func (*RecommendationSchema) Marshal

func (m *RecommendationSchema) Marshal() (dAtA []byte, err error)

func (*RecommendationSchema) MarshalTo

func (m *RecommendationSchema) MarshalTo(dAtA []byte) (int, error)

func (*RecommendationSchema) MarshalToSizedBuffer

func (m *RecommendationSchema) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RecommendationSchema) ProtoMessage

func (*RecommendationSchema) ProtoMessage()

func (*RecommendationSchema) Reset

func (m *RecommendationSchema) Reset()

func (*RecommendationSchema) Size

func (m *RecommendationSchema) Size() (n int)

func (*RecommendationSchema) String

func (this *RecommendationSchema) String() string

func (*RecommendationSchema) Unmarshal

func (m *RecommendationSchema) Unmarshal(dAtA []byte) error

func (*RecommendationSchema) XXX_DiscardUnknown

func (m *RecommendationSchema) XXX_DiscardUnknown()

func (*RecommendationSchema) XXX_Marshal

func (m *RecommendationSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RecommendationSchema) XXX_Merge

func (m *RecommendationSchema) XXX_Merge(src proto.Message)

func (*RecommendationSchema) XXX_Size

func (m *RecommendationSchema) XXX_Size() int

func (*RecommendationSchema) XXX_Unmarshal

func (m *RecommendationSchema) XXX_Unmarshal(b []byte) error

type RelationshipSpec

type RelationshipSpec struct {
	// The name of the relationship
	// +kubebuilder:validation:Required
	// +required
	Name string `json:"name,omitempty" protobuf:"bytes,1,opt,name=type"`
	// The name of the column that holds the foreign key
	Column string `json:"columns,omitempty" protobuf:"bytes,2,opt,name=column"`
	// The relationship arity
	Arity *catalog.RelationshipArity `json:"arity,omitempty" protobuf:"bytes,3,opt,name=arity"`
	// The name of the other DataSource object
	// +kubebuilder:validation:Required
	// +required
	RelatesTo string `json:"relatesTo,omitempty" protobuf:"bytes,4,opt,name=relatesTo"`
}

RelationshipSpec defines a relationship between two DataSource objects
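
As a sketch, a RelationshipSpec linking one DataSource to another might be filled in as below; the names are hypothetical and the optional Arity field is left at its zero value.

// ordersToCustomers is a hypothetical relationship from an orders
// DataSource to a customers DataSource via a foreign-key column.
var ordersToCustomers = RelationshipSpec{
	Name:      "orders-to-customers", // assumed relationship name
	Column:    "customer_id",         // assumed foreign-key column
	RelatesTo: "customers",           // assumed name of the other DataSource object
}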

func (*RelationshipSpec) DeepCopy

func (in *RelationshipSpec) DeepCopy() *RelationshipSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RelationshipSpec.

func (*RelationshipSpec) DeepCopyInto

func (in *RelationshipSpec) DeepCopyInto(out *RelationshipSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RelationshipSpec) Descriptor

func (*RelationshipSpec) Descriptor() ([]byte, []int)

func (*RelationshipSpec) Marshal

func (m *RelationshipSpec) Marshal() (dAtA []byte, err error)

func (*RelationshipSpec) MarshalTo

func (m *RelationshipSpec) MarshalTo(dAtA []byte) (int, error)

func (*RelationshipSpec) MarshalToSizedBuffer

func (m *RelationshipSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RelationshipSpec) ProtoMessage

func (*RelationshipSpec) ProtoMessage()

func (*RelationshipSpec) Reset

func (m *RelationshipSpec) Reset()

func (*RelationshipSpec) Size

func (m *RelationshipSpec) Size() (n int)

func (*RelationshipSpec) String

func (this *RelationshipSpec) String() string

func (*RelationshipSpec) Unmarshal

func (m *RelationshipSpec) Unmarshal(dAtA []byte) error

func (*RelationshipSpec) XXX_DiscardUnknown

func (m *RelationshipSpec) XXX_DiscardUnknown()

func (*RelationshipSpec) XXX_Marshal

func (m *RelationshipSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RelationshipSpec) XXX_Merge

func (m *RelationshipSpec) XXX_Merge(src proto.Message)

func (*RelationshipSpec) XXX_Size

func (m *RelationshipSpec) XXX_Size() int

func (*RelationshipSpec) XXX_Unmarshal

func (m *RelationshipSpec) XXX_Unmarshal(b []byte) error

type RowSpec

type RowSpec struct {
	Cols []ColumnSpec `json:"cols,omitempty" protobuf:"bytes,1,rep,name=cols"`
}

func (*RowSpec) DeepCopy

func (in *RowSpec) DeepCopy() *RowSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RowSpec.

func (*RowSpec) DeepCopyInto

func (in *RowSpec) DeepCopyInto(out *RowSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*RowSpec) Descriptor

func (*RowSpec) Descriptor() ([]byte, []int)

func (*RowSpec) Marshal

func (m *RowSpec) Marshal() (dAtA []byte, err error)

func (*RowSpec) MarshalTo

func (m *RowSpec) MarshalTo(dAtA []byte) (int, error)

func (*RowSpec) MarshalToSizedBuffer

func (m *RowSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*RowSpec) ProtoMessage

func (*RowSpec) ProtoMessage()

func (*RowSpec) Reset

func (m *RowSpec) Reset()

func (*RowSpec) Size

func (m *RowSpec) Size() (n int)

func (*RowSpec) String

func (this *RowSpec) String() string

func (*RowSpec) Unmarshal

func (m *RowSpec) Unmarshal(dAtA []byte) error

func (*RowSpec) XXX_DiscardUnknown

func (m *RowSpec) XXX_DiscardUnknown()

func (*RowSpec) XXX_Marshal

func (m *RowSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*RowSpec) XXX_Merge

func (m *RowSpec) XXX_Merge(src proto.Message)

func (*RowSpec) XXX_Size

func (m *RowSpec) XXX_Size() int

func (*RowSpec) XXX_Unmarshal

func (m *RowSpec) XXX_Unmarshal(b []byte) error

type SampleSpec

type SampleSpec struct {
	// Indicates if sampling is enabled
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"`
	// The type of sampling (random sampling, by default)
	// +kubebuilder:default:="random"
	// +kubebuilder:validation:Optional
	Type catalog.SamplingType `json:"type,omitempty" protobuf:"bytes,2,opt,name=type"`
	// The number of rows to sample (by default, 500)
	// +kubebuilder:default:=500
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	Rows *int32 `json:"rows,omitempty" protobuf:"varint,3,opt,name=rows"`
	// The percentage of rows to sample
	// +kubebuilder:default:=100
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	Pct *int32 `json:"percent,omitempty" protobuf:"varint,4,opt,name=percent"`
	// The filter formula, valid only if the sample type is a filter
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Filter *string `json:"filter,omitempty" protobuf:"bytes,5,opt,name=filter"`
	// The name of the column to be used for stratified sampling
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Column *string `json:"column,omitempty" protobuf:"bytes,6,opt,name=column"`
}

SampleSpec specifies how the contents of a dataset should be sampled
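
A minimal sketch of a SampleSpec that enables random sampling of 1,000 rows; it assumes the snippet compiles next to the v1alpha1 types, and the values are illustrative.

// sampleFirstThousand is a hypothetical SampleSpec that samples 1,000 rows.
func sampleFirstThousand() SampleSpec {
	enabled := true
	rows := int32(1000)
	return SampleSpec{
		Enabled: &enabled,
		Rows:    &rows,
		// Type defaults to "random"; Filter and Column are only relevant
		// for filter-based and stratified sampling respectively.
	}
}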

func (*SampleSpec) DeepCopy

func (in *SampleSpec) DeepCopy() *SampleSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SampleSpec.

func (*SampleSpec) DeepCopyInto

func (in *SampleSpec) DeepCopyInto(out *SampleSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SampleSpec) Descriptor

func (*SampleSpec) Descriptor() ([]byte, []int)

func (*SampleSpec) Marshal

func (m *SampleSpec) Marshal() (dAtA []byte, err error)

func (*SampleSpec) MarshalTo

func (m *SampleSpec) MarshalTo(dAtA []byte) (int, error)

func (*SampleSpec) MarshalToSizedBuffer

func (m *SampleSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SampleSpec) ProtoMessage

func (*SampleSpec) ProtoMessage()

func (*SampleSpec) Reset

func (m *SampleSpec) Reset()

func (*SampleSpec) Size

func (m *SampleSpec) Size() (n int)

func (*SampleSpec) String

func (this *SampleSpec) String() string

func (*SampleSpec) Unmarshal

func (m *SampleSpec) Unmarshal(dAtA []byte) error

func (*SampleSpec) XXX_DiscardUnknown

func (m *SampleSpec) XXX_DiscardUnknown()

func (*SampleSpec) XXX_Marshal

func (m *SampleSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SampleSpec) XXX_Merge

func (m *SampleSpec) XXX_Merge(src proto.Message)

func (*SampleSpec) XXX_Size

func (m *SampleSpec) XXX_Size() int

func (*SampleSpec) XXX_Unmarshal

func (m *SampleSpec) XXX_Unmarshal(b []byte) error

type ScatterPlotSpec

type ScatterPlotSpec struct {
	// Dataset is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// The name of the column mapped to the X axis
	// +kubebuilder:validation:Optional
	X *string `json:"x,omitempty" protobuf:"bytes,2,opt,name=x"`
	// The name of the column mapped to the Y axis
	// +kubebuilder:validation:Optional
	Y *string `json:"y,omitempty" protobuf:"bytes,3,opt,name=y"`
}
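
For illustration, a ScatterPlotSpec plotting two columns of a dataset could look like the sketch below; the dataset and column names are assumptions.

// ageVsIncome is a hypothetical scatter-plot specification.
var (
	plotDataset = "customers" // assumed dataset name
	plotX       = "age"
	plotY       = "income"

	ageVsIncome = ScatterPlotSpec{
		DatasetName: &plotDataset,
		X:           &plotX,
		Y:           &plotY,
	}
)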

func (*ScatterPlotSpec) DeepCopy

func (in *ScatterPlotSpec) DeepCopy() *ScatterPlotSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScatterPlotSpec.

func (*ScatterPlotSpec) DeepCopyInto

func (in *ScatterPlotSpec) DeepCopyInto(out *ScatterPlotSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*ScatterPlotSpec) Descriptor

func (*ScatterPlotSpec) Descriptor() ([]byte, []int)

func (*ScatterPlotSpec) Marshal

func (m *ScatterPlotSpec) Marshal() (dAtA []byte, err error)

func (*ScatterPlotSpec) MarshalTo

func (m *ScatterPlotSpec) MarshalTo(dAtA []byte) (int, error)

func (*ScatterPlotSpec) MarshalToSizedBuffer

func (m *ScatterPlotSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*ScatterPlotSpec) ProtoMessage

func (*ScatterPlotSpec) ProtoMessage()

func (*ScatterPlotSpec) Reset

func (m *ScatterPlotSpec) Reset()

func (*ScatterPlotSpec) Size

func (m *ScatterPlotSpec) Size() (n int)

func (*ScatterPlotSpec) String

func (this *ScatterPlotSpec) String() string

func (*ScatterPlotSpec) Unmarshal

func (m *ScatterPlotSpec) Unmarshal(dAtA []byte) error

func (*ScatterPlotSpec) XXX_DiscardUnknown

func (m *ScatterPlotSpec) XXX_DiscardUnknown()

func (*ScatterPlotSpec) XXX_Marshal

func (m *ScatterPlotSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*ScatterPlotSpec) XXX_Merge

func (m *ScatterPlotSpec) XXX_Merge(src proto.Message)

func (*ScatterPlotSpec) XXX_Size

func (m *ScatterPlotSpec) XXX_Size() int

func (*ScatterPlotSpec) XXX_Unmarshal

func (m *ScatterPlotSpec) XXX_Unmarshal(b []byte) error

type Schema

type Schema struct {
	// The time-series schema, which sets time-series specific parameters
	// +kubebuilder:validation:Optional
	TimeSeriesSchema TimeSeriesSchema `json:"timeSeriesSchema,omitempty" protobuf:"bytes,1,opt,name=timeSeriesSchema"`
	// The recommendation schema, which is used for the recommendation ML task
	// +kubebuilder:validation:Optional
	RecommendationSchema RecommendationSchema `json:"recommendationSchema,omitempty" protobuf:"bytes,2,opt,name=recommendationSchema"`
	// The collection of columns and their attributes
	Columns []Column `json:"columns,omitempty" protobuf:"bytes,3,rep,name=columns"`
	// The specification of tests to run against a new dataset
	TestTemplate DatasetTestSuite `json:"tests,omitempty" protobuf:"bytes,4,opt,name=tests"`
}

Schema defines the column-level format and validation rules for data associated with a DataSource
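
A sketch of composing a Schema for a recommendation task, assuming it compiles alongside the v1alpha1 types; Columns and TestTemplate are left empty here and would normally carry Column and DatasetTestSuite values defined elsewhere in this package.

// recommendationDataSchema is a hypothetical Schema that only sets the
// recommendation-specific part; defaults cover the rest.
func recommendationDataSchema() Schema {
	user, item, rating := "user_id", "item_id", "rating"
	return Schema{
		RecommendationSchema: RecommendationSchema{
			UserID: &user,
			ItemID: &item,
			Rating: &rating,
		},
		// Columns:      []Column{...},          // column definitions go here
		// TestTemplate: DatasetTestSuite{...},  // dataset tests go here
	}
}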

func (*Schema) DeepCopy

func (in *Schema) DeepCopy() *Schema

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Schema.

func (*Schema) DeepCopyInto

func (in *Schema) DeepCopyInto(out *Schema)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*Schema) Descriptor

func (*Schema) Descriptor() ([]byte, []int)

func (*Schema) Marshal

func (m *Schema) Marshal() (dAtA []byte, err error)

func (*Schema) MarshalTo

func (m *Schema) MarshalTo(dAtA []byte) (int, error)

func (*Schema) MarshalToSizedBuffer

func (m *Schema) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*Schema) ProtoMessage

func (*Schema) ProtoMessage()

func (*Schema) Reset

func (m *Schema) Reset()

func (*Schema) Size

func (m *Schema) Size() (n int)

func (*Schema) String

func (this *Schema) String() string

func (*Schema) Unmarshal

func (m *Schema) Unmarshal(dAtA []byte) error

func (*Schema) XXX_DiscardUnknown

func (m *Schema) XXX_DiscardUnknown()

func (*Schema) XXX_Marshal

func (m *Schema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*Schema) XXX_Merge

func (m *Schema) XXX_Merge(src proto.Message)

func (*Schema) XXX_Size

func (m *Schema) XXX_Size() int

func (*Schema) XXX_Unmarshal

func (m *Schema) XXX_Unmarshal(b []byte) error

type SqlQuery

type SqlQuery struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              SqlQuerySpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status SqlQueryStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

SqlQuery represents a single SQL query in the SqlQuery store. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Description",type="string",JSONPath=".spec.description" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=sqlqueries,singular=sqlquery,categories={data,modela} +kubebuilder:subresource:status
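
The sketch below shows how a SqlQuery object might be constructed in Go before being created through a Kubernetes client. It assumes the code lives alongside these types in package v1alpha1 and that k8s.io/apimachinery is available; every literal value is an assumption.

package v1alpha1 // sketch: assumes it compiles alongside these types

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// newDailyRevenueQuery builds a hypothetical SqlQuery resource.
func newDailyRevenueQuery() *SqlQuery {
	sql := "SELECT order_date, SUM(amount) FROM orders GROUP BY order_date" // assumed query text
	conn := "warehouse-connection"                                          // assumed Connection name
	return &SqlQuery{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "daily-revenue",
			Namespace: "default-tenant", // assumed namespace
		},
		Spec: SqlQuerySpec{
			Template: SqlQueryRunSpec{
				SqlOrTable:     &sql,
				ConnectionName: &conn,
			},
		},
	}
}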

func (*SqlQuery) AddFinalizer

func (r *SqlQuery) AddFinalizer()

func (*SqlQuery) CreateOrUpdateCond

func (r *SqlQuery) CreateOrUpdateCond(cond SqlQueryCondition)

Merge or update condition

func (*SqlQuery) DeepCopy

func (in *SqlQuery) DeepCopy() *SqlQuery

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQuery.

func (*SqlQuery) DeepCopyInto

func (in *SqlQuery) DeepCopyInto(out *SqlQuery)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQuery) DeepCopyObject

func (in *SqlQuery) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*SqlQuery) Default

func (r *SqlQuery) Default()

func (*SqlQuery) Deleted

func (r *SqlQuery) Deleted() bool

func (*SqlQuery) Descriptor

func (*SqlQuery) Descriptor() ([]byte, []int)

func (*SqlQuery) GetCond

func (*SqlQuery) GetCondIdx

func (r *SqlQuery) GetCondIdx(t SqlQueryConditionType) int

func (*SqlQuery) HasFinalizer

func (r *SqlQuery) HasFinalizer() bool

func (*SqlQuery) IsInCond

func (r *SqlQuery) IsInCond(ct SqlQueryConditionType) bool

func (*SqlQuery) IsReady

func (r *SqlQuery) IsReady() bool

func (*SqlQuery) IsSaved

func (r *SqlQuery) IsSaved() bool

func (*SqlQuery) MarkReady

func (r *SqlQuery) MarkReady()

func (*SqlQuery) MarkSaved

func (r *SqlQuery) MarkSaved()

func (*SqlQuery) Marshal

func (m *SqlQuery) Marshal() (dAtA []byte, err error)

func (*SqlQuery) MarshalTo

func (m *SqlQuery) MarshalTo(dAtA []byte) (int, error)

func (*SqlQuery) MarshalToSizedBuffer

func (m *SqlQuery) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQuery) Populate

func (r *SqlQuery) Populate(name string)

func (*SqlQuery) PrintConditions

func (r *SqlQuery) PrintConditions()

func (*SqlQuery) ProtoMessage

func (*SqlQuery) ProtoMessage()

func (*SqlQuery) RemoveFinalizer

func (r *SqlQuery) RemoveFinalizer()

func (*SqlQuery) Reset

func (m *SqlQuery) Reset()

func (*SqlQuery) SetupWebhookWithManager

func (r *SqlQuery) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*SqlQuery) Size

func (m *SqlQuery) Size() (n int)

func (*SqlQuery) String

func (this *SqlQuery) String() string

func (*SqlQuery) ToYamlFile

func (r *SqlQuery) ToYamlFile() ([]byte, error)

func (*SqlQuery) Unmarshal

func (m *SqlQuery) Unmarshal(dAtA []byte) error

func (*SqlQuery) UpdateRunStatus added in v0.4.612

func (in *SqlQuery) UpdateRunStatus(run SqlQueryRun)

func (*SqlQuery) ValidateCreate

func (recipe *SqlQuery) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*SqlQuery) ValidateDelete

func (recipe *SqlQuery) ValidateDelete() error

func (*SqlQuery) ValidateUpdate

func (recipe *SqlQuery) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*SqlQuery) XXX_DiscardUnknown

func (m *SqlQuery) XXX_DiscardUnknown()

func (*SqlQuery) XXX_Marshal

func (m *SqlQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQuery) XXX_Merge

func (m *SqlQuery) XXX_Merge(src proto.Message)

func (*SqlQuery) XXX_Size

func (m *SqlQuery) XXX_Size() int

func (*SqlQuery) XXX_Unmarshal

func (m *SqlQuery) XXX_Unmarshal(b []byte) error

type SqlQueryCondition

type SqlQueryCondition struct {
	// Type of SqlQuery condition.
	Type SqlQueryConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=SqlQueryConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

SqlQueryCondition describes the state of a SqlQuery at a certain point.

func (*SqlQueryCondition) DeepCopy

func (in *SqlQueryCondition) DeepCopy() *SqlQueryCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQueryCondition.

func (*SqlQueryCondition) DeepCopyInto

func (in *SqlQueryCondition) DeepCopyInto(out *SqlQueryCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQueryCondition) Descriptor

func (*SqlQueryCondition) Descriptor() ([]byte, []int)

func (*SqlQueryCondition) Marshal

func (m *SqlQueryCondition) Marshal() (dAtA []byte, err error)

func (*SqlQueryCondition) MarshalTo

func (m *SqlQueryCondition) MarshalTo(dAtA []byte) (int, error)

func (*SqlQueryCondition) MarshalToSizedBuffer

func (m *SqlQueryCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQueryCondition) ProtoMessage

func (*SqlQueryCondition) ProtoMessage()

func (*SqlQueryCondition) Reset

func (m *SqlQueryCondition) Reset()

func (*SqlQueryCondition) Size

func (m *SqlQueryCondition) Size() (n int)

func (*SqlQueryCondition) String

func (this *SqlQueryCondition) String() string

func (*SqlQueryCondition) Unmarshal

func (m *SqlQueryCondition) Unmarshal(dAtA []byte) error

func (*SqlQueryCondition) XXX_DiscardUnknown

func (m *SqlQueryCondition) XXX_DiscardUnknown()

func (*SqlQueryCondition) XXX_Marshal

func (m *SqlQueryCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQueryCondition) XXX_Merge

func (m *SqlQueryCondition) XXX_Merge(src proto.Message)

func (*SqlQueryCondition) XXX_Size

func (m *SqlQueryCondition) XXX_Size() int

func (*SqlQueryCondition) XXX_Unmarshal

func (m *SqlQueryCondition) XXX_Unmarshal(b []byte) error

type SqlQueryConditionType

type SqlQueryConditionType string

SqlQueryConditionType is the type of a SqlQuery condition

const (
	SqlQueryReady SqlQueryConditionType = "Ready"
	SqlQuerySaved SqlQueryConditionType = "Saved"
)

type SqlQueryList

type SqlQueryList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []SqlQuery `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true SqlQueryList contains a list of SqlQuery objects

func (*SqlQueryList) DeepCopy

func (in *SqlQueryList) DeepCopy() *SqlQueryList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQueryList.

func (*SqlQueryList) DeepCopyInto

func (in *SqlQueryList) DeepCopyInto(out *SqlQueryList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQueryList) DeepCopyObject

func (in *SqlQueryList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*SqlQueryList) Descriptor

func (*SqlQueryList) Descriptor() ([]byte, []int)

func (*SqlQueryList) Marshal

func (m *SqlQueryList) Marshal() (dAtA []byte, err error)

func (*SqlQueryList) MarshalTo

func (m *SqlQueryList) MarshalTo(dAtA []byte) (int, error)

func (*SqlQueryList) MarshalToSizedBuffer

func (m *SqlQueryList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQueryList) ProtoMessage

func (*SqlQueryList) ProtoMessage()

func (*SqlQueryList) Reset

func (m *SqlQueryList) Reset()

func (*SqlQueryList) Size

func (m *SqlQueryList) Size() (n int)

func (*SqlQueryList) String

func (this *SqlQueryList) String() string

func (*SqlQueryList) Unmarshal

func (m *SqlQueryList) Unmarshal(dAtA []byte) error

func (*SqlQueryList) XXX_DiscardUnknown

func (m *SqlQueryList) XXX_DiscardUnknown()

func (*SqlQueryList) XXX_Marshal

func (m *SqlQueryList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQueryList) XXX_Merge

func (m *SqlQueryList) XXX_Merge(src proto.Message)

func (*SqlQueryList) XXX_Size

func (m *SqlQueryList) XXX_Size() int

func (*SqlQueryList) XXX_Unmarshal

func (m *SqlQueryList) XXX_Unmarshal(b []byte) error

type SqlQueryRun

type SqlQueryRun struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              SqlQueryRunSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status SqlQueryRunStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

SqlQueryRun represents a single run of a SqlQuery. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=sqlqueryruns,singular=sqlqueryrun,categories={data,modela} +kubebuilder:subresource:status

func ParseSqlQueryRunYaml

func ParseSqlQueryRunYaml(content []byte) (*SqlQueryRun, error)

func (*SqlQueryRun) AddFinalizer

func (prediction *SqlQueryRun) AddFinalizer()

func (*SqlQueryRun) CompletionAlert added in v0.4.601

func (run *SqlQueryRun) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert

Generate a completion alert for the run

func (*SqlQueryRun) CreateOrUpdateCond

func (prediction *SqlQueryRun) CreateOrUpdateCond(cond SqlQueryRunCondition)

Merge or update condition

func (*SqlQueryRun) DeepCopy

func (in *SqlQueryRun) DeepCopy() *SqlQueryRun

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQueryRun.

func (*SqlQueryRun) DeepCopyInto

func (in *SqlQueryRun) DeepCopyInto(out *SqlQueryRun)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQueryRun) DeepCopyObject

func (in *SqlQueryRun) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*SqlQueryRun) Default

func (run *SqlQueryRun) Default()

func (*SqlQueryRun) Descriptor

func (*SqlQueryRun) Descriptor() ([]byte, []int)

func (*SqlQueryRun) ErrorAlert added in v0.4.601

func (run *SqlQueryRun) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert

func (*SqlQueryRun) GetCond

func (*SqlQueryRun) GetCondIdx

func (prediction *SqlQueryRun) GetCondIdx(t SqlQueryRunConditionType) int

func (*SqlQueryRun) HasFinalizer

func (prediction *SqlQueryRun) HasFinalizer() bool

func (*SqlQueryRun) IsCompleted

func (prediction *SqlQueryRun) IsCompleted() bool

func (*SqlQueryRun) IsFailed added in v0.4.614

func (in *SqlQueryRun) IsFailed() bool

func (*SqlQueryRun) IsSaved

func (version *SqlQueryRun) IsSaved() bool

func (*SqlQueryRun) Key

func (prediction *SqlQueryRun) Key() string

func (*SqlQueryRun) ManifestUri

func (prediction *SqlQueryRun) ManifestUri() string

func (*SqlQueryRun) MarkCompleted

func (prediction *SqlQueryRun) MarkCompleted()

func (*SqlQueryRun) MarkFailed

func (run *SqlQueryRun) MarkFailed(msg string)

func (*SqlQueryRun) MarkRunning

func (run *SqlQueryRun) MarkRunning()

func (*SqlQueryRun) MarkSaved

func (version *SqlQueryRun) MarkSaved()

func (*SqlQueryRun) Marshal

func (m *SqlQueryRun) Marshal() (dAtA []byte, err error)

func (*SqlQueryRun) MarshalTo

func (m *SqlQueryRun) MarshalTo(dAtA []byte) (int, error)

func (*SqlQueryRun) MarshalToSizedBuffer

func (m *SqlQueryRun) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQueryRun) OpName

func (prediction *SqlQueryRun) OpName() string

func (*SqlQueryRun) ProtoMessage

func (*SqlQueryRun) ProtoMessage()

func (*SqlQueryRun) RemoveFinalizer

func (prediction *SqlQueryRun) RemoveFinalizer()

func (*SqlQueryRun) RepPath

func (prediction *SqlQueryRun) RepPath(root string) (string, error)

Return the on-disk location of the run's representation

func (*SqlQueryRun) Reset

func (m *SqlQueryRun) Reset()

func (*SqlQueryRun) RootUri

func (prediction *SqlQueryRun) RootUri() string

func (*SqlQueryRun) RunStatus added in v0.4.614

func (run *SqlQueryRun) RunStatus() *catalog.LastRunStatus

Return the state of the run as a LastRunStatus

func (*SqlQueryRun) SetupWebhookWithManager

func (prediction *SqlQueryRun) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*SqlQueryRun) Size

func (m *SqlQueryRun) Size() (n int)

func (*SqlQueryRun) SqlQueryName

func (prediction *SqlQueryRun) SqlQueryName() string

func (*SqlQueryRun) String

func (this *SqlQueryRun) String() string

func (*SqlQueryRun) ToYamlFile

func (prediction *SqlQueryRun) ToYamlFile() ([]byte, error)

func (*SqlQueryRun) Unmarshal

func (m *SqlQueryRun) Unmarshal(dAtA []byte) error

func (*SqlQueryRun) ValidateCreate

func (recipe *SqlQueryRun) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*SqlQueryRun) ValidateDelete

func (recipe *SqlQueryRun) ValidateDelete() error

func (*SqlQueryRun) ValidateUpdate

func (recipe *SqlQueryRun) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*SqlQueryRun) XXX_DiscardUnknown

func (m *SqlQueryRun) XXX_DiscardUnknown()

func (*SqlQueryRun) XXX_Marshal

func (m *SqlQueryRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQueryRun) XXX_Merge

func (m *SqlQueryRun) XXX_Merge(src proto.Message)

func (*SqlQueryRun) XXX_Size

func (m *SqlQueryRun) XXX_Size() int

func (*SqlQueryRun) XXX_Unmarshal

func (m *SqlQueryRun) XXX_Unmarshal(b []byte) error

type SqlQueryRunCondition

type SqlQueryRunCondition struct {
	// Type of SqlQueryRun condition.
	Type SqlQueryRunConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=SqlQueryRunConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

SqlQueryRunCondition describes the state of a SqlQueryRun at a certain point.

func (*SqlQueryRunCondition) DeepCopy

func (in *SqlQueryRunCondition) DeepCopy() *SqlQueryRunCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQueryRunCondition.

func (*SqlQueryRunCondition) DeepCopyInto

func (in *SqlQueryRunCondition) DeepCopyInto(out *SqlQueryRunCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQueryRunCondition) Descriptor

func (*SqlQueryRunCondition) Descriptor() ([]byte, []int)

func (*SqlQueryRunCondition) Marshal

func (m *SqlQueryRunCondition) Marshal() (dAtA []byte, err error)

func (*SqlQueryRunCondition) MarshalTo

func (m *SqlQueryRunCondition) MarshalTo(dAtA []byte) (int, error)

func (*SqlQueryRunCondition) MarshalToSizedBuffer

func (m *SqlQueryRunCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQueryRunCondition) ProtoMessage

func (*SqlQueryRunCondition) ProtoMessage()

func (*SqlQueryRunCondition) Reset

func (m *SqlQueryRunCondition) Reset()

func (*SqlQueryRunCondition) Size

func (m *SqlQueryRunCondition) Size() (n int)

func (*SqlQueryRunCondition) String

func (this *SqlQueryRunCondition) String() string

func (*SqlQueryRunCondition) Unmarshal

func (m *SqlQueryRunCondition) Unmarshal(dAtA []byte) error

func (*SqlQueryRunCondition) XXX_DiscardUnknown

func (m *SqlQueryRunCondition) XXX_DiscardUnknown()

func (*SqlQueryRunCondition) XXX_Marshal

func (m *SqlQueryRunCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQueryRunCondition) XXX_Merge

func (m *SqlQueryRunCondition) XXX_Merge(src proto.Message)

func (*SqlQueryRunCondition) XXX_Size

func (m *SqlQueryRunCondition) XXX_Size() int

func (*SqlQueryRunCondition) XXX_Unmarshal

func (m *SqlQueryRunCondition) XXX_Unmarshal(b []byte) error

type SqlQueryRunConditionType

type SqlQueryRunConditionType string

SqlQueryRunConditionType is the type of a SqlQueryRun condition

const (
	SqlQueryRunCompleted SqlQueryRunConditionType = "Completed"
	SqlQueryRunSaved     SqlQueryRunConditionType = "Saved"
)

type SqlQueryRunList

type SqlQueryRunList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []SqlQueryRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true SqlQueryRunList contains a list of SqlQueryRun objects

func (*SqlQueryRunList) DeepCopy

func (in *SqlQueryRunList) DeepCopy() *SqlQueryRunList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQueryRunList.

func (*SqlQueryRunList) DeepCopyInto

func (in *SqlQueryRunList) DeepCopyInto(out *SqlQueryRunList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQueryRunList) DeepCopyObject

func (in *SqlQueryRunList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*SqlQueryRunList) Descriptor

func (*SqlQueryRunList) Descriptor() ([]byte, []int)

func (*SqlQueryRunList) Marshal

func (m *SqlQueryRunList) Marshal() (dAtA []byte, err error)

func (*SqlQueryRunList) MarshalTo

func (m *SqlQueryRunList) MarshalTo(dAtA []byte) (int, error)

func (*SqlQueryRunList) MarshalToSizedBuffer

func (m *SqlQueryRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQueryRunList) ProtoMessage

func (*SqlQueryRunList) ProtoMessage()

func (*SqlQueryRunList) Reset

func (m *SqlQueryRunList) Reset()

func (*SqlQueryRunList) Size

func (m *SqlQueryRunList) Size() (n int)

func (*SqlQueryRunList) String

func (this *SqlQueryRunList) String() string

func (*SqlQueryRunList) Unmarshal

func (m *SqlQueryRunList) Unmarshal(dAtA []byte) error

func (*SqlQueryRunList) XXX_DiscardUnknown

func (m *SqlQueryRunList) XXX_DiscardUnknown()

func (*SqlQueryRunList) XXX_Marshal

func (m *SqlQueryRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQueryRunList) XXX_Merge

func (m *SqlQueryRunList) XXX_Merge(src proto.Message)

func (*SqlQueryRunList) XXX_Size

func (m *SqlQueryRunList) XXX_Size() int

func (*SqlQueryRunList) XXX_Unmarshal

func (m *SqlQueryRunList) XXX_Unmarshal(b []byte) error

type SqlQueryRunPhase

type SqlQueryRunPhase string

SqlQueryRunPhase is the current phase of a SqlQueryRun

const (
	SqlQueryRunPhasePending   SqlQueryRunPhase = "Pending"
	SqlQueryRunPhaseRunning   SqlQueryRunPhase = "Running"
	SqlQueryRunPhaseFailed    SqlQueryRunPhase = "Failed"
	SqlQueryRunPhaseAborted   SqlQueryRunPhase = "Aborted"
	SqlQueryRunPhaseCompleted SqlQueryRunPhase = "Completed"
)
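
A small sketch of reacting to the phase of a run, for example in a controller or client loop; the helper function and its messages are illustrative.

// describePhase is a hypothetical helper mapping a run phase to a message.
func describePhase(phase SqlQueryRunPhase) string {
	switch phase {
	case SqlQueryRunPhasePending, SqlQueryRunPhaseRunning:
		return "query is still in progress"
	case SqlQueryRunPhaseCompleted:
		return "query finished successfully"
	case SqlQueryRunPhaseFailed, SqlQueryRunPhaseAborted:
		return "query did not complete"
	default:
		return "unknown phase"
	}
}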

type SqlQueryRunSpec

type SqlQueryRunSpec struct {
	// The sqlquery owner
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:="no-one"
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// The product version for the sqlquery.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// The user-provided description of the run
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The name of the database to run the query against
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Database *string `json:"databaser,omitempty" protobuf:"bytes,4,opt,name=database"`
	// If true, SqlOrTable refers to a table rather than a SQL statement
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Table *bool `json:"sql,omitempty" protobuf:"varint,5,opt,name=table"`
	// The SQL statement to execute, or the name of the table to read
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	SqlOrTable *string `json:"sqlOrTable,omitempty" protobuf:"bytes,6,opt,name=sqlOrTable"`
	// The name of the connection to the SQL data source
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	ConnectionName *string `json:"connectionName,omitempty" protobuf:"bytes,7,opt,name=connectionName"`
	// Resources specifies the hardware resource requirements for the run.
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,8,opt,name=resources"`
	// ActiveDeadlineSeconds is the deadline, in seconds, of the job created for this run.
	// +kubebuilder:default:=600
	// +kubebuilder:validation:Minimum=0
	// +kubebuilder:validation:Optional
	ActiveDeadlineSeconds *int64 `json:"activeDeadlineSeconds,omitempty" protobuf:"varint,9,opt,name=activeDeadlineSeconds"`
	// The priority of this run. The default is medium.
	// +kubebuilder:default:=medium
	// +kubebuilder:validation:Optional
	Priority *catalog.PriorityLevel `json:"priority,omitempty" protobuf:"bytes,10,opt,name=priority"`
	// Aborted is set when the run should be aborted
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Aborted *bool `json:"aborted,omitempty" protobuf:"varint,11,opt,name=aborted"`
	// If true, save (materialize) the query results
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Materialized *bool `json:"materialized,omitempty" protobuf:"varint,12,opt,name=materialized"`
	// If true, generate a report each time the query is executed
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Reported *bool `json:"reported,omitempty" protobuf:"varint,13,opt,name=reported"`
	// The name of the SqlQuery that owns this run
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	SqlQueryName *string `json:"sqlQueryName,omitempty" protobuf:"bytes,14,opt,name=sqlQueryName"`
	// The reference to the Lab that provides the execution environment for this run
	// +kubebuilder:validation:Optional
	LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,15,opt,name=labRef"`
}

SqlQueryRunSpec contains the desired state of a SqlQueryRun
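
For illustration, the sketch below fills in the most common fields of a SqlQueryRunSpec for a one-off, materialized query; every literal value is an assumption.

// adHocRunSpec builds a hypothetical SqlQueryRunSpec.
func adHocRunSpec() SqlQueryRunSpec {
	sql := "SELECT COUNT(*) FROM events" // assumed statement
	conn := "warehouse-connection"       // assumed Connection name
	deadline := int64(300)
	materialized := true
	return SqlQueryRunSpec{
		SqlOrTable:            &sql,
		ConnectionName:        &conn,
		ActiveDeadlineSeconds: &deadline,
		Materialized:          &materialized,
	}
}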

func (*SqlQueryRunSpec) DeepCopy

func (in *SqlQueryRunSpec) DeepCopy() *SqlQueryRunSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQueryRunSpec.

func (*SqlQueryRunSpec) DeepCopyInto

func (in *SqlQueryRunSpec) DeepCopyInto(out *SqlQueryRunSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQueryRunSpec) Descriptor

func (*SqlQueryRunSpec) Descriptor() ([]byte, []int)

func (*SqlQueryRunSpec) Marshal

func (m *SqlQueryRunSpec) Marshal() (dAtA []byte, err error)

func (*SqlQueryRunSpec) MarshalTo

func (m *SqlQueryRunSpec) MarshalTo(dAtA []byte) (int, error)

func (*SqlQueryRunSpec) MarshalToSizedBuffer

func (m *SqlQueryRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQueryRunSpec) ProtoMessage

func (*SqlQueryRunSpec) ProtoMessage()

func (*SqlQueryRunSpec) Reset

func (m *SqlQueryRunSpec) Reset()

func (*SqlQueryRunSpec) Size

func (m *SqlQueryRunSpec) Size() (n int)

func (*SqlQueryRunSpec) String

func (this *SqlQueryRunSpec) String() string

func (*SqlQueryRunSpec) Unmarshal

func (m *SqlQueryRunSpec) Unmarshal(dAtA []byte) error

func (*SqlQueryRunSpec) XXX_DiscardUnknown

func (m *SqlQueryRunSpec) XXX_DiscardUnknown()

func (*SqlQueryRunSpec) XXX_Marshal

func (m *SqlQueryRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQueryRunSpec) XXX_Merge

func (m *SqlQueryRunSpec) XXX_Merge(src proto.Message)

func (*SqlQueryRunSpec) XXX_Size

func (m *SqlQueryRunSpec) XXX_Size() int

func (*SqlQueryRunSpec) XXX_Unmarshal

func (m *SqlQueryRunSpec) XXX_Unmarshal(b []byte) error

type SqlQueryRunStatus

type SqlQueryRunStatus struct {
	// StartTime is the start time of the run.
	StartTime *metav1.Time `json:"startTime,omitempty" protobuf:"bytes,1,opt,name=startTime"`
	// EndTime is the end time of the run.
	EndTime *metav1.Time `json:"endTime,omitempty" protobuf:"bytes,2,opt,name=endTime"`
	// Phase is the current phase of the run
	// +kubebuilder:default:="Pending"
	// +kubebuilder:validation:Optional
	Phase SqlQueryRunPhase `json:"phase,omitempty" protobuf:"bytes,3,opt,name=phase"`
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,4,opt,name=observedGeneration"`
	// The number of rows in the query result
	//+kubebuilder:validation:Optional
	Rows int32 `json:"rows,omitempty" protobuf:"varint,5,opt,name=rows"`
	// What triggered the run
	//+kubebuilder:validation:Optional
	TriggeredBy catalog.TriggerType `json:"triggeredBy,omitempty" protobuf:"bytes,6,opt,name=triggeredBy"`
	// Holds the location of log paths
	//+kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,7,opt,name=logs"`
	// The location of the materialized view
	//+kubebuilder:validation:Optional
	Location DataLocation `json:"location,omitempty" protobuf:"bytes,9,opt,name=location"`
	// The name of the report object.
	ReportName string `json:"reportName,omitempty" protobuf:"bytes,10,opt,name=reportName"`

	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,11,opt,name=lastUpdated"`
	// Update in case of terminal failure
	// Borrowed from cluster api controller
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,12,opt,name=failureReason"`
	// Update in case of terminal failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,13,opt,name=failureMessage"`

	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []SqlQueryRunCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,14,rep,name=conditions"`
}

SqlQueryRunStatus defines the observed state of SqlQueryRun

func (*SqlQueryRunStatus) DeepCopy

func (in *SqlQueryRunStatus) DeepCopy() *SqlQueryRunStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQueryRunStatus.

func (*SqlQueryRunStatus) DeepCopyInto

func (in *SqlQueryRunStatus) DeepCopyInto(out *SqlQueryRunStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQueryRunStatus) Descriptor

func (*SqlQueryRunStatus) Descriptor() ([]byte, []int)

func (*SqlQueryRunStatus) Marshal

func (m *SqlQueryRunStatus) Marshal() (dAtA []byte, err error)

func (*SqlQueryRunStatus) MarshalTo

func (m *SqlQueryRunStatus) MarshalTo(dAtA []byte) (int, error)

func (*SqlQueryRunStatus) MarshalToSizedBuffer

func (m *SqlQueryRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQueryRunStatus) ProtoMessage

func (*SqlQueryRunStatus) ProtoMessage()

func (*SqlQueryRunStatus) Reset

func (m *SqlQueryRunStatus) Reset()

func (*SqlQueryRunStatus) Size

func (m *SqlQueryRunStatus) Size() (n int)

func (*SqlQueryRunStatus) String

func (this *SqlQueryRunStatus) String() string

func (*SqlQueryRunStatus) Unmarshal

func (m *SqlQueryRunStatus) Unmarshal(dAtA []byte) error

func (*SqlQueryRunStatus) XXX_DiscardUnknown

func (m *SqlQueryRunStatus) XXX_DiscardUnknown()

func (*SqlQueryRunStatus) XXX_Marshal

func (m *SqlQueryRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQueryRunStatus) XXX_Merge

func (m *SqlQueryRunStatus) XXX_Merge(src proto.Message)

func (*SqlQueryRunStatus) XXX_Size

func (m *SqlQueryRunStatus) XXX_Size() int

func (*SqlQueryRunStatus) XXX_Unmarshal

func (m *SqlQueryRunStatus) XXX_Unmarshal(b []byte) error

type SqlQuerySpec

type SqlQuerySpec struct {
	// The sqlquery owner
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:="no-one"
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// The product version for the sqlquery.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// The user-provided description of the SQL query
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The template for the SqlQueryRun objects created from this SqlQuery.
	// +kubebuilder:validation:Optional
	Template SqlQueryRunSpec `json:"template,omitempty" protobuf:"bytes,4,opt,name=template"`
	// Schedule for running the sql query
	// +kubebuilder:validation:Optional
	Schedule catalog.RunSchedule `json:"schedule,omitempty" protobuf:"bytes,5,opt,name=schedule"`
}

SqlQuerySpec contains the desired state of a SqlQuery
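
The sketch below wires a run template into a SqlQuerySpec; it reuses adHocRunSpec from the SqlQueryRunSpec sketch above, leaves Schedule (a catalog.RunSchedule) at its zero value because its shape is defined in the catalog package, and uses assumed values throughout.

// nightlyQuerySpec is a hypothetical SqlQuerySpec.
func nightlyQuerySpec() SqlQuerySpec {
	desc := "counts events once per night" // assumed description
	return SqlQuerySpec{
		Description: &desc,
		Template:    adHocRunSpec(),
		// Schedule: catalog.RunSchedule{...}, // cron-style schedule, see the catalog package
	}
}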

func (*SqlQuerySpec) DeepCopy

func (in *SqlQuerySpec) DeepCopy() *SqlQuerySpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQuerySpec.

func (*SqlQuerySpec) DeepCopyInto

func (in *SqlQuerySpec) DeepCopyInto(out *SqlQuerySpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQuerySpec) Descriptor

func (*SqlQuerySpec) Descriptor() ([]byte, []int)

func (*SqlQuerySpec) Marshal

func (m *SqlQuerySpec) Marshal() (dAtA []byte, err error)

func (*SqlQuerySpec) MarshalTo

func (m *SqlQuerySpec) MarshalTo(dAtA []byte) (int, error)

func (*SqlQuerySpec) MarshalToSizedBuffer

func (m *SqlQuerySpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQuerySpec) ProtoMessage

func (*SqlQuerySpec) ProtoMessage()

func (*SqlQuerySpec) Reset

func (m *SqlQuerySpec) Reset()

func (*SqlQuerySpec) Size

func (m *SqlQuerySpec) Size() (n int)

func (*SqlQuerySpec) String

func (this *SqlQuerySpec) String() string

func (*SqlQuerySpec) Unmarshal

func (m *SqlQuerySpec) Unmarshal(dAtA []byte) error

func (*SqlQuerySpec) XXX_DiscardUnknown

func (m *SqlQuerySpec) XXX_DiscardUnknown()

func (*SqlQuerySpec) XXX_Marshal

func (m *SqlQuerySpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQuerySpec) XXX_Merge

func (m *SqlQuerySpec) XXX_Merge(src proto.Message)

func (*SqlQuerySpec) XXX_Size

func (m *SqlQuerySpec) XXX_Size() int

func (*SqlQuerySpec) XXX_Unmarshal

func (m *SqlQuerySpec) XXX_Unmarshal(b []byte) error

type SqlQueryStatus

type SqlQueryStatus struct {
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,1,opt,name=lastUpdated"`
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,2,opt,name=observedGeneration"`
	// The status of the last run created for this query
	//+kubebuilder:validation:Optional
	LastRun catalog.LastRunStatus `json:"lastRun,omitempty" protobuf:"bytes,3,opt,name=lastRun"`
	// The time of the next scheduled run
	//+kubebuilder:validation:Optional
	NextRun *metav1.Time `json:"nextRun,omitempty" protobuf:"bytes,4,opt,name=nextRun"`
	// Update in case of terminal failure
	// Borrowed from cluster api controller
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,5,opt,name=failureReason"`
	// Update in case of terminal failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,6,opt,name=failureMessage"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []SqlQueryCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,7,rep,name=conditions"`
}

SqlQueryStatus defines the observed state of SqlQuery

func (*SqlQueryStatus) DeepCopy

func (in *SqlQueryStatus) DeepCopy() *SqlQueryStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SqlQueryStatus.

func (*SqlQueryStatus) DeepCopyInto

func (in *SqlQueryStatus) DeepCopyInto(out *SqlQueryStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SqlQueryStatus) Descriptor

func (*SqlQueryStatus) Descriptor() ([]byte, []int)

func (*SqlQueryStatus) Marshal

func (m *SqlQueryStatus) Marshal() (dAtA []byte, err error)

func (*SqlQueryStatus) MarshalTo

func (m *SqlQueryStatus) MarshalTo(dAtA []byte) (int, error)

func (*SqlQueryStatus) MarshalToSizedBuffer

func (m *SqlQueryStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SqlQueryStatus) ProtoMessage

func (*SqlQueryStatus) ProtoMessage()

func (*SqlQueryStatus) Reset

func (m *SqlQueryStatus) Reset()

func (*SqlQueryStatus) Size

func (m *SqlQueryStatus) Size() (n int)

func (*SqlQueryStatus) String

func (this *SqlQueryStatus) String() string

func (*SqlQueryStatus) Unmarshal

func (m *SqlQueryStatus) Unmarshal(dAtA []byte) error

func (*SqlQueryStatus) XXX_DiscardUnknown

func (m *SqlQueryStatus) XXX_DiscardUnknown()

func (*SqlQueryStatus) XXX_Marshal

func (m *SqlQueryStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SqlQueryStatus) XXX_Merge

func (m *SqlQueryStatus) XXX_Merge(src proto.Message)

func (*SqlQueryStatus) XXX_Size

func (m *SqlQueryStatus) XXX_Size() int

func (*SqlQueryStatus) XXX_Unmarshal

func (m *SqlQueryStatus) XXX_Unmarshal(b []byte) error

type SyntacticSpec added in v0.4.914

type SyntacticSpec struct {
	// Indicates if synthetic data generation is enabled
	// +kubebuilder:validation:Optional
	Enabled *bool `json:"enabled,omitempty" protobuf:"varint,1,opt,name=enabled"`
	// The number of rows of synthetic data to generate
	// +kubebuilder:default:=0
	// +kubebuilder:validation:Optional
	Rows *int32 `json:"rows,omitempty" protobuf:"varint,2,opt,name=rows"`
}
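
A tiny sketch of enabling the feature with an explicit row count; reading Rows as the number of generated rows is an assumption based on the field name.

// syntheticTenThousand is a hypothetical SyntacticSpec.
func syntheticTenThousand() SyntacticSpec {
	enabled := true
	rows := int32(10000)
	return SyntacticSpec{Enabled: &enabled, Rows: &rows}
}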

func (*SyntacticSpec) DeepCopy added in v0.4.915

func (in *SyntacticSpec) DeepCopy() *SyntacticSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SyntacticSpec.

func (*SyntacticSpec) DeepCopyInto added in v0.4.915

func (in *SyntacticSpec) DeepCopyInto(out *SyntacticSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*SyntacticSpec) Descriptor added in v0.4.914

func (*SyntacticSpec) Descriptor() ([]byte, []int)

func (*SyntacticSpec) Marshal added in v0.4.914

func (m *SyntacticSpec) Marshal() (dAtA []byte, err error)

func (*SyntacticSpec) MarshalTo added in v0.4.914

func (m *SyntacticSpec) MarshalTo(dAtA []byte) (int, error)

func (*SyntacticSpec) MarshalToSizedBuffer added in v0.4.914

func (m *SyntacticSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*SyntacticSpec) ProtoMessage added in v0.4.914

func (*SyntacticSpec) ProtoMessage()

func (*SyntacticSpec) Reset added in v0.4.914

func (m *SyntacticSpec) Reset()

func (*SyntacticSpec) Size added in v0.4.914

func (m *SyntacticSpec) Size() (n int)

func (*SyntacticSpec) String added in v0.4.914

func (this *SyntacticSpec) String() string

func (*SyntacticSpec) Unmarshal added in v0.4.914

func (m *SyntacticSpec) Unmarshal(dAtA []byte) error

func (*SyntacticSpec) XXX_DiscardUnknown added in v0.4.914

func (m *SyntacticSpec) XXX_DiscardUnknown()

func (*SyntacticSpec) XXX_Marshal added in v0.4.914

func (m *SyntacticSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*SyntacticSpec) XXX_Merge added in v0.4.914

func (m *SyntacticSpec) XXX_Merge(src proto.Message)

func (*SyntacticSpec) XXX_Size added in v0.4.914

func (m *SyntacticSpec) XXX_Size() int

func (*SyntacticSpec) XXX_Unmarshal added in v0.4.914

func (m *SyntacticSpec) XXX_Unmarshal(b []byte) error

type TableSpec

type TableSpec struct {
	// Dataset is the name of the dataset
	// +kubebuilder:validation:Optional
	DatasetName *string `json:"datasetName,omitempty" protobuf:"bytes,1,opt,name=datasetName"`
	// List of table columns; if empty, all columns are used
	// +kubebuilder:validation:Optional
	Columns []string `json:"columns,omitempty" protobuf:"bytes,2,rep,name=columns"`
	// One or more filters
	// +kubebuilder:validation:Optional
	Filters []string `json:"filters,omitempty" protobuf:"bytes,3,rep,name=filters"`
	// Groupby columns
	// +kubebuilder:validation:Optional
	GroupBy []string `json:"groupby,omitempty" protobuf:"bytes,4,rep,name=groupby"`
	// Rows is the number of rows to display
	// +kubebuilder:validation:Optional
	Rows *int32 `json:"rows,omitempty" protobuf:"varint,5,opt,name=rows"`
	// Show index column
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	ShowIndex *bool `json:"showIndex,omitempty" protobuf:"varint,6,opt,name=showIndex"`
	// Show the table border
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Border *bool `json:"border,omitempty" protobuf:"varint,7,opt,name=border"`
}
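
As a sketch, a TableSpec that renders a filtered, grouped view of a dataset might look like this; the dataset, filter expression, and column names are assumptions.

// revenueByCountry is a hypothetical table specification.
func revenueByCountry() TableSpec {
	dataset := "orders" // assumed dataset name
	rows := int32(20)
	showIndex := false
	return TableSpec{
		DatasetName: &dataset,
		Columns:     []string{"country", "revenue"},
		Filters:     []string{"revenue > 0"}, // assumed filter expression syntax
		GroupBy:     []string{"country"},
		Rows:        &rows,
		ShowIndex:   &showIndex,
	}
}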

func (*TableSpec) DeepCopy

func (in *TableSpec) DeepCopy() *TableSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpec.

func (*TableSpec) DeepCopyInto

func (in *TableSpec) DeepCopyInto(out *TableSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TableSpec) Descriptor

func (*TableSpec) Descriptor() ([]byte, []int)

func (*TableSpec) Marshal

func (m *TableSpec) Marshal() (dAtA []byte, err error)

func (*TableSpec) MarshalTo

func (m *TableSpec) MarshalTo(dAtA []byte) (int, error)

func (*TableSpec) MarshalToSizedBuffer

func (m *TableSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*TableSpec) ProtoMessage

func (*TableSpec) ProtoMessage()

func (*TableSpec) Reset

func (m *TableSpec) Reset()

func (*TableSpec) Size

func (m *TableSpec) Size() (n int)

func (*TableSpec) String

func (this *TableSpec) String() string

func (*TableSpec) Unmarshal

func (m *TableSpec) Unmarshal(dAtA []byte) error

func (*TableSpec) XXX_DiscardUnknown

func (m *TableSpec) XXX_DiscardUnknown()

func (*TableSpec) XXX_Marshal

func (m *TableSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*TableSpec) XXX_Merge

func (m *TableSpec) XXX_Merge(src proto.Message)

func (*TableSpec) XXX_Size

func (m *TableSpec) XXX_Size() int

func (*TableSpec) XXX_Unmarshal

func (m *TableSpec) XXX_Unmarshal(b []byte) error

type TimeSeriesSchema

type TimeSeriesSchema struct {
	// The time series frequency
	Freq catalog.Freq `json:"freq,omitempty" protobuf:"bytes,1,opt,name=freq"`
	// The country whose holidays should be taken into account
	Country *catalog.HolidayCountry `json:"country,omitempty" protobuf:"bytes,2,opt,name=country"`
}

func (*TimeSeriesSchema) DeepCopy

func (in *TimeSeriesSchema) DeepCopy() *TimeSeriesSchema

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TimeSeriesSchema.

func (*TimeSeriesSchema) DeepCopyInto

func (in *TimeSeriesSchema) DeepCopyInto(out *TimeSeriesSchema)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*TimeSeriesSchema) Descriptor

func (*TimeSeriesSchema) Descriptor() ([]byte, []int)

func (*TimeSeriesSchema) Marshal

func (m *TimeSeriesSchema) Marshal() (dAtA []byte, err error)

func (*TimeSeriesSchema) MarshalTo

func (m *TimeSeriesSchema) MarshalTo(dAtA []byte) (int, error)

func (*TimeSeriesSchema) MarshalToSizedBuffer

func (m *TimeSeriesSchema) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*TimeSeriesSchema) ProtoMessage

func (*TimeSeriesSchema) ProtoMessage()

func (*TimeSeriesSchema) Reset

func (m *TimeSeriesSchema) Reset()

func (*TimeSeriesSchema) Size

func (m *TimeSeriesSchema) Size() (n int)

func (*TimeSeriesSchema) String

func (this *TimeSeriesSchema) String() string

func (*TimeSeriesSchema) Unmarshal

func (m *TimeSeriesSchema) Unmarshal(dAtA []byte) error

func (*TimeSeriesSchema) XXX_DiscardUnknown

func (m *TimeSeriesSchema) XXX_DiscardUnknown()

func (*TimeSeriesSchema) XXX_Marshal

func (m *TimeSeriesSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*TimeSeriesSchema) XXX_Merge

func (m *TimeSeriesSchema) XXX_Merge(src proto.Message)

func (*TimeSeriesSchema) XXX_Size

func (m *TimeSeriesSchema) XXX_Size() int

func (*TimeSeriesSchema) XXX_Unmarshal

func (m *TimeSeriesSchema) XXX_Unmarshal(b []byte) error

type WebRequest

type WebRequest struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              WebRequestSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status WebRequestStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

WebRequest represents a single web request in the web request store. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=webrequests,singular=webrequest,categories={data,modela} +kubebuilder:subresource:status
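The object follows the usual Kubernetes pattern of TypeMeta/ObjectMeta plus Spec and Status. A minimal, hypothetical in-package sketch of constructing one follows; the names, URL and verb are illustrative values only (see WebRequestSpec and WebRequestRunSpec below for the full field set).

package v1alpha1

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// newExampleWebRequest builds a WebRequest with an illustrative GET template.
func newExampleWebRequest() *WebRequest {
	owner := "data-team"                          // illustrative owner
	url := "https://api.example.com/v1/items"     // illustrative target URL
	verb := "GET"
	return &WebRequest{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "items-request",
			Namespace: "default-tenant",
		},
		Spec: WebRequestSpec{
			Owner: &owner,
			Template: WebRequestRunSpec{
				URL:  &url,
				Verb: &verb,
			},
		},
	}
}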

func (*WebRequest) AddFinalizer

func (r *WebRequest) AddFinalizer()

func (*WebRequest) CreateOrUpdateCond

func (r *WebRequest) CreateOrUpdateCond(cond WebRequestCondition)

Merge or update the given condition on the WebRequest status.
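A hedged sketch of the condition helpers: record a Ready condition and query it back. The reason and message strings are illustrative; the field set is taken from WebRequestCondition and the WebRequestReady constant below.

package v1alpha1

import (
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// markReadyExample sets the Ready condition and checks it through the helpers.
func markReadyExample(wr *WebRequest) bool {
	now := metav1.Now()
	wr.CreateOrUpdateCond(WebRequestCondition{
		Type:               WebRequestReady,
		Status:             corev1.ConditionTrue,
		LastTransitionTime: &now,
		Reason:             "Reconciled",          // illustrative reason
		Message:            "web request is ready", // illustrative message
	})
	// IsInCond and IsReady read the stored conditions back.
	return wr.IsInCond(WebRequestReady) && wr.IsReady()
}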

func (*WebRequest) DeepCopy

func (in *WebRequest) DeepCopy() *WebRequest

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequest.

func (*WebRequest) DeepCopyInto

func (in *WebRequest) DeepCopyInto(out *WebRequest)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequest) DeepCopyObject

func (in *WebRequest) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*WebRequest) Default

func (r *WebRequest) Default()

func (*WebRequest) Deleted

func (r *WebRequest) Deleted() bool

func (*WebRequest) Descriptor

func (*WebRequest) Descriptor() ([]byte, []int)

func (*WebRequest) GetCond

func (*WebRequest) GetCondIdx

func (r *WebRequest) GetCondIdx(t WebRequestConditionType) int

func (*WebRequest) HasFinalizer

func (r *WebRequest) HasFinalizer() bool

func (*WebRequest) IsInCond

func (r *WebRequest) IsInCond(ct WebRequestConditionType) bool

func (*WebRequest) IsReady

func (r *WebRequest) IsReady() bool

func (*WebRequest) IsSaved

func (r *WebRequest) IsSaved() bool

func (*WebRequest) MarkReady

func (r *WebRequest) MarkReady()

func (*WebRequest) MarkSaved

func (r *WebRequest) MarkSaved()

func (*WebRequest) Marshal

func (m *WebRequest) Marshal() (dAtA []byte, err error)

func (*WebRequest) MarshalTo

func (m *WebRequest) MarshalTo(dAtA []byte) (int, error)

func (*WebRequest) MarshalToSizedBuffer

func (m *WebRequest) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequest) Populate

func (r *WebRequest) Populate(name string)

func (*WebRequest) PrintConditions

func (r *WebRequest) PrintConditions()

func (*WebRequest) ProtoMessage

func (*WebRequest) ProtoMessage()

func (*WebRequest) RemoveFinalizer

func (r *WebRequest) RemoveFinalizer()

func (*WebRequest) Reset

func (m *WebRequest) Reset()

func (*WebRequest) SetupWebhookWithManager

func (r *WebRequest) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*WebRequest) Size

func (m *WebRequest) Size() (n int)

func (*WebRequest) String

func (this *WebRequest) String() string

func (*WebRequest) ToYamlFile

func (r *WebRequest) ToYamlFile() ([]byte, error)

func (*WebRequest) Unmarshal

func (m *WebRequest) Unmarshal(dAtA []byte) error

func (*WebRequest) UpdateRunStatus added in v0.4.612

func (in *WebRequest) UpdateRunStatus(run WebRequestRun)

func (*WebRequest) ValidateCreate

func (recipe *WebRequest) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*WebRequest) ValidateDelete

func (recipe *WebRequest) ValidateDelete() error

func (*WebRequest) ValidateUpdate

func (recipe *WebRequest) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
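A sketch of the usual kubebuilder wiring, assuming a controller-runtime manager is already set up elsewhere: SetupWebhookWithManager registers the Default, ValidateCreate, ValidateUpdate and ValidateDelete hooks documented above with the manager's webhook server.

package v1alpha1

import (
	ctrl "sigs.k8s.io/controller-runtime"
)

// setupWebRequestWebhook registers the WebRequest webhooks with the manager.
func setupWebRequestWebhook(mgr ctrl.Manager) error {
	return (&WebRequest{}).SetupWebhookWithManager(mgr)
}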

func (*WebRequest) XXX_DiscardUnknown

func (m *WebRequest) XXX_DiscardUnknown()

func (*WebRequest) XXX_Marshal

func (m *WebRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequest) XXX_Merge

func (m *WebRequest) XXX_Merge(src proto.Message)

func (*WebRequest) XXX_Size

func (m *WebRequest) XXX_Size() int

func (*WebRequest) XXX_Unmarshal

func (m *WebRequest) XXX_Unmarshal(b []byte) error

type WebRequestCondition

type WebRequestCondition struct {
	// Type of account condition.
	Type WebRequestConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=WebRequestConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

WebRequestCondition describes the state of a WebRequest at a certain point.

func (*WebRequestCondition) DeepCopy

func (in *WebRequestCondition) DeepCopy() *WebRequestCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequestCondition.

func (*WebRequestCondition) DeepCopyInto

func (in *WebRequestCondition) DeepCopyInto(out *WebRequestCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequestCondition) Descriptor

func (*WebRequestCondition) Descriptor() ([]byte, []int)

func (*WebRequestCondition) Marshal

func (m *WebRequestCondition) Marshal() (dAtA []byte, err error)

func (*WebRequestCondition) MarshalTo

func (m *WebRequestCondition) MarshalTo(dAtA []byte) (int, error)

func (*WebRequestCondition) MarshalToSizedBuffer

func (m *WebRequestCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequestCondition) ProtoMessage

func (*WebRequestCondition) ProtoMessage()

func (*WebRequestCondition) Reset

func (m *WebRequestCondition) Reset()

func (*WebRequestCondition) Size

func (m *WebRequestCondition) Size() (n int)

func (*WebRequestCondition) String

func (this *WebRequestCondition) String() string

func (*WebRequestCondition) Unmarshal

func (m *WebRequestCondition) Unmarshal(dAtA []byte) error

func (*WebRequestCondition) XXX_DiscardUnknown

func (m *WebRequestCondition) XXX_DiscardUnknown()

func (*WebRequestCondition) XXX_Marshal

func (m *WebRequestCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequestCondition) XXX_Merge

func (m *WebRequestCondition) XXX_Merge(src proto.Message)

func (*WebRequestCondition) XXX_Size

func (m *WebRequestCondition) XXX_Size() int

func (*WebRequestCondition) XXX_Unmarshal

func (m *WebRequestCondition) XXX_Unmarshal(b []byte) error

type WebRequestConditionType

type WebRequestConditionType string

WebRequestConditionType is the condition type of a WebRequest.

const (
	WebRequestReady WebRequestConditionType = "Ready"
	WebRequestSaved WebRequestConditionType = "Saved"
)

WebRequest condition types.

type WebRequestList

type WebRequestList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []WebRequest `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true WebRequestList contains a list of WebRequest objects
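A hypothetical sketch of walking the list items, for example after a client List call; the printing is illustrative.

package v1alpha1

import "fmt"

// printReadyWebRequests reports which requests in the list are ready.
func printReadyWebRequests(list *WebRequestList) {
	for i := range list.Items {
		wr := &list.Items[i]
		fmt.Printf("%s ready=%v\n", wr.Name, wr.IsReady())
	}
}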

func (*WebRequestList) DeepCopy

func (in *WebRequestList) DeepCopy() *WebRequestList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequestList.

func (*WebRequestList) DeepCopyInto

func (in *WebRequestList) DeepCopyInto(out *WebRequestList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequestList) DeepCopyObject

func (in *WebRequestList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*WebRequestList) Descriptor

func (*WebRequestList) Descriptor() ([]byte, []int)

func (*WebRequestList) Marshal

func (m *WebRequestList) Marshal() (dAtA []byte, err error)

func (*WebRequestList) MarshalTo

func (m *WebRequestList) MarshalTo(dAtA []byte) (int, error)

func (*WebRequestList) MarshalToSizedBuffer

func (m *WebRequestList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequestList) ProtoMessage

func (*WebRequestList) ProtoMessage()

func (*WebRequestList) Reset

func (m *WebRequestList) Reset()

func (*WebRequestList) Size

func (m *WebRequestList) Size() (n int)

func (*WebRequestList) String

func (this *WebRequestList) String() string

func (*WebRequestList) Unmarshal

func (m *WebRequestList) Unmarshal(dAtA []byte) error

func (*WebRequestList) XXX_DiscardUnknown

func (m *WebRequestList) XXX_DiscardUnknown()

func (*WebRequestList) XXX_Marshal

func (m *WebRequestList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequestList) XXX_Merge

func (m *WebRequestList) XXX_Merge(src proto.Message)

func (*WebRequestList) XXX_Size

func (m *WebRequestList) XXX_Size() int

func (*WebRequestList) XXX_Unmarshal

func (m *WebRequestList) XXX_Unmarshal(b []byte) error

type WebRequestRun

type WebRequestRun struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Spec              WebRequestRunSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	//+optional
	Status WebRequestRunStatus `json:"status" protobuf:"bytes,3,opt,name=status"`
}

WebRequestRun represents a single execution (run) of a WebRequest. +kubebuilder:object:root=true +kubebuilder:printcolumn:name="Ready",type="string",JSONPath=".status.conditions[?(@.type==\"Ready\")].status",description="" +kubebuilder:printcolumn:name="Owner",type="string",JSONPath=".spec.owner" +kubebuilder:printcolumn:name="Version",type="string",JSONPath=".spec.versionName" +kubebuilder:printcolumn:name="Age",type="date",JSONPath=".metadata.creationTimestamp",description="" +kubebuilder:resource:path=webrequestruns,singular=webrequestrun,categories={data,modela} +kubebuilder:subresource:status

func ParseWebRequestRunYaml

func ParseWebRequestRunYaml(content []byte) (*WebRequestRun, error)
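A minimal sketch of using the parser with a manifest read from disk; the file path handling is illustrative.

package v1alpha1

import "os"

// loadRun parses a WebRequestRun manifest from a YAML file.
func loadRun(path string) (*WebRequestRun, error) {
	content, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	return ParseWebRequestRunYaml(content)
}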

func (*WebRequestRun) AddFinalizer

func (prediction *WebRequestRun) AddFinalizer()

func (*WebRequestRun) CompletionAlert added in v0.4.601

func (run *WebRequestRun) CompletionAlert(tenantRef *v1.ObjectReference, notifierName *string) *infra.Alert

Generate a completion alert for the run
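A hedged sketch of producing the alert; the tenant reference and notifier name are illustrative values, and what is done with the returned alert (typically created via the Kubernetes API by a controller) is left out.

package v1alpha1

import (
	corev1 "k8s.io/api/core/v1"
)

// alertOnCompletion builds a completion alert for a finished run.
func alertOnCompletion(run *WebRequestRun) {
	tenant := &corev1.ObjectReference{Namespace: "modela-system", Name: "default-tenant"} // illustrative
	notifier := "default-notifier"                                                        // illustrative
	alert := run.CompletionAlert(tenant, &notifier)
	_ = alert
}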

func (*WebRequestRun) CreateOrUpdateCond

func (prediction *WebRequestRun) CreateOrUpdateCond(cond WebRequestRunCondition)

Merge or update the given condition on the WebRequestRun status.

func (*WebRequestRun) DeepCopy

func (in *WebRequestRun) DeepCopy() *WebRequestRun

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequestRun.

func (*WebRequestRun) DeepCopyInto

func (in *WebRequestRun) DeepCopyInto(out *WebRequestRun)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequestRun) DeepCopyObject

func (in *WebRequestRun) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*WebRequestRun) Default

func (run *WebRequestRun) Default()

func (*WebRequestRun) Descriptor

func (*WebRequestRun) Descriptor() ([]byte, []int)

func (*WebRequestRun) ErrorAlert added in v0.4.601

func (run *WebRequestRun) ErrorAlert(tenantRef *v1.ObjectReference, notifierName *string, err error) *infra.Alert

func (*WebRequestRun) GetCond

func (*WebRequestRun) GetCondIdx

func (prediction *WebRequestRun) GetCondIdx(t WebRequestRunConditionType) int

func (*WebRequestRun) HasFinalizer

func (prediction *WebRequestRun) HasFinalizer() bool

func (*WebRequestRun) IsCompleted

func (prediction *WebRequestRun) IsCompleted() bool

func (*WebRequestRun) IsFailed added in v0.4.614

func (in *WebRequestRun) IsFailed() bool

func (*WebRequestRun) IsSaved

func (version *WebRequestRun) IsSaved() bool

func (*WebRequestRun) Key

func (prediction *WebRequestRun) Key() string

func (*WebRequestRun) ManifestUri

func (prediction *WebRequestRun) ManifestUri() string

func (*WebRequestRun) MarkCompleted

func (prediction *WebRequestRun) MarkCompleted()

func (*WebRequestRun) MarkFailed

func (prediction *WebRequestRun) MarkFailed(msg string)

func (*WebRequestRun) MarkRunning

func (run *WebRequestRun) MarkRunning()

func (*WebRequestRun) MarkSaved

func (version *WebRequestRun) MarkSaved()

func (*WebRequestRun) Marshal

func (m *WebRequestRun) Marshal() (dAtA []byte, err error)

func (*WebRequestRun) MarshalTo

func (m *WebRequestRun) MarshalTo(dAtA []byte) (int, error)

func (*WebRequestRun) MarshalToSizedBuffer

func (m *WebRequestRun) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequestRun) OpName

func (prediction *WebRequestRun) OpName() string

func (*WebRequestRun) ProtoMessage

func (*WebRequestRun) ProtoMessage()

func (*WebRequestRun) RemoveFinalizer

func (prediction *WebRequestRun) RemoveFinalizer()

func (*WebRequestRun) RepPath

func (prediction *WebRequestRun) RepPath(root string) (string, error)

Return the on-disk representation location of the run.

func (*WebRequestRun) Reset

func (m *WebRequestRun) Reset()

func (*WebRequestRun) RootUri

func (prediction *WebRequestRun) RootUri() string

func (*WebRequestRun) RunStatus added in v0.4.614

func (run *WebRequestRun) RunStatus() *catalog.LastRunStatus

Return the state of the run as a catalog.LastRunStatus.

func (*WebRequestRun) SetupWebhookWithManager

func (prediction *WebRequestRun) SetupWebhookWithManager(mgr ctrl.Manager) error

func (*WebRequestRun) Size

func (m *WebRequestRun) Size() (n int)

func (*WebRequestRun) String

func (this *WebRequestRun) String() string

func (*WebRequestRun) ToYamlFile

func (prediction *WebRequestRun) ToYamlFile() ([]byte, error)

func (*WebRequestRun) Unmarshal

func (m *WebRequestRun) Unmarshal(dAtA []byte) error

func (*WebRequestRun) ValidateCreate

func (recipe *WebRequestRun) ValidateCreate() error

ValidateCreate implements webhook.Validator so a webhook will be registered for the type

func (*WebRequestRun) ValidateDelete

func (recipe *WebRequestRun) ValidateDelete() error

func (*WebRequestRun) ValidateUpdate

func (recipe *WebRequestRun) ValidateUpdate(old runtime.Object) error

ValidateUpdate implements webhook.Validator so a webhook will be registered for the type

func (*WebRequestRun) WebQueryName

func (prediction *WebRequestRun) WebQueryName() string

func (*WebRequestRun) XXX_DiscardUnknown

func (m *WebRequestRun) XXX_DiscardUnknown()

func (*WebRequestRun) XXX_Marshal

func (m *WebRequestRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequestRun) XXX_Merge

func (m *WebRequestRun) XXX_Merge(src proto.Message)

func (*WebRequestRun) XXX_Size

func (m *WebRequestRun) XXX_Size() int

func (*WebRequestRun) XXX_Unmarshal

func (m *WebRequestRun) XXX_Unmarshal(b []byte) error

type WebRequestRunCondition

type WebRequestRunCondition struct {
	// Type of account condition.
	Type WebRequestRunConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=WebRequestRunConditionType"`
	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
	// Last time the condition transitioned from one status to another.
	LastTransitionTime *metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`
	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

WebRequestRunCondition describes the state of a WebRequestRun at a certain point.

func (*WebRequestRunCondition) DeepCopy

func (in *WebRequestRunCondition) DeepCopy() *WebRequestRunCondition

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequestRunCondition.

func (*WebRequestRunCondition) DeepCopyInto

func (in *WebRequestRunCondition) DeepCopyInto(out *WebRequestRunCondition)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequestRunCondition) Descriptor

func (*WebRequestRunCondition) Descriptor() ([]byte, []int)

func (*WebRequestRunCondition) Marshal

func (m *WebRequestRunCondition) Marshal() (dAtA []byte, err error)

func (*WebRequestRunCondition) MarshalTo

func (m *WebRequestRunCondition) MarshalTo(dAtA []byte) (int, error)

func (*WebRequestRunCondition) MarshalToSizedBuffer

func (m *WebRequestRunCondition) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequestRunCondition) ProtoMessage

func (*WebRequestRunCondition) ProtoMessage()

func (*WebRequestRunCondition) Reset

func (m *WebRequestRunCondition) Reset()

func (*WebRequestRunCondition) Size

func (m *WebRequestRunCondition) Size() (n int)

func (*WebRequestRunCondition) String

func (this *WebRequestRunCondition) String() string

func (*WebRequestRunCondition) Unmarshal

func (m *WebRequestRunCondition) Unmarshal(dAtA []byte) error

func (*WebRequestRunCondition) XXX_DiscardUnknown

func (m *WebRequestRunCondition) XXX_DiscardUnknown()

func (*WebRequestRunCondition) XXX_Marshal

func (m *WebRequestRunCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequestRunCondition) XXX_Merge

func (m *WebRequestRunCondition) XXX_Merge(src proto.Message)

func (*WebRequestRunCondition) XXX_Size

func (m *WebRequestRunCondition) XXX_Size() int

func (*WebRequestRunCondition) XXX_Unmarshal

func (m *WebRequestRunCondition) XXX_Unmarshal(b []byte) error

type WebRequestRunConditionType

type WebRequestRunConditionType string

WebRequestRunConditionType is the condition type of a WebRequestRun.

const (
	WebRequestRunCompleted WebRequestRunConditionType = "Completed"
	WebRequestRunSaved     WebRequestRunConditionType = "Saved"
)

WebRequestRun condition types.

type WebRequestRunList

type WebRequestRunList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Items           []WebRequestRun `json:"items" protobuf:"bytes,2,rep,name=items"`
}

+kubebuilder:object:root=true WebRequestRunList contains a list of WebRequestRun objects

func (*WebRequestRunList) DeepCopy

func (in *WebRequestRunList) DeepCopy() *WebRequestRunList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequestRunList.

func (*WebRequestRunList) DeepCopyInto

func (in *WebRequestRunList) DeepCopyInto(out *WebRequestRunList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequestRunList) DeepCopyObject

func (in *WebRequestRunList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*WebRequestRunList) Descriptor

func (*WebRequestRunList) Descriptor() ([]byte, []int)

func (*WebRequestRunList) Marshal

func (m *WebRequestRunList) Marshal() (dAtA []byte, err error)

func (*WebRequestRunList) MarshalTo

func (m *WebRequestRunList) MarshalTo(dAtA []byte) (int, error)

func (*WebRequestRunList) MarshalToSizedBuffer

func (m *WebRequestRunList) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequestRunList) ProtoMessage

func (*WebRequestRunList) ProtoMessage()

func (*WebRequestRunList) Reset

func (m *WebRequestRunList) Reset()

func (*WebRequestRunList) Size

func (m *WebRequestRunList) Size() (n int)

func (*WebRequestRunList) String

func (this *WebRequestRunList) String() string

func (*WebRequestRunList) Unmarshal

func (m *WebRequestRunList) Unmarshal(dAtA []byte) error

func (*WebRequestRunList) XXX_DiscardUnknown

func (m *WebRequestRunList) XXX_DiscardUnknown()

func (*WebRequestRunList) XXX_Marshal

func (m *WebRequestRunList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequestRunList) XXX_Merge

func (m *WebRequestRunList) XXX_Merge(src proto.Message)

func (*WebRequestRunList) XXX_Size

func (m *WebRequestRunList) XXX_Size() int

func (*WebRequestRunList) XXX_Unmarshal

func (m *WebRequestRunList) XXX_Unmarshal(b []byte) error

type WebRequestRunPhase

type WebRequestRunPhase string

WebRequestRunPhase is the current phase of a WebRequestRun.

const (
	WebRequestRunPhasePending   WebRequestRunPhase = "Pending"
	WebRequestRunPhaseRunning   WebRequestRunPhase = "Running"
	WebRequestRunPhaseFailed    WebRequestRunPhase = "Failed"
	WebRequestRunPhaseAborted   WebRequestRunPhase = "Aborted"
	WebRequestRunPhaseCompleted WebRequestRunPhase = "Completed"
)
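
An illustrative sketch of branching on the phase reported in the run status; the IsCompleted and IsFailed helpers above cover the common cases, but a full switch makes the terminal phases explicit.

package v1alpha1

// isTerminal reports whether the run has reached a terminal phase.
func isTerminal(run *WebRequestRun) bool {
	switch run.Status.Phase {
	case WebRequestRunPhaseCompleted, WebRequestRunPhaseFailed, WebRequestRunPhaseAborted:
		return true
	default:
		return false
	}
}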

type WebRequestRunSpec

type WebRequestRunSpec struct {
	// The webrequest owner
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:="no-one"
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// The product version for the webrequest.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// Description of the web request
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:=""
	// +kubebuilder:validation:MaxLength=512
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The URL to call for this web request.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	URL *string `json:"text,omitempty" protobuf:"bytes,4,opt,name=query"`
	// HTTP Verb to use
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Verb *string `json:"verb,omitempty" protobuf:"bytes,5,opt,name=verb"`
	// URL Parameters
	// +kubebuilder:validation:Optional
	Parameters map[string]string `json:"parameters,omitempty" protobuf:"bytes,6,opt,name=parameters"`
	// The HTTP headers to use
	// +kubebuilder:validation:Optional
	Headers map[string]string `json:"headers,omitempty" protobuf:"bytes,7,opt,name=headers"`
	// The name of the connection to the data source.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	ConnectionName *string `json:"connectionName,omitempty" protobuf:"bytes,8,opt,name=connectionName"`
	// The web request timeout
	// +kubebuilder:default:=3600
	// +kubebuilder:validation:Optional
	Timeout *int32 `json:"timeout,omitempty" protobuf:"varint,9,opt,name=timeout"`
	// If true, save the request results
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Materialized *bool `json:"materialized,omitempty" protobuf:"varint,10,opt,name=materialized"`
	// If true generate a report each time the web request is executed
	// +kubebuilder:default:=false
	// +kubebuilder:validation:Optional
	Reported *bool `json:"reported,omitempty" protobuf:"varint,11,opt,name=reported"`
	// Resources specifies the hardware resource requirements.
	// +kubebuilder:validation:Optional
	Resources catalog.ResourceSpec `json:"resources,omitempty" protobuf:"bytes,12,opt,name=resources"`
	// The source web request
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	WebRequestName *string `json:"webRequestName,omitempty" protobuf:"bytes,13,opt,name=webRequestName"`
	// The execution environment (Lab) of this web request run.
	// If not set, the default Lab of the data product is used.
	// +kubebuilder:validation:Optional
	LabRef v1.ObjectReference `json:"labRef,omitempty" protobuf:"bytes,14,opt,name=labRef"`
}

WebRequestRunSpec contains the desired state of a WebRequestRun
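A hedged sketch of filling a WebRequestRunSpec in Go; the URL, verb, headers, parameters, timeout and materialization values are illustrative only.

package v1alpha1

// exampleRunSpec returns a spec describing an illustrative GET request.
func exampleRunSpec() WebRequestRunSpec {
	url := "https://api.example.com/v1/orders" // illustrative URL
	verb := "GET"
	timeout := int32(600)
	materialized := true
	return WebRequestRunSpec{
		URL:          &url,
		Verb:         &verb,
		Headers:      map[string]string{"Accept": "application/json"},
		Parameters:   map[string]string{"limit": "100"},
		Timeout:      &timeout,
		Materialized: &materialized,
	}
}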

func (*WebRequestRunSpec) DeepCopy

func (in *WebRequestRunSpec) DeepCopy() *WebRequestRunSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequestRunSpec.

func (*WebRequestRunSpec) DeepCopyInto

func (in *WebRequestRunSpec) DeepCopyInto(out *WebRequestRunSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequestRunSpec) Descriptor

func (*WebRequestRunSpec) Descriptor() ([]byte, []int)

func (*WebRequestRunSpec) Marshal

func (m *WebRequestRunSpec) Marshal() (dAtA []byte, err error)

func (*WebRequestRunSpec) MarshalTo

func (m *WebRequestRunSpec) MarshalTo(dAtA []byte) (int, error)

func (*WebRequestRunSpec) MarshalToSizedBuffer

func (m *WebRequestRunSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequestRunSpec) ProtoMessage

func (*WebRequestRunSpec) ProtoMessage()

func (*WebRequestRunSpec) Reset

func (m *WebRequestRunSpec) Reset()

func (*WebRequestRunSpec) Size

func (m *WebRequestRunSpec) Size() (n int)

func (*WebRequestRunSpec) String

func (this *WebRequestRunSpec) String() string

func (*WebRequestRunSpec) Unmarshal

func (m *WebRequestRunSpec) Unmarshal(dAtA []byte) error

func (*WebRequestRunSpec) XXX_DiscardUnknown

func (m *WebRequestRunSpec) XXX_DiscardUnknown()

func (*WebRequestRunSpec) XXX_Marshal

func (m *WebRequestRunSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequestRunSpec) XXX_Merge

func (m *WebRequestRunSpec) XXX_Merge(src proto.Message)

func (*WebRequestRunSpec) XXX_Size

func (m *WebRequestRunSpec) XXX_Size() int

func (*WebRequestRunSpec) XXX_Unmarshal

func (m *WebRequestRunSpec) XXX_Unmarshal(b []byte) error

type WebRequestRunStatus

type WebRequestRunStatus struct {
	// StartTime is the start time of the run.
	StartTime *metav1.Time `json:"startTime,omitempty" protobuf:"bytes,1,opt,name=startTime"`
	// EndTime is the end time of the run.
	EndTime *metav1.Time `json:"endTime,omitempty" protobuf:"bytes,2,opt,name=endTime"`
	// Phase is the current phase of the run
	// +kubebuilder:default:="Pending"
	// +kubebuilder:validation:Optional
	Phase WebRequestRunPhase `json:"phase,omitempty" protobuf:"bytes,3,opt,name=phase"`
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,4,opt,name=observedGeneration"`
	// The number of rows in the query result
	//+kubebuilder:validation:Optional
	Rows int32 `json:"rows,omitempty" protobuf:"varint,5,opt,name=rows"`
	// What triggered the run
	//+kubebuilder:validation:Optional
	TriggeredBy catalog.TriggerType `json:"triggeredBy,omitempty" protobuf:"bytes,6,opt,name=triggeredBy"`
	// The result of the HTTP execution
	// +kubebuilder:validation:Optional
	HttpResultCode int32 `json:"httpResultCode,omitempty" protobuf:"varint,7,opt,name=httpResultCode"`
	// The location of the result
	// +kubebuilder:validation:Optional
	Location DataLocation `json:"resultLocation,omitempty" protobuf:"bytes,8,opt,name=resultLocation"`
	// The name of the report object.
	// +kubebuilder:validation:Optional
	ReportName string `json:"reportName,omitempty" protobuf:"bytes,9,opt,name=reportName"`
	// Update in case of terminal failure
	// Borrowed from cluster api controller
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,10,opt,name=failureReason"`
	// Update in case of terminal failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,11,opt,name=failureMessage"`
	// Holds the location of log paths
	//+kubebuilder:validation:Optional
	Logs catalog.Logs `json:"logs,omitempty" protobuf:"bytes,12,opt,name=logs"`
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,13,opt,name=lastUpdated"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []WebRequestRunCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,14,rep,name=conditions"`
}

WebRequestRunStatus defines the observed state of WebRequestRun
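An illustrative sketch of reading the observed state back; only fields documented in the status above are used, and the summary format is arbitrary.

package v1alpha1

import "fmt"

// summarizeRun renders a one-line summary of the run status.
func summarizeRun(run *WebRequestRun) string {
	s := run.Status
	if s.FailureMessage != nil {
		return fmt.Sprintf("phase=%s http=%d failure=%s", s.Phase, s.HttpResultCode, *s.FailureMessage)
	}
	return fmt.Sprintf("phase=%s http=%d rows=%d", s.Phase, s.HttpResultCode, s.Rows)
}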

func (*WebRequestRunStatus) DeepCopy

func (in *WebRequestRunStatus) DeepCopy() *WebRequestRunStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequestRunStatus.

func (*WebRequestRunStatus) DeepCopyInto

func (in *WebRequestRunStatus) DeepCopyInto(out *WebRequestRunStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequestRunStatus) Descriptor

func (*WebRequestRunStatus) Descriptor() ([]byte, []int)

func (*WebRequestRunStatus) Marshal

func (m *WebRequestRunStatus) Marshal() (dAtA []byte, err error)

func (*WebRequestRunStatus) MarshalTo

func (m *WebRequestRunStatus) MarshalTo(dAtA []byte) (int, error)

func (*WebRequestRunStatus) MarshalToSizedBuffer

func (m *WebRequestRunStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequestRunStatus) ProtoMessage

func (*WebRequestRunStatus) ProtoMessage()

func (*WebRequestRunStatus) Reset

func (m *WebRequestRunStatus) Reset()

func (*WebRequestRunStatus) Size

func (m *WebRequestRunStatus) Size() (n int)

func (*WebRequestRunStatus) String

func (this *WebRequestRunStatus) String() string

func (*WebRequestRunStatus) Unmarshal

func (m *WebRequestRunStatus) Unmarshal(dAtA []byte) error

func (*WebRequestRunStatus) XXX_DiscardUnknown

func (m *WebRequestRunStatus) XXX_DiscardUnknown()

func (*WebRequestRunStatus) XXX_Marshal

func (m *WebRequestRunStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequestRunStatus) XXX_Merge

func (m *WebRequestRunStatus) XXX_Merge(src proto.Message)

func (*WebRequestRunStatus) XXX_Size

func (m *WebRequestRunStatus) XXX_Size() int

func (*WebRequestRunStatus) XXX_Unmarshal

func (m *WebRequestRunStatus) XXX_Unmarshal(b []byte) error

type WebRequestSpec

type WebRequestSpec struct {
	// The web request owner
	// +kubebuilder:validation:Optional
	// +kubebuilder:default:="no-one"
	Owner *string `json:"owner,omitempty" protobuf:"bytes,1,opt,name=owner"`
	// The product version for the web request.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	VersionName *string `json:"versionName,omitempty" protobuf:"bytes,2,opt,name=versionName"`
	// Description of the web request.
	// +kubebuilder:default:=""
	// +kubebuilder:validation:Optional
	Description *string `json:"description,omitempty" protobuf:"bytes,3,opt,name=description"`
	// The web request run template.
	// +kubebuilder:validation:Optional
	Template WebRequestRunSpec `json:"template,omitempty" protobuf:"bytes,4,opt,name=template"`
	// Schedule for running the web request
	// +kubebuilder:validation:Optional
	Schedule catalog.RunSchedule `json:"schedule,omitempty" protobuf:"bytes,5,opt,name=schedule"`
}

WebRequestSpec contains the desired state of a WebRequest
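A hedged sketch: the spec wraps a run template plus an optional schedule. The owner, description and URL are illustrative, and the schedule is left at its zero value because catalog.RunSchedule is defined elsewhere.

package v1alpha1

// exampleWebRequestSpec returns a spec with an illustrative run template.
func exampleWebRequestSpec() WebRequestSpec {
	owner := "data-team"                            // illustrative owner
	desc := "hourly pull of the orders endpoint"    // illustrative description
	url := "https://api.example.com/v1/orders"      // illustrative URL
	return WebRequestSpec{
		Owner:       &owner,
		Description: &desc,
		Template:    WebRequestRunSpec{URL: &url},
	}
}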

func (*WebRequestSpec) DeepCopy

func (in *WebRequestSpec) DeepCopy() *WebRequestSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequestSpec.

func (*WebRequestSpec) DeepCopyInto

func (in *WebRequestSpec) DeepCopyInto(out *WebRequestSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequestSpec) Descriptor

func (*WebRequestSpec) Descriptor() ([]byte, []int)

func (*WebRequestSpec) Marshal

func (m *WebRequestSpec) Marshal() (dAtA []byte, err error)

func (*WebRequestSpec) MarshalTo

func (m *WebRequestSpec) MarshalTo(dAtA []byte) (int, error)

func (*WebRequestSpec) MarshalToSizedBuffer

func (m *WebRequestSpec) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequestSpec) ProtoMessage

func (*WebRequestSpec) ProtoMessage()

func (*WebRequestSpec) Reset

func (m *WebRequestSpec) Reset()

func (*WebRequestSpec) Size

func (m *WebRequestSpec) Size() (n int)

func (*WebRequestSpec) String

func (this *WebRequestSpec) String() string

func (*WebRequestSpec) Unmarshal

func (m *WebRequestSpec) Unmarshal(dAtA []byte) error

func (*WebRequestSpec) XXX_DiscardUnknown

func (m *WebRequestSpec) XXX_DiscardUnknown()

func (*WebRequestSpec) XXX_Marshal

func (m *WebRequestSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequestSpec) XXX_Merge

func (m *WebRequestSpec) XXX_Merge(src proto.Message)

func (*WebRequestSpec) XXX_Size

func (m *WebRequestSpec) XXX_Size() int

func (*WebRequestSpec) XXX_Unmarshal

func (m *WebRequestSpec) XXX_Unmarshal(b []byte) error

type WebRequestStatus

type WebRequestStatus struct {
	// Last time the object was updated
	//+kubebuilder:validation:Optional
	LastUpdated *metav1.Time `json:"lastUpdated,omitempty" protobuf:"bytes,1,opt,name=lastUpdated"`
	// ObservedGeneration is the Last generation that was acted on
	//+kubebuilder:validation:Optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,2,opt,name=observedGeneration"`
	//+kubebuilder:validation:Optional
	LastRun catalog.LastRunStatus `json:"lastRun,omitempty" protobuf:"bytes,3,opt,name=lastRun"`
	// The time of the next schedule run
	//+kubebuilder:validation:Optional
	NextRun *metav1.Time `json:"nextRun,omitempty" protobuf:"bytes,4,opt,name=nextRun"`
	// Update in case of terminal failure
	// Borrowed from cluster api controller
	//+kubebuilder:validation:Optional
	FailureReason *catalog.StatusError `json:"failureReason,omitempty" protobuf:"bytes,5,opt,name=failureReason"`
	// Update in case of terminal failure message
	//+kubebuilder:validation:Optional
	FailureMessage *string `json:"failureMessage,omitempty" protobuf:"bytes,6,opt,name=failureMessage"`
	// +patchMergeKey=type
	// +patchStrategy=merge
	// +kubebuilder:validation:Optional
	Conditions []WebRequestCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,7,rep,name=conditions"`
}

WebRequestStatus defines the observed state of WebRequest

func (*WebRequestStatus) DeepCopy

func (in *WebRequestStatus) DeepCopy() *WebRequestStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WebRequestStatus.

func (*WebRequestStatus) DeepCopyInto

func (in *WebRequestStatus) DeepCopyInto(out *WebRequestStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*WebRequestStatus) Descriptor

func (*WebRequestStatus) Descriptor() ([]byte, []int)

func (*WebRequestStatus) Marshal

func (m *WebRequestStatus) Marshal() (dAtA []byte, err error)

func (*WebRequestStatus) MarshalTo

func (m *WebRequestStatus) MarshalTo(dAtA []byte) (int, error)

func (*WebRequestStatus) MarshalToSizedBuffer

func (m *WebRequestStatus) MarshalToSizedBuffer(dAtA []byte) (int, error)

func (*WebRequestStatus) ProtoMessage

func (*WebRequestStatus) ProtoMessage()

func (*WebRequestStatus) Reset

func (m *WebRequestStatus) Reset()

func (*WebRequestStatus) Size

func (m *WebRequestStatus) Size() (n int)

func (*WebRequestStatus) String

func (this *WebRequestStatus) String() string

func (*WebRequestStatus) Unmarshal

func (m *WebRequestStatus) Unmarshal(dAtA []byte) error

func (*WebRequestStatus) XXX_DiscardUnknown

func (m *WebRequestStatus) XXX_DiscardUnknown()

func (*WebRequestStatus) XXX_Marshal

func (m *WebRequestStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)

func (*WebRequestStatus) XXX_Merge

func (m *WebRequestStatus) XXX_Merge(src proto.Message)

func (*WebRequestStatus) XXX_Size

func (m *WebRequestStatus) XXX_Size() int

func (*WebRequestStatus) XXX_Unmarshal

func (m *WebRequestStatus) XXX_Unmarshal(b []byte) error
