v1beta1

package v0.42.8
Published: Nov 7, 2024 License: Apache-2.0 Imports: 28 Imported by: 7

Documentation

Overview

Package v1beta1 contains API Schema definitions for the sources v1beta1 API group.

+k8s:openapi-gen=true
+k8s:deepcopy-gen=package,register
+k8s:defaulter-gen=TypeMeta
+groupName=sources.knative.dev


Index

Constants

const (
	// KafkaConditionReady has status True when the KafkaSource is ready to send events.
	KafkaConditionReady = apis.ConditionReady

	// KafkaConditionSinkProvided has status True when the KafkaSource has been configured with a sink target.
	KafkaConditionSinkProvided apis.ConditionType = "SinkProvided"

	// KafkaConditionDeployed has status True when the KafkaSource has had its receive adapter deployment created.
	KafkaConditionDeployed apis.ConditionType = "Deployed"

	// KafkaConditionKeyType is True when the KafkaSource has been configured with valid key type for
	// the key deserializer.
	KafkaConditionKeyType apis.ConditionType = "KeyTypeCorrect"

	// KafkaConditionConnectionEstablished has status True when the Kafka configuration used
	// by the source succeeded in establishing a connection to Kafka.
	KafkaConditionConnectionEstablished apis.ConditionType = "ConnectionEstablished"

	// KafkaConditionInitialOffsetsCommitted is True when the KafkaSource has committed the
	// initial offsets of all claims.
	KafkaConditionInitialOffsetsCommitted apis.ConditionType = "InitialOffsetsCommitted"

	// KafkaConditionOIDCIdentityCreated has status True when the KafkaSource has created an OIDC identity.
	KafkaConditionOIDCIdentityCreated apis.ConditionType = "OIDCIdentityCreated"
)
const (
	// KafkaEventType is the Kafka CloudEvent type.
	KafkaEventType = "dev.knative.kafka.event"

	KafkaKeyTypeLabel = "kafkasources.sources.knative.dev/key-type"

	// OffsetEarliest denotes the earliest offset in the Kafka partition.
	OffsetEarliest Offset = "earliest"

	// OffsetLatest denotes the latest offset in the Kafka partition.
	OffsetLatest Offset = "latest"
)
const (

	// KafkaConditionScheduled is True when all KafkaSource consumers have been scheduled.
	KafkaConditionScheduled apis.ConditionType = "Scheduled"
)

Variables

var (
	SchemeBuilder = runtime.NewSchemeBuilder(addKnownTypes)
	AddToScheme   = SchemeBuilder.AddToScheme
)
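
For orientation, here is a minimal sketch of wiring these variables into a scheme; the v1beta1 import path is an assumption, the rest is standard apimachinery usage.

package example

import (
	"k8s.io/apimachinery/pkg/runtime"

	// Import path assumed; adjust to where this package lives in your module.
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

// newScheme registers KafkaSource and KafkaSourceList under
// sources.knative.dev/v1beta1 so codecs and clients can round-trip them.
func newScheme() (*runtime.Scheme, error) {
	scheme := runtime.NewScheme()
	if err := v1beta1.AddToScheme(scheme); err != nil {
		return nil, err
	}
	return scheme, nil
}
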
var FuzzerFuncs = fuzzer.MergeFuzzerFuncs(
	func(codecs serializer.CodecFactory) []interface{} {
		return []interface{}{
			func(s *KafkaSourceStatus, c fuzz.Continue) {
				c.FuzzNoCustom(s)

				s.Status.SetConditions(nil)

				s.InitializeConditions()
				pkgfuzzer.FuzzConditions(&s.Status, c)
			},
		}
	},
)

FuzzerFuncs includes fuzzing funcs for sources.knative.dev v1beta1 types

For other examples see https://github.com/kubernetes/apimachinery/blob/master/pkg/apis/meta/fuzzer/fuzzer.go

var KafkaKeyTypeAllowed = []string{"string", "int", "float", "byte-array"}
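
As a hedged sketch (object names are made up, and the label semantics are inferred from the KafkaConditionKeyType condition above), one of these values is set via the KafkaKeyTypeLabel label on a KafkaSource to select the key deserializer:

package example

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	// Import path assumed.
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

// newKeyedSource labels a KafkaSource with one of the allowed key types
// ("string", "int", "float", "byte-array") for the key deserializer.
func newKeyedSource() *v1beta1.KafkaSource {
	return &v1beta1.KafkaSource{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "my-kafka-source",
			Namespace: "default",
			Labels: map[string]string{
				v1beta1.KafkaKeyTypeLabel: "string",
			},
		},
	}
}
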
var SchemeGroupVersion = schema.GroupVersion{Group: sources.GroupName, Version: "v1beta1"}

SchemeGroupVersion is the group version used to register these objects.

Functions

func DeploymentIsAvailable

func DeploymentIsAvailable(d *appsv1.DeploymentStatus, def bool) bool
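
A short sketch of how this helper might be used when checking the receive adapter Deployment; the v1beta1 import path and the meaning of the def fallback are assumptions based on the signature.

package example

import (
	appsv1 "k8s.io/api/apps/v1"

	// Import path assumed.
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

// receiveAdapterReady reports whether the receive adapter Deployment is
// available, falling back to false when availability cannot be determined
// (assumed meaning of the def parameter).
func receiveAdapterReady(d *appsv1.Deployment) bool {
	return v1beta1.DeploymentIsAvailable(&d.Status, false)
}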

func KafkaEventSource

func KafkaEventSource(namespace, kafkaSourceName, topic string) string

KafkaEventSource returns the Kafka CloudEvent source.
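
A minimal usage sketch (namespace, source name, and topic are made up; the import path is an assumption):

package main

import (
	"fmt"

	// Import path assumed.
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

func main() {
	// Build the CloudEvent source attribute for events consumed from the
	// "orders" topic by the KafkaSource "my-kafka-source" in "default".
	fmt.Println(v1beta1.KafkaEventSource("default", "my-kafka-source", "orders"))
}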

func Kind

func Kind(kind string) schema.GroupKind

Kind takes an unqualified kind and returns a Group-qualified GroupKind.

func RegisterAlternateKafkaConditionSet

func RegisterAlternateKafkaConditionSet(conditionSet apis.ConditionSet)

RegisterAlternateKafkaConditionSet registers an alternate apis.ConditionSet.
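
A hedged sketch of registering an alternate set, for instance one that also depends on the Scheduled condition; the chosen conditions are illustrative and the import paths are assumptions.

package example

import (
	"knative.dev/pkg/apis"

	// Import path assumed.
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

func init() {
	// Swap in a condition set that also requires scheduling before the
	// KafkaSource is considered Ready.
	v1beta1.RegisterAlternateKafkaConditionSet(apis.NewLivingConditionSet(
		v1beta1.KafkaConditionSinkProvided,
		v1beta1.KafkaConditionConnectionEstablished,
		v1beta1.KafkaConditionInitialOffsetsCommitted,
		v1beta1.KafkaConditionScheduled,
	))
}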

func Resource

func Resource(resource string) schema.GroupResource

Resource takes an unqualified resource and returns a Group-qualified GroupResource.
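
Both helpers qualify a plain name with the sources.knative.dev group; a small sketch (import path assumed):

package main

import (
	"fmt"

	// Import path assumed.
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

func main() {
	fmt.Println(v1beta1.Kind("KafkaSource"))      // Group-qualified kind.
	fmt.Println(v1beta1.Resource("kafkasources")) // Group-qualified resource.
}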

Types

type DeliveryOrdering added in v0.38.0

type DeliveryOrdering string
const (
	// Ordered is a per-partition blocking consumer.
	// It waits for a successful response from the sink before
	// sending the next message in the partition.
	Ordered DeliveryOrdering = "ordered"
	// Unordered is a non-blocking consumer that delivers
	// events without any particular ordering guarantee.
	Unordered DeliveryOrdering = "unordered"
)

func (DeliveryOrdering) Validate added in v0.38.0
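
The signature is not shown above; assuming it follows the same (context.Context) (*apis.FieldError) shape as the other Validate methods in this package, usage might look like:

package example

import (
	"context"

	// Import path assumed.
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

// validateOrdering checks a DeliveryOrdering value, which is presumably
// rejected unless it is one of the Ordered/Unordered constants above.
func validateOrdering(ctx context.Context, ordering v1beta1.DeliveryOrdering) error {
	if fe := ordering.Validate(ctx); fe != nil {
		return fe
	}
	return nil
}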

type KafkaSource

type KafkaSource struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`

	Spec   KafkaSourceSpec   `json:"spec,omitempty"`
	Status KafkaSourceStatus `json:"status,omitempty"`
}

KafkaSource is the Schema for the kafkasources API.

+genclient
+genclient:method=GetScale,verb=get,subresource=scale,result=k8s.io/api/autoscaling/v1.Scale
+genclient:method=UpdateScale,verb=update,subresource=scale,input=k8s.io/api/autoscaling/v1.Scale,result=k8s.io/api/autoscaling/v1.Scale
+genclient:method=ApplyScale,verb=apply,subresource=scale,input=k8s.io/api/autoscaling/v1.Scale,result=k8s.io/api/autoscaling/v1.Scale
+genreconciler
+k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
+k8s:openapi-gen=true
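
A hedged sketch of constructing a KafkaSource in Go, wired to an in-cluster Broker sink; object names, bootstrap servers, the bindings import path, and the KafkaAuthSpec field name are assumptions.

package example

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	duckv1 "knative.dev/pkg/apis/duck/v1"

	// Import paths assumed.
	bindingsv1beta1 "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/bindings/v1beta1"
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

func newKafkaSource() *v1beta1.KafkaSource {
	return &v1beta1.KafkaSource{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "my-kafka-source",
			Namespace: "default",
		},
		Spec: v1beta1.KafkaSourceSpec{
			KafkaAuthSpec: bindingsv1beta1.KafkaAuthSpec{
				// BootstrapServers is assumed to be the broker list field on KafkaAuthSpec.
				BootstrapServers: []string{"my-cluster-kafka-bootstrap.kafka:9092"},
			},
			Topics:        []string{"orders"},
			ConsumerGroup: "my-consumer-group",
			InitialOffset: v1beta1.OffsetLatest,
			SourceSpec: duckv1.SourceSpec{
				Sink: duckv1.Destination{
					Ref: &duckv1.KReference{
						APIVersion: "eventing.knative.dev/v1",
						Kind:       "Broker",
						Name:       "default",
					},
				},
			},
		},
	}
}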

func (*KafkaSource) CheckImmutableFields

func (ks *KafkaSource) CheckImmutableFields(ctx context.Context, original *KafkaSource) *apis.FieldError

func (*KafkaSource) ConvertFrom

func (sink *KafkaSource) ConvertFrom(ctx context.Context, source apis.Convertible) error

ConvertFrom implements apis.Convertible

func (*KafkaSource) ConvertTo

func (source *KafkaSource) ConvertTo(ctx context.Context, sink apis.Convertible) error

ConvertTo implements apis.Convertible

func (*KafkaSource) DeepCopy

func (in *KafkaSource) DeepCopy() *KafkaSource

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KafkaSource.

func (*KafkaSource) DeepCopyInto

func (in *KafkaSource) DeepCopyInto(out *KafkaSource)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*KafkaSource) DeepCopyObject

func (in *KafkaSource) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

func (*KafkaSource) GetConditionSet

func (*KafkaSource) GetConditionSet() apis.ConditionSet

GetConditionSet retrieves the condition set for this resource. Implements the KRShaped interface.

func (*KafkaSource) GetGroupVersionKind

func (*KafkaSource) GetGroupVersionKind() schema.GroupVersionKind

func (*KafkaSource) GetKey

func (k *KafkaSource) GetKey() types.NamespacedName

func (*KafkaSource) GetPlacements

func (k *KafkaSource) GetPlacements() []v1alpha1.Placement

func (*KafkaSource) GetResourceVersion

func (k *KafkaSource) GetResourceVersion() string

func (*KafkaSource) GetStatus

func (k *KafkaSource) GetStatus() *duckv1.Status

GetStatus retrieves the duck status for this resource. Implements the KRShaped interface.

func (*KafkaSource) GetVReplicas

func (k *KafkaSource) GetVReplicas() int32

func (*KafkaSource) SetDefaults

func (k *KafkaSource) SetDefaults(ctx context.Context)

SetDefaults ensures KafkaSource reflects the default values.

func (*KafkaSource) Validate

func (ks *KafkaSource) Validate(ctx context.Context) *apis.FieldError

Validate ensures KafkaSource is properly configured.
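
A sketch of the defaulting-then-validation flow (the import path is an assumption; an empty object is used only to show the calls):

package main

import (
	"context"
	"fmt"

	// Import path assumed.
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

func main() {
	ctx := context.Background()

	ks := &v1beta1.KafkaSource{}
	// Apply defaults first, then validate the resulting spec, mirroring the
	// usual admission flow.
	ks.SetDefaults(ctx)
	if fe := ks.Validate(ctx); fe != nil {
		fmt.Println("invalid KafkaSource:", fe)
	}
}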

type KafkaSourceList

type KafkaSourceList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	Items           []KafkaSource `json:"items"`
}

KafkaSourceList contains a list of KafkaSources.

func (*KafkaSourceList) DeepCopy

func (in *KafkaSourceList) DeepCopy() *KafkaSourceList

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KafkaSourceList.

func (*KafkaSourceList) DeepCopyInto

func (in *KafkaSourceList) DeepCopyInto(out *KafkaSourceList)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*KafkaSourceList) DeepCopyObject

func (in *KafkaSourceList) DeepCopyObject() runtime.Object

DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.

type KafkaSourceSpec

type KafkaSourceSpec struct {
	// Number of desired consumers running in the consumer group. Defaults to 1.
	//
	// This is a pointer to distinguish between explicit
	// zero and not specified.
	// +optional
	Consumers *int32 `json:"consumers,omitempty"`

	bindingsv1beta1.KafkaAuthSpec `json:",inline"`

	// Topics is the list of topics to consume messages from.
	// +required
	Topics []string `json:"topics"`

	// ConsumerGroup is the consumer group ID.
	// +optional
	ConsumerGroup string `json:"consumerGroup,omitempty"`

	// InitialOffset is the initial offset for the consumer group.
	// Should be earliest or latest.
	// +optional
	InitialOffset Offset `json:"initialOffset,omitempty"`

	// Delivery contains the delivery spec for this source
	// +optional
	Delivery *eventingduckv1.DeliverySpec `json:"delivery,omitempty"`

	// Ordering is the type of the consumer verticle.
	// Should be ordered or unordered.
	// By default, it is ordered.
	// +optional
	Ordering *DeliveryOrdering `json:"ordering,omitempty"`

	// inherits duck/v1 SourceSpec, which currently provides:
	// * Sink - a reference to an object that will resolve to a domain name or
	//   a URI directly to use as the sink.
	// * CloudEventOverrides - defines overrides to control the output format
	//   and modifications of the event sent to the sink.
	duckv1.SourceSpec `json:",inline"`
}

KafkaSourceSpec defines the desired state of the KafkaSource.

func (*KafkaSourceSpec) DeepCopy

func (in *KafkaSourceSpec) DeepCopy() *KafkaSourceSpec

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KafkaSourceSpec.

func (*KafkaSourceSpec) DeepCopyInto

func (in *KafkaSourceSpec) DeepCopyInto(out *KafkaSourceSpec)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*KafkaSourceSpec) Validate

func (kss *KafkaSourceSpec) Validate(ctx context.Context) *apis.FieldError

type KafkaSourceStatus

type KafkaSourceStatus struct {
	// inherits duck/v1 SourceStatus, which currently provides:
	// * ObservedGeneration - the 'Generation' of the Service that was last
	//   processed by the controller.
	// * Conditions - the latest available observations of a resource's current
	//   state.
	// * SinkURI - the current active sink URI that has been configured for the
	//   Source.
	duckv1.SourceStatus `json:",inline"`

	// Total number of consumers actually running in the consumer group.
	// +optional
	Consumers int32 `json:"consumers,omitempty"`

	// Selector is used for the labelSelectorPath when scaling the Kafka source.
	// +optional
	Selector string `json:"selector,omitempty"`

	// Claims consumed by this KafkaSource instance
	// +optional
	Claims string `json:"claims,omitempty"`

	// Implement Placeable.
	// +optional
	v1alpha1.Placeable `json:",inline"`
}

KafkaSourceStatus defines the observed state of KafkaSource.

func (*KafkaSourceStatus) DeepCopy

func (in *KafkaSourceStatus) DeepCopy() *KafkaSourceStatus

DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KafkaSourceStatus.

func (*KafkaSourceStatus) DeepCopyInto

func (in *KafkaSourceStatus) DeepCopyInto(out *KafkaSourceStatus)

DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.

func (*KafkaSourceStatus) GetCondition

func (s *KafkaSourceStatus) GetCondition(t apis.ConditionType) *apis.Condition

func (*KafkaSourceStatus) InitializeConditions

func (s *KafkaSourceStatus) InitializeConditions()

InitializeConditions sets relevant unset conditions to Unknown state.

func (*KafkaSourceStatus) IsReady

func (s *KafkaSourceStatus) IsReady() bool

IsReady returns true if the resource is ready overall.
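
A hedged sketch of the typical status lifecycle a reconciler drives (the sink URL is made up and import paths are assumptions):

package main

import (
	"fmt"

	"knative.dev/pkg/apis"
	duckv1 "knative.dev/pkg/apis/duck/v1"

	// Import path assumed.
	"knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1"
)

func main() {
	var status v1beta1.KafkaSourceStatus

	// Start with the required conditions set to Unknown.
	status.InitializeConditions()

	// A reconciler flips individual conditions as work completes.
	status.MarkSink(&duckv1.Addressable{URL: apis.HTTP("broker-ingress.knative-eventing.svc.cluster.local")})
	status.MarkConnectionEstablished()
	status.MarkInitialOffsetCommitted()

	// Still false here: conditions such as Deployed have not been marked True.
	fmt.Println("ready:", status.IsReady())
}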

func (*KafkaSourceStatus) MarkConnectionEstablished

func (cs *KafkaSourceStatus) MarkConnectionEstablished()

func (*KafkaSourceStatus) MarkConnectionNotEstablished

func (cs *KafkaSourceStatus) MarkConnectionNotEstablished(reason, messageFormat string, messageA ...interface{})

func (*KafkaSourceStatus) MarkDeployed

func (s *KafkaSourceStatus) MarkDeployed(d *appsv1.Deployment)

MarkDeployed sets the condition that the source has been deployed.

func (*KafkaSourceStatus) MarkDeploying

func (s *KafkaSourceStatus) MarkDeploying(reason, messageFormat string, messageA ...interface{})

MarkDeploying sets the condition that the source is deploying.

func (*KafkaSourceStatus) MarkInitialOffsetCommitted

func (s *KafkaSourceStatus) MarkInitialOffsetCommitted()

func (*KafkaSourceStatus) MarkInitialOffsetNotCommitted

func (s *KafkaSourceStatus) MarkInitialOffsetNotCommitted(reason, messageFormat string, messageA ...interface{})

func (*KafkaSourceStatus) MarkKeyTypeCorrect

func (s *KafkaSourceStatus) MarkKeyTypeCorrect()

func (*KafkaSourceStatus) MarkKeyTypeIncorrect

func (s *KafkaSourceStatus) MarkKeyTypeIncorrect(reason, messageFormat string, messageA ...interface{})

func (*KafkaSourceStatus) MarkNoSink

func (s *KafkaSourceStatus) MarkNoSink(reason, messageFormat string, messageA ...interface{})

MarkNoSink sets the condition that the source does not have a sink configured.

func (*KafkaSourceStatus) MarkNotDeployed

func (s *KafkaSourceStatus) MarkNotDeployed(reason, messageFormat string, messageA ...interface{})

MarkNotDeployed sets the condition that the source has not been deployed.

func (*KafkaSourceStatus) MarkNotScheduled

func (s *KafkaSourceStatus) MarkNotScheduled(reason, messageFormat string, messageA ...interface{})

func (*KafkaSourceStatus) MarkOIDCIdentityCreatedFailed added in v0.41.0

func (s *KafkaSourceStatus) MarkOIDCIdentityCreatedFailed(reason, messageFormat string, messageA ...interface{})

func (*KafkaSourceStatus) MarkOIDCIdentityCreatedSucceeded added in v0.41.0

func (s *KafkaSourceStatus) MarkOIDCIdentityCreatedSucceeded()

func (*KafkaSourceStatus) MarkOIDCIdentityCreatedSucceededWithReason added in v0.41.0

func (s *KafkaSourceStatus) MarkOIDCIdentityCreatedSucceededWithReason(reason, messageFormat string, messageA ...interface{})

func (*KafkaSourceStatus) MarkOIDCIdentityCreatedUnknown added in v0.41.0

func (s *KafkaSourceStatus) MarkOIDCIdentityCreatedUnknown(reason, messageFormat string, messageA ...interface{})

func (*KafkaSourceStatus) MarkScheduled

func (s *KafkaSourceStatus) MarkScheduled()

func (*KafkaSourceStatus) MarkSink

func (s *KafkaSourceStatus) MarkSink(addr *duckv1.Addressable)

MarkSink sets the condition that the source has a sink configured.

func (*KafkaSourceStatus) UpdateConsumerGroupStatus

func (s *KafkaSourceStatus) UpdateConsumerGroupStatus(status string)

type Offset

type Offset string
