Documentation ¶
Overview ¶
Package collection manages persistence and lifecycle of collections.
Index ¶
- Constants
- Variables
- func BulkWorkflow(ctx workflow.Context, params BulkWorkflowInput) error
- func InitProcessingWorkflow(ctx context.Context, c client.Client, watcherName, pipelineName string, ...) error
- func NewService(logger logr.Logger, db *sql.DB, cc cadenceclient.Client, ...) *collectionImpl
- func TriggerProcessingWorkflow(ctx context.Context, c client.Client, req *ProcessingWorkflowRequest) error
- type BulkActivity
- type BulkProgress
- type BulkWorkflowInput
- type BulkWorkflowOperation
- type Collection
- type ProcessingWorkflowRequest
- type Service
- type Status
Constants ¶
View Source
const ( BulkWorkflowName = "collection-bulk-workflow" BulkWorkflowID = "collection-bulk-workflow" BulkWorkflowStateQueryHandler = "collection-bulk-state" BulkActivityName = "collection-bulk-activity" )
View Source
const ( // Name of the collection processing workflow. ProcessingWorkflowName = "processing-workflow" // Maximum duration of the processing workflow. Cadence does not support // workflows with infinite duration for now, but high values are fine. // Ten years is the timeout we also use in activities (policies.go). ProcessingWorkflowStartToCloseTimeout = time.Hour * 24 * 365 * 10 )
Variables ¶
Functions ¶
func BulkWorkflow ¶ added in v0.26.0
func BulkWorkflow(ctx workflow.Context, params BulkWorkflowInput) error
BulkWorkflow is a Cadence workflow that performs bulk operations.
func InitProcessingWorkflow ¶
func NewService ¶
Types ¶
type BulkActivity ¶ added in v0.26.0
type BulkActivity struct {
// contains filtered or unexported fields
}
func NewBulkActivity ¶ added in v0.26.0
func NewBulkActivity(colsvc Service) *BulkActivity
func (*BulkActivity) Execute ¶ added in v0.26.0
func (a *BulkActivity) Execute(ctx context.Context, params BulkWorkflowInput) error
type BulkProgress ¶ added in v0.26.0
BulkProgress reports bulk operation status - delivered as heartbeats.
type BulkWorkflowInput ¶ added in v0.26.0
type BulkWorkflowInput struct { // Status of collections where bulk is performed. Status Status // Type of operation that is performed, e.g. "retry", "cancel"... Operation BulkWorkflowOperation // Max. number of collections affected. Zero means no cap established. Size uint }
type BulkWorkflowOperation ¶ added in v0.26.0
type BulkWorkflowOperation string
const ( BulkWorkflowOperationRetry BulkWorkflowOperation = "retry" BulkWorkflowOperationCancel BulkWorkflowOperation = "cancel" BulkWorkflowOperationAbandon BulkWorkflowOperation = "abandon" )
type Collection ¶
type Collection struct { ID uint `db:"id"` Name string `db:"name"` WorkflowID string `db:"workflow_id"` RunID string `db:"run_id"` TransferID string `db:"transfer_id"` AIPID string `db:"aip_id"` OriginalID string `db:"original_id"` PipelineID string `db:"pipeline_id"` DecisionToken string `db:"decision_token"` Status Status `db:"status"` // It defaults to CURRENT_TIMESTAMP(6) so populated as soon as possible. CreatedAt time.Time `db:"created_at"` // Nullable, populated as soon as processing starts. StartedAt sql.NullTime `db:"started_at"` // Nullable, populated as soon as ingest completes. CompletedAt sql.NullTime `db:"completed_at"` }
Collection represents a collection in the collection table.
func (Collection) Goa ¶
func (c Collection) Goa() *goacollection.EnduroStoredCollection
Goa returns the API representation of the collection.
type ProcessingWorkflowRequest ¶ added in v0.4.0
type ProcessingWorkflowRequest struct { WorkflowID string `json:"-"` // The zero value represents a new collection. It can be used to indicate // an existing collection in retries. CollectionID uint // Name of the watcher that received this blob. WatcherName string // Pipeline name. PipelineName string // Period of time to schedule the deletion of the original blob from the // watched data source. nil means no deletion. RetentionPeriod *time.Duration // Whether the top-level directory is meant to be stripped. StripTopLevelDir bool // Key of the blob. Key string // Batch directory that contains the blob. BatchDir string // Configuration for validating the transfer. ValidationConfig validation.Config }
type Service ¶
type Service interface { // Goa returns an implementation of the goacollection Service. Goa() goacollection.Service Create(context.Context, *Collection) error UpdateWorkflowStatus(ctx context.Context, ID uint, name string, workflowID, runID, transferID, aipID, pipelineID string, status Status, storedAt time.Time) error SetStatus(ctx context.Context, ID uint, status Status) error SetStatusInProgress(ctx context.Context, ID uint, startedAt time.Time) error SetStatusPending(ctx context.Context, ID uint, taskToken []byte) error SetOriginalID(ctx context.Context, ID uint, originalID string) error // HTTPDownload returns an HTTP handler that serves the package over HTTP. // // TODO: this service is meant to be agnostic to protocols. But I haven't // found a way in goagen to have my service write directly to the HTTP // response writer. Ideally, our goacollection.Service would have a new // method that takes an io.Writer (e.g. http.ResponseWriter). HTTPDownload(mux goahttp.Muxer, dec func(r *http.Request) goahttp.Decoder) http.HandlerFunc }
type Status ¶
type Status uint
See https://gist.github.com/sevein/dd36c2af23fd0d9e2e2438d8eb091314.
const ( StatusNew Status = iota // Unused! StatusInProgress // Undergoing work. StatusDone // Work has completed. StatusError // Processing failed. StatusUnknown // Unused! StatusQueued // Awaiting resource allocation. StatusAbandoned // User abandoned processing. StatusPending // Awaiting user decision. )
func (Status) MarshalJSON ¶
func (*Status) UnmarshalJSON ¶
Source Files ¶
Click to show internal directories.
Click to hide internal directories.