package model

v1.6.0-preview.3
Published: Feb 14, 2023 License: AGPL-3.0 Imports: 5 Imported by: 0

Documentation

Index

Constants

const (
	Waiting                   = "waiting"
	GeneratedUploadSchema     = "generated_upload_schema"
	CreatedTableUploads       = "created_table_uploads"
	GeneratingLoadFiles       = "generating_load_files"
	GeneratedLoadFiles        = "generated_load_files"
	UpdatedTableUploadsCounts = "updated_table_uploads_counts"
	CreatedRemoteSchema       = "created_remote_schema"
	ExportedUserTables        = "exported_user_tables"
	ExportedData              = "exported_data"
	ExportingData             = "exporting_data"
	ExportingDataFailed       = "exporting_data_failed"
	ExportedIdentities        = "exported_identities"
	Aborted                   = "aborted"
	Failed                    = "failed"
)
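
The constants above name the states an upload can be in. As a hedged illustration only, the helper below (hypothetical, not part of this package) treats ExportedData, Aborted, and Failed as the terminal states; which states are truly terminal is an assumption here, not confirmed by the package.

// isTerminalStatus is a hypothetical helper, not part of this package.
// It assumes ExportedData, Aborted, and Failed are the only end states.
func isTerminalStatus(status string) bool {
	switch status {
	case ExportedData, Aborted, Failed:
		return true
	default:
		return false
	}
}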

Variables

var ErrUploadNotFound = errors.New("upload not found")

Functions

func GetLastFailedStatus added in v1.6.0

func GetLastFailedStatus(timingsMap Timings) (status string)

func GetLoadFileGenTime added in v1.6.0

func GetLoadFileGenTime(timingsMap Timings) (t time.Time)
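
Both functions operate on a Timings value (defined under Types below as a slice of status-to-timestamp maps). A minimal, hedged sketch of calling them; the timestamps are made up, and which timing keys each function actually reads is an assumption, not confirmed here.

timings := Timings{
	{GeneratedLoadFiles: time.Date(2023, 2, 14, 9, 30, 0, 0, time.UTC)},
	{ExportingDataFailed: time.Date(2023, 2, 14, 10, 0, 0, 0, time.UTC)},
}

// Assumed behaviour: GetLastFailedStatus reports the most recent failed
// state, and GetLoadFileGenTime the timestamp recorded for load-file generation.
failedStatus := GetLastFailedStatus(timings)
loadFileGenTime := GetLoadFileGenTime(timings)
_, _ = failedStatus, loadFileGenTime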

Types

type AlterTableResponse added in v1.6.0

type AlterTableResponse struct {
	IsDependent bool // true if the column is dependent on another view or rule, false otherwise
	Query       string
}
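
A hedged sketch of how a caller might branch on IsDependent; the handling shown is illustrative, not this package's behaviour, and handleAlterTable is a hypothetical helper.

// handleAlterTable is a hypothetical consumer of AlterTableResponse.
func handleAlterTable(resp AlterTableResponse) {
	if resp.IsDependent {
		// Dependent views or rules exist; surface the query instead of
		// running it automatically (illustrative choice, not confirmed).
		log.Printf("dependent column, query not executed: %s", resp.Query)
		return
	}
	log.Printf("executing: %s", resp.Query)
}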

type JobError added in v1.6.0

type JobError struct {
	Type   JobErrorType
	Format Matcher
}

type JobErrorType added in v1.6.0

type JobErrorType string
const (
	PermissionError           JobErrorType = "permission_error"
	AlterColumnError          JobErrorType = "alter_column_error"
	ResourceNotFoundError     JobErrorType = "resource_not_found_error"
	ColumnCountError          JobErrorType = "column_count_error"
	ColumnSizeError           JobErrorType = "column_size_error"
	InsufficientResourceError JobErrorType = "insufficient_resource_error"
	ConcurrentQueriesError    JobErrorType = "concurrent_queries_error"
	UnknownError              JobErrorType = "unknown_error"
)

type LoadFile added in v1.6.0

type LoadFile struct {
	ID                    int64
	TableName             string
	Location              string
	TotalRows             int
	ContentLength         int64
	StagingFileID         int64
	DestinationRevisionID string
	UseRudderStorage      bool
	SourceID              string
	DestinationID         string
	DestinationType       string
}

type Matcher added in v1.6.0

type Matcher interface {
	MatchString(string) bool
}
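
*regexp.Regexp satisfies Matcher through its MatchString method, so a table of JobError values can classify raw error messages by type. A minimal sketch; the patterns and the classifyError helper are illustrative, not part of this package.

var jobErrorPatterns = []JobError{
	{Type: PermissionError, Format: regexp.MustCompile(`(?i)permission denied`)},
	{Type: ColumnSizeError, Format: regexp.MustCompile(`(?i)value too long`)},
}

// classifyError is a hypothetical helper mapping an error message to a JobErrorType.
func classifyError(msg string) JobErrorType {
	for _, je := range jobErrorPatterns {
		if je.Format.MatchString(msg) {
			return je.Type
		}
	}
	return UnknownError
}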

type Schema added in v1.6.0

type Schema = warehouseutils.SchemaT

type SchemaType

type SchemaType string
const (
	StringDataType  SchemaType = "string"
	BooleanDataType SchemaType = "boolean"
	IntDataType     SchemaType = "int"
	BigIntDataType  SchemaType = "bigint"
	FloatDataType   SchemaType = "float"
	JSONDataType    SchemaType = "json"
	TextDataType    SchemaType = "text"
)
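
As a hedged illustration of working with SchemaType, the sketch below builds a lookup set over the declared data types; validDataTypes and isValidDataType are hypothetical, not part of this package.

// validDataTypes is a hypothetical lookup set built from the constants above.
var validDataTypes = map[SchemaType]struct{}{
	StringDataType: {}, BooleanDataType: {}, IntDataType: {},
	BigIntDataType: {}, FloatDataType: {}, JSONDataType: {}, TextDataType: {},
}

func isValidDataType(t SchemaType) bool {
	_, ok := validDataTypes[t]
	return ok
}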

type StagingFile

type StagingFile struct {
	ID                    int64
	WorkspaceID           string
	Location              string
	SourceID              string
	DestinationID         string
	Status                string // enum
	Error                 error
	FirstEventAt          time.Time
	LastEventAt           time.Time
	UseRudderStorage      bool
	DestinationRevisionID string
	TotalEvents           int
	TotalBytes            int
	// cloud sources specific info
	SourceTaskRunID string
	SourceJobID     string
	SourceJobRunID  string
	TimeWindow      time.Time

	CreatedAt time.Time
	UpdatedAt time.Time
}

StagingFile is a domain model for a staging file.

The staging file contains events that should be loaded into a warehouse.
It is located in a cloud storage bucket.
The model includes ownership, file location, and other metadata.

func (StagingFile) WithSchema added in v1.4.3

func (s StagingFile) WithSchema(schema json.RawMessage) StagingFileWithSchema

type StagingFileWithSchema added in v1.4.3

type StagingFileWithSchema struct {
	StagingFile
	Schema json.RawMessage
}

StagingFileWithSchema is a StagingFile with a schema field describing the included events.

The schema can be very large, so it should be attached only when required.
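
A hedged sketch of attaching the schema only at the point it is needed; the field values and the JSON payload are made up for illustration, and encoding/json is assumed to be imported.

sf := StagingFile{
	ID:            27,
	WorkspaceID:   "ws-1",
	SourceID:      "src-1",
	DestinationID: "dst-1",
	Location:      "some-bucket/staging-file.json.gz", // illustrative value
}

// Attach the (potentially large) schema only when it is actually required.
withSchema := sf.WithSchema(json.RawMessage(`{"tracks":{"id":"string"}}`))
_ = withSchema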

type Timings added in v1.6.0

type Timings []map[string]time.Time

type Upload added in v1.6.0

type Upload struct {
	ID          int64
	WorkspaceID string

	Namespace       string
	SourceID        string
	DestinationID   string
	DestinationType string
	Status          string
	Error           json.RawMessage
	FirstEventAt    time.Time
	LastEventAt     time.Time

	UseRudderStorage bool
	SourceTaskRunID  string
	SourceJobID      string
	SourceJobRunID   string
	LoadFileType     string
	NextRetryTime    time.Time
	Priority         int
	Retried          bool

	StagingFileStartID int64
	StagingFileEndID   int64

	LoadFileStartID int64
	LoadFileEndID   int64

	Timings        Timings
	FirstAttemptAt time.Time
	LastAttemptAt  time.Time
	Attempts       int64

	UploadSchema Schema
	MergedSchema Schema
}

type UploadJob added in v1.6.0

type UploadJob struct {
	Warehouse            warehouseutils.Warehouse
	Upload               Upload
	StagingFiles         []*StagingFile
	LoadFileGenStartTime time.Time
}

type UploadJobsStats added in v1.6.0

type UploadJobsStats struct {
	PendingJobs    int64
	PickupLag      time.Duration
	PickupWaitTime time.Duration
}

type UploadStatus added in v1.6.0

type UploadStatus = string
