Documentation ¶
Index ¶
- Constants
- Variables
- func CheckCurrentTimeExistsInExcludeWindow(currentTime time.Time, windowStartTime string, windowEndTime string) bool
- func CheckForWarehouseEnvVars() bool
- func CheckPGHealth(dbHandle *sql.DB) bool
- func GetExludeWindowStartEndTimes(excludeWindow map[string]interface{}) (string, string)
- func GetPrevScheduledTime(syncFrequency, syncStartAt string, currTime time.Time) time.Time
- func InitWarehouseAPI(dbHandle *sql.DB, log logger.LoggerI)
- func ScheduledTimes(syncFrequency, syncStartAt string) []int
- func Start(app app.Interface)
- func TriggerUploadHandler(sourceID string, destID string) error
- type BatchRouterEventT
- type ColumnInfoT
- type DataT
- type ErrorResponseT
- type HandleT
- type JobRunT
- type LoadFileJobT
- type MetadataT
- type PayloadT
- type ProcessStagingFilesJobT
- type QueryInput
- type SchemaHandleT
- type StagingFileT
- type TableSkipError
- type TableUploadReqT
- type TableUploadResT
- type TableUploadStatusT
- type TableUploadT
- type TablesResT
- type UploadAPIT
- type UploadColumnT
- type UploadColumnsOpts
- type UploadJobT
- func (job *UploadJobT) GetLoadFileGenStartTIme() time.Time
- func (job *UploadJobT) GetLoadFileType() string
- func (job *UploadJobT) GetLoadFilesMetadata(options warehouseutils.GetLoadFilesOptionsT) (loadFiles []warehouseutils.LoadFileT)
- func (job *UploadJobT) GetLocalSchema() warehouseutils.SchemaT
- func (job *UploadJobT) GetSampleLoadFileLocation(tableName string) (location string, err error)
- func (job *UploadJobT) GetSchemaInWarehouse() (schema warehouseutils.SchemaT)
- func (job *UploadJobT) GetSingleLoadFile(tableName string) (warehouseutils.LoadFileT, error)
- func (job *UploadJobT) GetTableSchemaInUpload(tableName string) warehouseutils.TableSchemaT
- func (job *UploadJobT) GetTableSchemaInWarehouse(tableName string) warehouseutils.TableSchemaT
- func (job *UploadJobT) ShouldOnDedupUseNewRecord() bool
- func (job *UploadJobT) UpdateLocalSchema(schema warehouseutils.SchemaT) error
- func (job *UploadJobT) UseRudderStorage() bool
- type UploadPagination
- type UploadReqT
- type UploadResT
- type UploadStatusOpts
- type UploadT
- type UploadsReqT
- type UploadsResT
- type WarehouseAdmin
- func (wh *WarehouseAdmin) Query(s QueryInput, reply *warehouseutils.QueryResult) error
- func (wh *WarehouseAdmin) QueryWhTables(tableUploadReq TableUploadReqT, reply *[]byte) error
- func (wh *WarehouseAdmin) QueryWhUploads(uploadsReq UploadsReqT, reply *[]byte) error
- func (wh *WarehouseAdmin) TriggerUpload(off bool, reply *string) error
Constants ¶
const ( Waiting = "waiting" GeneratedUploadSchema = "generated_upload_schema" CreatedTableUploads = "created_table_uploads" GeneratedLoadFiles = "generated_load_files" UpdatedTableUploadsCounts = "updated_table_uploads_counts" CreatedRemoteSchema = "created_remote_schema" ExportedUserTables = "exported_user_tables" ExportedData = "exported_data" ExportedIdentities = "exported_identities" Aborted = "aborted" )
Upload Status
const ( GeneratingStagingFileFailedState = "generating_staging_file_failed" GeneratedStagingFileState = "generated_staging_file" PopulatingHistoricIdentitiesState = "populating_historic_identities" PopulatingHistoricIdentitiesStateFailed = "populating_historic_identities_failed" FetchingRemoteSchemaFailed = "fetching_remote_schema_failed" InternalProcessingFailed = "internal_processing_failed" )
const ( TableUploadExecuting = "executing" TableUploadUpdatingSchema = "updating_schema" TableUploadUpdatingSchemaFailed = "updating_schema_failed" TableUploadUpdatedSchema = "updated_schema" TableUploadExporting = "exporting_data" TableUploadExportingFailed = "exporting_data_failed" UserTableUploadExportingFailed = "exporting_user_tables_failed" IdentityTableUploadExportingFailed = "exporting_identities_failed" TableUploadExported = "exported_data" )
Table Upload status
const ( UploadStatusField = "status" UploadStartLoadFileIDField = "start_load_file_id" UploadEndLoadFileIDField = "end_load_file_id" UploadUpdatedAtField = "updated_at" UploadTimingsField = "timings" UploadSchemaField = "schema" MergedSchemaField = "mergedschema" UploadLastExecAtField = "last_exec_at" )
const ( MasterMode = "master" SlaveMode = "slave" MasterSlaveMode = "master_and_slave" EmbeddedMode = "embedded" PooledWHSlaveMode = "embedded_master" )
warehouses worker modes
const ( DegradedMode = "degraded" StagingFilesPGNotifierChannel = "process_staging_file" )
const (
CloudSourceCateogry = "cloud"
)
Variables ¶
var ( WarehouseDestinations []string ShouldForceSetLowerVersion bool )
Functions ¶
func CheckCurrentTimeExistsInExcludeWindow ¶ added in v0.1.10
func CheckForWarehouseEnvVars ¶ added in v0.1.10
func CheckForWarehouseEnvVars() bool
CheckForWarehouseEnvVars checks if all the required environment variables for Warehouse are present.
func CheckPGHealth ¶
func GetExludeWindowStartEndTimes ¶ added in v0.1.10
func GetPrevScheduledTime ¶
GetPrevScheduledTime returns the closest previous scheduled time. E.g. syncing every 3 hrs starting at 13:00 (scheduled times: 13:00, 16:00, 19:00, 22:00, 01:00, 04:00, 07:00, 10:00), the previous scheduled time for a current time of 18:00 is 16:00 the same day, and for 00:30 it is 22:00 the previous day.
func InitWarehouseAPI ¶ added in v0.1.10
func ScheduledTimes ¶
ScheduledTimes returns all possible start times (minutes from the start of the day) as per the schedule. E.g. syncing every 3 hrs starting at 13:00 yields the scheduled times 13:00, 16:00, 19:00, 22:00, 01:00, 04:00, 07:00, 10:00.
func TriggerUploadHandler ¶ added in v0.1.10
Types ¶
type BatchRouterEventT ¶ added in v0.1.10
type ColumnInfoT ¶ added in v0.1.10
type ColumnInfoT struct { ColumnVal interface{} ColumnType string }
type ErrorResponseT ¶
type ErrorResponseT struct {
Error string
}
type JobRunT ¶ added in v0.1.10
type JobRunT struct {
// contains filtered or unexported fields
}
Temporary store used while processing a staging file into a load file.
func (*JobRunT) GetWriter ¶ added in v0.1.10
func (jobRun *JobRunT) GetWriter(tableName string) (warehouseutils.LoadFileWriterI, error)
type LoadFileJobT ¶
type LoadFileJobT struct { Upload UploadT StagingFile *StagingFileT Schema map[string]map[string]string Warehouse warehouseutils.WarehouseT Wg *misc.WaitGroup LoadFileIDsChan chan []int64 TableToBucketFolderMap map[string]string TableToBucketFolderMapLock *sync.RWMutex }
type PayloadT ¶
type PayloadT struct { BatchID string UploadID int64 StagingFileID int64 StagingFileLocation string UploadSchema map[string]map[string]string SourceID string SourceName string DestinationID string DestinationName string DestinationType string DestinationNamespace string DestinationConfig interface{} UniqueLoadGenID string UseRudderStorage bool RudderStoragePrefix string Output []loadFileUploadOutputT LoadFilePrefix string // prefix for the load file name LoadFileType string }
type ProcessStagingFilesJobT ¶
type ProcessStagingFilesJobT struct { Upload UploadT List []*StagingFileT Warehouse warehouseutils.WarehouseT }
type QueryInput ¶ added in v0.1.10
type SchemaHandleT ¶ added in v0.1.10
type SchemaHandleT struct {
// contains filtered or unexported fields
}
type StagingFileT ¶
type StagingFileT struct { ID int64 Location string SourceID string Schema json.RawMessage Status string // enum CreatedAt time.Time FirstEventAt time.Time LastEventAt time.Time UseRudderStorage bool // cloud sources specific info SourceBatchID string SourceTaskID string SourceTaskRunID string SourceJobID string SourceJobRunID string TimeWindow time.Time }
type TableSkipError ¶ added in v0.1.10
type TableSkipError struct {
// contains filtered or unexported fields
}
TableSkipError is a custom error type to capture if a table load is skipped because of a previously failed table load
func (*TableSkipError) Error ¶ added in v0.1.10
func (tse *TableSkipError) Error() string
type TableUploadReqT ¶ added in v0.1.10
type TableUploadReqT struct { UploadID int64 Name string API UploadAPIT }
func (TableUploadReqT) GetWhTableUploads ¶ added in v0.1.10
func (tableUploadReq TableUploadReqT) GetWhTableUploads() ([]*proto.WHTable, error)
type TableUploadResT ¶ added in v0.1.10
type TableUploadStatusT ¶ added in v0.1.10
type TableUploadStatusT struct {
// contains filtered or unexported fields
}
TableUploadStatusT captures the status of each table upload along with its parent upload_job's info, like destination_id and namespace.
type TableUploadT ¶ added in v0.1.10
type TableUploadT struct {
// contains filtered or unexported fields
}
func NewTableUpload ¶ added in v0.1.10
func NewTableUpload(uploadID int64, tableName string) *TableUploadT
type TablesResT ¶ added in v0.1.10
type TablesResT struct {
Tables []TableUploadResT `json:"tables,omitempty"`
}
type UploadAPIT ¶ added in v0.1.10
type UploadAPIT struct {
// contains filtered or unexported fields
}
var UploadAPI UploadAPIT
type UploadColumnT ¶ added in v0.1.10
type UploadColumnT struct { Column string Value interface{} }
type UploadColumnsOpts ¶ added in v0.1.10
type UploadColumnsOpts struct { Fields []UploadColumnT Txn *sql.Tx }
type UploadJobT ¶ added in v0.1.10
type UploadJobT struct {
// contains filtered or unexported fields
}
func (*UploadJobT) GetLoadFileGenStartTIme ¶ added in v0.1.10
func (job *UploadJobT) GetLoadFileGenStartTIme() time.Time
func (*UploadJobT) GetLoadFileType ¶ added in v0.1.10
func (job *UploadJobT) GetLoadFileType() string
func (*UploadJobT) GetLoadFilesMetadata ¶ added in v0.1.10
func (job *UploadJobT) GetLoadFilesMetadata(options warehouseutils.GetLoadFilesOptionsT) (loadFiles []warehouseutils.LoadFileT)
func (*UploadJobT) GetLocalSchema ¶ added in v0.1.10
func (job *UploadJobT) GetLocalSchema() warehouseutils.SchemaT
func (*UploadJobT) GetSampleLoadFileLocation ¶ added in v0.1.10
func (job *UploadJobT) GetSampleLoadFileLocation(tableName string) (location string, err error)
func (*UploadJobT) GetSchemaInWarehouse ¶ added in v0.1.10
func (job *UploadJobT) GetSchemaInWarehouse() (schema warehouseutils.SchemaT)
func (*UploadJobT) GetSingleLoadFile ¶ added in v0.1.10
func (job *UploadJobT) GetSingleLoadFile(tableName string) (warehouseutils.LoadFileT, error)
func (*UploadJobT) GetTableSchemaInUpload ¶ added in v0.1.10
func (job *UploadJobT) GetTableSchemaInUpload(tableName string) warehouseutils.TableSchemaT
func (*UploadJobT) GetTableSchemaInWarehouse ¶ added in v0.1.10
func (job *UploadJobT) GetTableSchemaInWarehouse(tableName string) warehouseutils.TableSchemaT
func (*UploadJobT) ShouldOnDedupUseNewRecord ¶ added in v0.1.10
func (job *UploadJobT) ShouldOnDedupUseNewRecord() bool
func (*UploadJobT) UpdateLocalSchema ¶ added in v0.1.10
func (job *UploadJobT) UpdateLocalSchema(schema warehouseutils.SchemaT) error
func (*UploadJobT) UseRudderStorage ¶ added in v0.1.10
func (job *UploadJobT) UseRudderStorage() bool
type UploadPagination ¶ added in v0.1.10
type UploadReqT ¶ added in v0.1.10
type UploadReqT struct { WorkspaceID string UploadId int64 API UploadAPIT }
func (UploadReqT) GetWHUpload ¶ added in v0.1.10
func (uploadReq UploadReqT) GetWHUpload() (*proto.WHUploadResponse, error)
func (UploadReqT) TriggerWHUpload ¶ added in v0.1.10
func (uploadReq UploadReqT) TriggerWHUpload() error
type UploadResT ¶ added in v0.1.10
type UploadResT struct { ID int64 `json:"id"` Namespace string `json:"namespace"` SourceID string `json:"source_id"` DestinationID string `json:"destination_id"` DestinationType string `json:"destination_type"` Status string `json:"status"` Error string `json:"error"` Attempt int32 `json:"attempt"` Duration int32 `json:"duration"` NextRetryTime string `json:"nextRetryTime"` FirstEventAt time.Time `json:"first_event_at"` LastEventAt time.Time `json:"last_event_at"` Tables []TableUploadResT `json:"tables,omitempty"` }
type UploadStatusOpts ¶ added in v0.1.10
type UploadStatusOpts struct { Status string AdditionalFields []UploadColumnT ReportingMetric types.PUReportedMetric }
type UploadT ¶ added in v0.1.10
type UploadT struct { ID int64 Namespace string SourceID string SourceType string SourceCategory string DestinationID string DestinationType string StartStagingFileID int64 EndStagingFileID int64 StartLoadFileID int64 EndLoadFileID int64 Status string UploadSchema warehouseutils.SchemaT MergedSchema warehouseutils.SchemaT Error json.RawMessage Timings []map[string]string FirstAttemptAt time.Time LastAttemptAt time.Time Attempts int64 Metadata json.RawMessage FirstEventAt time.Time LastEventAt time.Time UseRudderStorage bool LoadFileGenStartTime time.Time TimingsObj sql.NullString Priority int // cloud sources specific info SourceBatchID string SourceTaskID string SourceTaskRunID string SourceJobID string SourceJobRunID string LoadFileType string }
type UploadsReqT ¶ added in v0.1.10
type UploadsReqT struct { WorkspaceID string SourceID string DestinationID string DestinationType string Status string Limit int32 Offset int32 API UploadAPIT }
func (*UploadsReqT) GetWhUploads ¶ added in v0.1.10
func (uploadsReq *UploadsReqT) GetWhUploads() (uploadsRes *proto.WHUploadsResponse, err error)
func (*UploadsReqT) TriggerWhUploads ¶ added in v0.1.10
func (uploadsReq *UploadsReqT) TriggerWhUploads() (err error)
type UploadsResT ¶ added in v0.1.10
type UploadsResT struct { Uploads []UploadResT `json:"uploads"` Pagination UploadPagination `json:"pagination"` }
type WarehouseAdmin ¶ added in v0.1.10
type WarehouseAdmin struct{}
func (*WarehouseAdmin) Query ¶ added in v0.1.10
func (wh *WarehouseAdmin) Query(s QueryInput, reply *warehouseutils.QueryResult) error
Query the underlying warehouse
func (*WarehouseAdmin) QueryWhTables ¶ added in v0.1.10
func (wh *WarehouseAdmin) QueryWhTables(tableUploadReq TableUploadReqT, reply *[]byte) error
func (*WarehouseAdmin) QueryWhUploads ¶ added in v0.1.10
func (wh *WarehouseAdmin) QueryWhUploads(uploadsReq UploadsReqT, reply *[]byte) error
func (*WarehouseAdmin) TriggerUpload ¶ added in v0.1.10
func (wh *WarehouseAdmin) TriggerUpload(off bool, reply *string) error
TriggerUpload sets uploads to start without delay