Documentation
¶
Index ¶
- func OutputLogDataToGlazedProcessor(ctx context.Context, gp middlewares.Processor, logData LogData, verbose bool, ...) error
- type DateData
- type EmptyData
- type GenericPairData
- type InputData
- type Job
- type JobEndData
- type JobIDData
- type JobStartData
- type JobStatsData
- type JobStatus
- type JobSubmittedData
- type LogData
- type OutputData
- type Parser
- type ReasonData
- type Resource
- type ResourcesData
- type Rule
- type ThreadsData
- type Token
- type TokenData
- type TokenType
- type Tokenizer
- type WildcardsData
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func OutputLogDataToGlazedProcessor ¶
func OutputLogDataToGlazedProcessor(ctx context.Context, gp middlewares.Processor, logData LogData, verbose bool, ...) error
Types ¶
type EmptyData ¶
type EmptyData struct{}
EmptyData represents an empty data structure for tokens that don't carry additional information.
type GenericPairData ¶
GenericPairData represents the parsed generic pair information.
func (GenericPairData) ToHash ¶
func (g GenericPairData) ToHash() map[string]interface{}
type InputData ¶
type InputData struct {
    Inputs []string
}
InputData represents the parsed input information.
type Job ¶
type Job struct {
    ID           string
    Rule         string
    StartTime    time.Time
    EndTime      time.Time
    Duration     time.Duration
    Status       JobStatus
    Input        []string
    Output       []string
    Reason       string
    Threads      int
    Details      map[string]string
    Resources    []Resource
    Wildcards    map[string]string
    ExternalID   string
    ScannerError string // New field to store scanner error
}
Job represents a Snakemake job with its details.
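As a minimal sketch, a consumer could summarize a parsed job using only the fields documented above (summarize is a hypothetical helper, not part of this package):

// summarize prints a one-line report for a parsed Job, using only
// documented fields (hypothetical helper, requires the fmt import).
func summarize(j *Job) {
    fmt.Printf("job %s (rule %s): status=%v, %d thread(s), took %s\n",
        j.ID, j.Rule, j.Status, j.Threads, j.Duration)
}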
type JobEndData ¶
type JobEndData struct {
    JobID int
}
JobEndData represents the parsed job end information.
func (JobEndData) ToHash ¶
func (j JobEndData) ToHash() map[string]interface{}
type JobIDData ¶
type JobIDData struct {
    ID int
}
JobIDData represents the parsed job ID information.
type JobStartData ¶
type JobStartData struct {
    RuleName string
}
JobStartData represents the parsed job start information.
func (JobStartData) ToHash ¶
func (j JobStartData) ToHash() map[string]interface{}
type JobStatsData ¶
JobStatsData represents the parsed job stats information.
func (JobStatsData) ToHash ¶
func (j JobStatsData) ToHash() map[string]interface{}
type JobSubmittedData ¶
JobSubmittedData represents the parsed job submitted information.
func (JobSubmittedData) ToHash ¶
func (j JobSubmittedData) ToHash() map[string]interface{}
type LogData ¶
type LogData struct {
    Rules       map[string]*Rule
    Jobs        []*Job
    FullLog     string
    TotalJobs   int
    Completed   int
    InProgress  int
    LastUpdated time.Time
    JobStats    map[string]int
}
LogData holds the parsed data from the Snakemake log.
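For example, a caller could report overall progress from the counters above (progress is a hypothetical helper, not part of this package):

// progress prints a run summary using only documented LogData fields
// (hypothetical helper, requires the fmt and time imports).
func progress(ld *LogData) {
    fmt.Printf("%d/%d jobs completed, %d in progress (updated %s)\n",
        ld.Completed, ld.TotalJobs, ld.InProgress,
        ld.LastUpdated.Format(time.RFC3339))
}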
type OutputData ¶
type OutputData struct {
    Outputs []string
}
OutputData represents the parsed output information.
func (OutputData) ToHash ¶
func (o OutputData) ToHash() map[string]interface{}
type Parser ¶
type Parser struct {
    // contains filtered or unexported fields
}
Parser handles the parsing logic using tokens provided by the Tokenizer.
type ReasonData ¶
type ReasonData struct {
    Reason string
}
ReasonData represents the parsed reason information.
func (ReasonData) ToHash ¶
func (r ReasonData) ToHash() map[string]interface{}
type ResourcesData ¶
ResourcesData represents the parsed resources information.
func (ResourcesData) ToHash ¶
func (r ResourcesData) ToHash() map[string]interface{}
type ThreadsData ¶
type ThreadsData struct {
    Threads int
}
ThreadsData represents the parsed threads information.
func (ThreadsData) ToHash ¶
func (t ThreadsData) ToHash() map[string]interface{}
type TokenData ¶
type TokenData interface {
    ToHash() map[string]interface{}
}
TokenData is an interface for structured token data.
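Any type with a ToHash method satisfies the interface. As a sketch, a hypothetical payload type could be added like this:

// CustomData is a hypothetical payload showing how a new token type
// could satisfy TokenData; it is not part of this package.
type CustomData struct {
    Key   string
    Value string
}

func (c CustomData) ToHash() map[string]interface{} {
    return map[string]interface{}{"key": c.Key, "value": c.Value}
}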
type TokenType ¶
type TokenType string
TokenType defines the type of token identified by the tokenizer.
const (
    TokenUnknown      TokenType = "UNKNOWN"
    TokenDate         TokenType = "DATE"
    TokenJobStart     TokenType = "JOB_START"
    TokenJobEnd       TokenType = "JOB_END"
    TokenJobID        TokenType = "JOB_ID"
    TokenWildcards    TokenType = "WILDCARDS"
    TokenResources    TokenType = "RESOURCES"
    TokenJobSubmitted TokenType = "JOB_SUBMITTED"
    TokenConfigInfo   TokenType = "CONFIG_INFO"
    TokenDAGBuilding  TokenType = "DAG_BUILDING"
    TokenJobStats     TokenType = "JOB_STATS"
    TokenJobSelection TokenType = "JOB_SELECTION"
    TokenEOF          TokenType = "EOF"
    TokenInput        TokenType = "INPUT"
    TokenOutput       TokenType = "OUTPUT"
    TokenReason       TokenType = "REASON"
    TokenThreads      TokenType = "THREADS"
    TokenGenericPair  TokenType = "GENERIC_PAIR"
    TokenScannerError TokenType = "SCANNER_ERROR"
)
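A consumer would typically branch on these constants. The sketch below (describe is a hypothetical helper, not part of this package) maps a few of them to labels:

// describe maps a TokenType to a human-readable label
// (hypothetical helper, uses only constants documented above).
func describe(t TokenType) string {
    switch t {
    case TokenJobStart:
        return "rule execution started"
    case TokenJobEnd:
        return "job finished"
    case TokenScannerError:
        return "scanner reported an error"
    default:
        return string(t)
    }
}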
type Tokenizer ¶
type Tokenizer struct {
    // contains filtered or unexported fields
}
Tokenizer is responsible for breaking the log into tokens.
func NewTokenizer ¶
NewTokenizer initializes and returns a new Tokenizer.
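The constructor's parameters are elided on this page. As a hedged sketch, assuming NewTokenizer accepts the log source as an io.Reader (an assumption, not confirmed here):

// Hypothetical usage; the io.Reader argument is an assumption,
// since NewTokenizer's actual signature is not shown on this page.
f, err := os.Open("snakemake.log")
if err != nil {
    log.Fatal(err)
}
defer f.Close()
tok := NewTokenizer(f) // assumed: NewTokenizer(r io.Reader) *Tokenizer
_ = tok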
type WildcardsData ¶
WildcardsData represents the parsed wildcards information.
func (WildcardsData) ToHash ¶
func (w WildcardsData) ToHash() map[string]interface{}