snakemake

package
v0.0.4 (not the latest version of this module)
Published: Oct 14, 2024 License: MIT Imports: 11 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func OutputLogDataToGlazedProcessor

func OutputLogDataToGlazedProcessor(ctx context.Context, gp middlewares.Processor, logData LogData, verbose bool, dataType string, filename string) error

OutputLogDataToGlazedProcessor outputs the LogData to a Glazed processor.
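
A minimal usage sketch follows. The import paths are assumptions (github.com/go-go-golems/glazed/pkg/middlewares for the glazed Processor, and example.com/snakemake as a stand-in for this module's real path), and the "jobs" dataType value is hypothetical; check the implementation for the accepted values.

import (
	"context"

	"github.com/go-go-golems/glazed/pkg/middlewares" // assumed import path
	"example.com/snakemake"                          // placeholder module path
)

// emitJobs sends already-parsed log data to an existing glazed processor.
func emitJobs(ctx context.Context, gp middlewares.Processor, data snakemake.LogData) error {
	// The dataType "jobs" and the filename argument are illustrative values.
	return snakemake.OutputLogDataToGlazedProcessor(ctx, gp, data, false, "jobs", "pipeline.log")
}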

Types

type DateData

type DateData struct {
	Time time.Time
}

DateData represents the parsed date information.

func (DateData) ToHash

func (d DateData) ToHash() map[string]interface{}

type EmptyData

type EmptyData struct{}

EmptyData represents an empty data structure for tokens that don't carry additional information.

func (EmptyData) ToHash

func (e EmptyData) ToHash() map[string]interface{}

type GenericPairData

type GenericPairData struct {
	Field string
	Value string
}

GenericPairData represents the parsed generic field/value pair information.

func (GenericPairData) ToHash

func (g GenericPairData) ToHash() map[string]interface{}

type InputData

type InputData struct {
	Inputs []string
}

InputData represents the parsed input information.

func (InputData) ToHash

func (i InputData) ToHash() map[string]interface{}

type Job

type Job struct {
	ID           string
	Rule         string
	StartTime    time.Time
	EndTime      time.Time
	Duration     time.Duration
	Status       JobStatus
	Input        []string
	Output       []string
	Reason       string
	Threads      int
	Details      map[string]string
	Resources    []Resource
	Wildcards    map[string]string
	ExternalID   string
	ScannerError string // Error reported by the scanner while reading the log, if any
}

Job represents a Snakemake job with its details.
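
For example, a consumer can walk the parsed jobs and report the completed ones. A minimal sketch (imports as in the other examples; the helper name is illustrative):

// printCompleted lists each completed job with its rule and duration.
func printCompleted(jobs []*snakemake.Job) {
	for _, job := range jobs {
		if job.Status == snakemake.StatusCompleted {
			fmt.Printf("%s (rule %s) took %s\n", job.ID, job.Rule, job.Duration)
		}
	}
}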

type JobEndData

type JobEndData struct {
	JobID int
}

JobEndData represents the parsed job end information.

func (JobEndData) ToHash

func (j JobEndData) ToHash() map[string]interface{}

type JobIDData

type JobIDData struct {
	ID int
}

JobIDData represents the parsed job ID information.

func (JobIDData) ToHash

func (j JobIDData) ToHash() map[string]interface{}

type JobStartData

type JobStartData struct {
	RuleName string
}

JobStartData represents the parsed job start information.

func (JobStartData) ToHash

func (j JobStartData) ToHash() map[string]interface{}

type JobStatsData

type JobStatsData struct {
	Stats map[string]int
}

JobStatsData represents the parsed job stats information.

func (JobStatsData) ToHash

func (j JobStatsData) ToHash() map[string]interface{}

type JobStatus

type JobStatus string

JobStatus represents the status of a job.

const (
	StatusInProgress JobStatus = "In Progress"
	StatusCompleted  JobStatus = "Completed"
)

type JobSubmittedData

type JobSubmittedData struct {
	JobID      int
	ExternalID string
}

JobSubmittedData represents the parsed job submission information.

func (JobSubmittedData) ToHash

func (j JobSubmittedData) ToHash() map[string]interface{}

type LogData

type LogData struct {
	Rules       map[string]*Rule
	Jobs        []*Job
	FullLog     string
	TotalJobs   int
	Completed   int
	InProgress  int
	LastUpdated time.Time
	JobStats    map[string]int
}

LogData holds the parsed data from the Snakemake log.

func ParseLog

func ParseLog(filename string, debug bool) (LogData, error)

ParseLog parses the Snakemake log file and returns structured LogData.
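
A minimal end-to-end sketch, assuming example.com/snakemake as a placeholder for this module's real import path:

package main

import (
	"fmt"
	"log"

	"example.com/snakemake" // placeholder module path
)

func main() {
	// Parse a Snakemake log with debug output disabled.
	data, err := snakemake.ParseLog("pipeline.log", false)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("jobs: %d total, %d completed, %d in progress\n",
		data.TotalJobs, data.Completed, data.InProgress)
	for name, rule := range data.Rules {
		fmt.Printf("rule %s: %d jobs\n", name, len(rule.Jobs))
	}
}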

type OutputData

type OutputData struct {
	Outputs []string
}

OutputData represents the parsed output information.

func (OutputData) ToHash

func (o OutputData) ToHash() map[string]interface{}

type Parser

type Parser struct {
	// contains filtered or unexported fields
}

Parser handles the parsing logic using tokens provided by the Tokenizer.

func NewParser

func NewParser(tokenizer *Tokenizer, debug bool) *Parser

NewParser initializes and returns a new Parser.

func (*Parser) ParseLog

func (p *Parser) ParseLog() (LogData, error)

ParseLog parses the Snakemake log and returns structured LogData.
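
The package-level ParseLog above appears to be a convenience wrapper over this pair; driving them directly looks roughly like this (a sketch, assuming the caller owns the tokenizer's lifetime):

// parseWithDebug wires a Tokenizer into a Parser by hand.
func parseWithDebug(filename string) (snakemake.LogData, error) {
	tok, err := snakemake.NewTokenizer(filename, true)
	if err != nil {
		return snakemake.LogData{}, err
	}
	// Assumes the caller, not Parser.ParseLog, is responsible for Close.
	defer tok.Close()

	p := snakemake.NewParser(tok, true)
	return p.ParseLog()
}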

type ReasonData

type ReasonData struct {
	Reason string
}

ReasonData represents the parsed reason information.

func (ReasonData) ToHash

func (r ReasonData) ToHash() map[string]interface{}

type Resource

type Resource struct {
	Name  string
	Value string
}

Resource represents a resource with a name and value.

type ResourcesData

type ResourcesData struct {
	Resources map[string]string
}

ResourcesData represents the parsed resources information.

func (ResourcesData) ToHash

func (r ResourcesData) ToHash() map[string]interface{}

type Rule

type Rule struct {
	Name      string
	Jobs      []*Job
	Resources []Resource
}

Rule represents a Snakemake rule with its associated jobs and resources.

type ThreadsData

type ThreadsData struct {
	Threads int
}

ThreadsData represents the parsed threads information.

func (ThreadsData) ToHash

func (t ThreadsData) ToHash() map[string]interface{}

type Token

type Token struct {
	Type    TokenType
	Content string
}

Token represents a single token identified by the tokenizer.

type TokenData

type TokenData interface {
	ToHash() map[string]interface{}
}

TokenData is the interface implemented by the structured token data types.
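
The concrete *Data types above all satisfy this interface, so a consumer can type-switch on the payload or fall back to the generic map view. A sketch (it assumes the concrete types are stored behind the interface by value, matching the value receivers documented above):

// describe prints a token's structured payload.
func describe(data snakemake.TokenData) {
	switch d := data.(type) {
	case snakemake.JobIDData:
		fmt.Println("job id:", d.ID)
	case snakemake.WildcardsData:
		fmt.Println("wildcards:", d.Wildcards)
	default:
		// Any TokenData can be flattened to a map via ToHash.
		fmt.Println(d.ToHash())
	}
}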

type TokenType

type TokenType string

TokenType defines the type of token identified by the tokenizer.

const (
	TokenUnknown      TokenType = "UNKNOWN"
	TokenDate         TokenType = "DATE"
	TokenJobStart     TokenType = "JOB_START"
	TokenJobEnd       TokenType = "JOB_END"
	TokenJobID        TokenType = "JOB_ID"
	TokenWildcards    TokenType = "WILDCARDS"
	TokenResources    TokenType = "RESOURCES"
	TokenJobSubmitted TokenType = "JOB_SUBMITTED"
	TokenConfigInfo   TokenType = "CONFIG_INFO"
	TokenDAGBuilding  TokenType = "DAG_BUILDING"
	TokenJobStats     TokenType = "JOB_STATS"
	TokenJobSelection TokenType = "JOB_SELECTION"
	TokenEOF          TokenType = "EOF"
	TokenInput        TokenType = "INPUT"
	TokenOutput       TokenType = "OUTPUT"
	TokenReason       TokenType = "REASON"
	TokenThreads      TokenType = "THREADS"
	TokenGenericPair  TokenType = "GENERIC_PAIR"
	TokenScannerError TokenType = "SCANNER_ERROR"
)

type Tokenizer

type Tokenizer struct {
	// contains filtered or unexported fields
}

Tokenizer is responsible for breaking the log into tokens.

func NewTokenizer

func NewTokenizer(filename string, debug bool) (*Tokenizer, error)

NewTokenizer initializes and returns a new Tokenizer.

func (*Tokenizer) Close

func (t *Tokenizer) Close() error

Close closes the file associated with the Tokenizer.

func (*Tokenizer) NextToken

func (t *Tokenizer) NextToken() (Token, TokenData, error)

NextToken returns the next token from the log.
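
A tokenizing loop might look like the sketch below; it assumes end of input is signaled by a TokenEOF token rather than an error, and that data may be nil for tokens without a structured payload:

// dumpTokens prints every token in the log until EOF.
func dumpTokens(filename string) error {
	tok, err := snakemake.NewTokenizer(filename, false)
	if err != nil {
		return err
	}
	defer tok.Close()

	for {
		token, data, err := tok.NextToken()
		if err != nil {
			return err
		}
		if token.Type == snakemake.TokenEOF {
			return nil
		}
		fmt.Printf("%-14s %q\n", token.Type, token.Content)
		if data != nil {
			fmt.Println("  ", data.ToHash())
		}
	}
}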

type WildcardsData

type WildcardsData struct {
	Wildcards map[string]string
}

WildcardsData represents the parsed wildcards information.

func (WildcardsData) ToHash

func (w WildcardsData) ToHash() map[string]interface{}
