Documentation ¶
Index ¶
- Constants
- Variables
- func Copy(sourceFolder string, destinationFolder string) error
- func CopyFile(sourceFile string, destinationFile string) error
- func Delete(filename string)
- func FileExists(filename string) bool
- func GenerateTimeFileNameStr() string
- func GetDirectories(inputPath string) ([]string, error)
- func GetFolderFileType(folder string) (string, error)
- func GetUniqueFolder(folder string) string
- func GetUniqueName(filename string) string
- func Hash(data string) (string, error)
- func InitializeDeleteBuffer(bufferTimeSeconds int)
- func IsArchiveFile(filePath string) bool
- func IsDatasetDir(dir string) bool
- func IsDirectory(filePath string) bool
- func IsInDirectory(directory string, filename string) bool
- func Move(sourcePath string, destinationPath string) error
- func ReadCSVFile(filename string, hasHeader bool) ([][]string, error)
- func ReadCSVHeader(filename string) ([]string, error)
- func RemoveContents(dir string, includeParent bool) error
- func StartMemLogging(intervalMs int)
- func Unzip(zipFile string, destination string) error
- func WriteFileWithDirs(filename string, data []byte, perm os.FileMode) error
- func WriteFormFileWithDirs(filename string, formFile multipart.File, perm os.FileMode) error
- type Handler
- type Metric
- type MetricID
- type TimerStack
Constants ¶
const ( // Accuracy identifies model metric based on nearness to the original result. Accuracy = "accuracy" // F1 identifies model metric based on precision and recall. F1 = "f1" // F1Micro identifies model metric based on precision and recall. F1Micro = "f1Micro" // F1Macro identifies model metric based on precision and recall. F1Macro = "f1Macro" // MeanAbsoluteError identifies model metric based on the average magnitude of errors in a set of predictions. MeanAbsoluteError = "meanAbsoluteError" // MeanSquaredError identifies model metric based on the quality of the estimator. MeanSquaredError = "meanSquaredError" // NormalizedMutualInformation identifies model metric based on the relationship between variables. NormalizedMutualInformation = "normalizedMutualInformation" // RocAuc identifies model metric based on the relationship between inputs and the result for binary classifications. RocAuc = "rocAuc" // RocAucMicro identifies model metric based on the relationship between inputs and the result for each multi-class classification option. RocAucMicro = "rocAucMicro" // RocAucMacro identifies model metric based on the relationship between inputs and the result for all multi-class classification options. RocAucMacro = "rocAucMacro" // RootMeanSquaredError identifies model metric based on the quality of an estimator and the average magnitude of the error. RootMeanSquaredError = "rootMeanSquaredError" // RootMeanSquaredErrorAvg identifies model metric based on the quality of an estimator averaged across classification options. RootMeanSquaredErrorAvg = "rootMeanSquaredErrorAvg" // RSquared identifies model metric based on the relationship between predictions and their inputs. RSquared = "rSquared" )
Variables ¶
var ( // AllModelMetrics defines a list of model scoring metrics AllModelMetrics = map[string]Metric{ Accuracy: { Accuracy, allModelLabels[Accuracy], allModelLabels[Accuracy] + " scores the result based only on the percentage of correct predictions.", }, F1: { F1, allModelLabels[F1], allModelLabels[F1] + " scoring averages true positives, false negatives and false positives for binary classifications, balancing precision and recall.", }, F1Macro: { F1Macro, allModelLabels[F1Macro], "F1 Macro scoring averages true positives, false negatives and false positives for all multi-class classification options, balancing precision and recall.", }, F1Micro: { F1Micro, allModelLabels[F1Micro], allModelLabels[F1Micro] + " scoring averages true positives, false negatives and false positives for each multi-class classification options for multi-class problems, balancing precision and recall.", }, RocAuc: { RocAuc, allModelLabels[RocAuc], allModelLabels[RocAuc] + " scoring compares relationship between inputs on result for binary classifications.", }, RocAucMacro: { RocAucMacro, allModelLabels[RocAucMacro], allModelLabels[RocAucMacro] + " scoring compares the relationship between inputs and the result for all multi-class classification options.", }, RocAucMicro: { RocAucMicro, allModelLabels[RocAucMicro], allModelLabels[RocAucMicro] + " scoring compares the relationship between inputs and the result for each multi-class classification options.", }, MeanAbsoluteError: { MeanAbsoluteError, allModelLabels[MeanAbsoluteError], allModelLabels[MeanAbsoluteError] + " measures the average magnitude of errors in a set of predictions.", }, MeanSquaredError: { MeanSquaredError, allModelLabels[MeanSquaredError], allModelLabels[MeanSquaredError] + " measures the quality of an estimator where values closer to 0 are better.", }, NormalizedMutualInformation: { NormalizedMutualInformation, allModelLabels[NormalizedMutualInformation], allModelLabels[NormalizedMutualInformation] + " scores the 
relationship / lack of entropy between variables where 0 is no relationship and 1 is a strong relationship.", }, RootMeanSquaredError: { RootMeanSquaredError, allModelLabels[RootMeanSquaredError], allModelLabels[RootMeanSquaredError] + " measures the quality of an estimator and the average magnitude of the error.", }, RootMeanSquaredErrorAvg: { RootMeanSquaredErrorAvg, allModelLabels[RootMeanSquaredErrorAvg], allModelLabels[RootMeanSquaredErrorAvg] + " measures the quality of an estimator and the average magnitude of the error averaged across classification options.", }, RSquared: { RSquared, allModelLabels[RSquared], allModelLabels[RSquared] + " measures the relationship between predictions and their inputs where values closer to 1 suggest a strong correlation.", }, } //TaskMetricMap maps tasks to metrics TaskMetricMap = map[string]map[string]Metric{ compute.BinaryTask: { Accuracy: AllModelMetrics[Accuracy], F1: AllModelMetrics[F1], RocAuc: AllModelMetrics[RocAuc], }, compute.MultiClassTask: { Accuracy: AllModelMetrics[Accuracy], F1Macro: AllModelMetrics[F1Macro], F1Micro: AllModelMetrics[F1Micro], RocAucMacro: AllModelMetrics[RocAucMacro], RocAucMicro: AllModelMetrics[RocAucMicro], }, compute.SemiSupervisedTask: { Accuracy: AllModelMetrics[Accuracy], F1Micro: AllModelMetrics[F1Micro], RocAucMicro: AllModelMetrics[RocAucMicro], }, compute.ClassificationTask: { Accuracy: AllModelMetrics[Accuracy], F1: AllModelMetrics[F1], F1Macro: AllModelMetrics[F1Macro], F1Micro: AllModelMetrics[F1Micro], RocAuc: AllModelMetrics[RocAuc], RocAucMacro: AllModelMetrics[RocAucMacro], RocAucMicro: AllModelMetrics[RocAucMicro], }, compute.RegressionTask: { MeanAbsoluteError: AllModelMetrics[MeanAbsoluteError], MeanSquaredError: AllModelMetrics[MeanSquaredError], RootMeanSquaredError: AllModelMetrics[RootMeanSquaredError], RootMeanSquaredErrorAvg: AllModelMetrics[RootMeanSquaredErrorAvg], RSquared: AllModelMetrics[RSquared], }, } )
Functions ¶
func CopyFile ¶
CopyFile copies the source file to the destination. Any existing file will be overwritten, and file attributes will not be copied.
func Delete ¶
func Delete(filename string)
Delete removes the specified path from disk. It will buffer the delete if the system initialized the buffering.
func FileExists ¶
FileExists checks if a file already exists on disk.
func GenerateTimeFileNameStr ¶
func GenerateTimeFileNameStr() string
GenerateTimeFileNameStr generates an ISO 8601 representation of the current time, with any colons and dashes removed so that it can be used as part of a filename.
func GetDirectories ¶
GetDirectories returns a list of directories found using the supplied path.
func GetFolderFileType ¶
GetFolderFileType returns the extension of the first file in the media folder.
func GetUniqueFolder ¶
GetUniqueFolder creates a unique folder name using a base folder name.
func GetUniqueName ¶
GetUniqueName creates a unique filename using a base filename.
func InitializeDeleteBuffer ¶
func InitializeDeleteBuffer(bufferTimeSeconds int)
InitializeDeleteBuffer will initialize the routines and channels used to buffer deletions.
func IsArchiveFile ¶
IsArchiveFile returns true if the specified path is an archive.
func IsDatasetDir ¶
IsDatasetDir indicates whether or not a directory contains a single d3m dataset.
func IsDirectory ¶
IsDirectory checks if a path is a directory.
func IsInDirectory ¶
IsInDirectory indicates whether or not the supplied path is in a directory.
func ReadCSVFile ¶
ReadCSVFile reads a csv file and returns the string slice representation of the data.
func ReadCSVHeader ¶
ReadCSVHeader reads the first line of a CSV file.
func RemoveContents ¶
RemoveContents removes the files and directories from the supplied parent. includeParent will remove the parent directory as well if true.
func StartMemLogging ¶
func StartMemLogging(intervalMs int)
StartMemLogging starts logging memory usage at the caller-specified interval.
func WriteFileWithDirs ¶
WriteFileWithDirs writes the file and creates any missing directories along the way.
Types ¶
type Handler ¶
Handler is the interface that provides the PopEvent function, which is invoked whenever an event is popped from the timer stack.
type TimerStack ¶
type TimerStack struct {
// contains filtered or unexported fields
}
TimerStack provides a stack that allows for nested timing of named system events. Note that the timer is not thread safe.
func NewPrintTimerStack ¶
func NewPrintTimerStack() *TimerStack
NewPrintTimerStack creates a new timer stack with basic output to standard out.
func NewTimerStack ¶
func NewTimerStack(popEventHandler Handler) *TimerStack
NewTimerStack creates a new timer stack.
func (*TimerStack) Pop ¶
func (p *TimerStack) Pop()
Pop pops the top of the timer stack and invokes the pop handler, passing it the popped event name and the time elapsed since the event was pushed.
func (*TimerStack) Push ¶
func (p *TimerStack) Push(eventName string)
Push pushes an event onto the timer stack, saving its start time.