Documentation ¶
Overview ¶
Package queriesjson contains the logic for collecting queries.json information.
Index ¶
- func AddRowsToSet(queriesrows []QueriesRow, profilesToCollect map[string]string)
- type HistoryJobs
- type QueriesRow
- func CollectJobHistoryJSON(jobhistoryjsons []string) []QueriesRow
- func CollectQueriesJSON(queriesjsons []string) []QueriesRow
- func GetHighCostJobs(queriesrows []QueriesRow, limit int) []QueriesRow
- func GetRecentErrorJobs(queriesrows []QueriesRow, limit int) []QueriesRow
- func GetSlowExecJobs(queriesrows []QueriesRow, limit int) []QueriesRow
- func GetSlowPlanningJobs(queriesrows []QueriesRow, limit int) []QueriesRow
- func ReadGzFile(filename string) ([]QueriesRow, error)
- func ReadHistoryJobsJSONFile(filename string) ([]QueriesRow, error)
- func ReadJSONFile(filename string) ([]QueriesRow, error)
- type Row
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func AddRowsToSet ¶
func AddRowsToSet(queriesrows []QueriesRow, profilesToCollect map[string]string)
Types ¶
type HistoryJobs ¶ added in v0.5.0
type QueriesRow ¶
type QueriesRow struct {
	QueryID string `json:"queryId"`
	// Context string `json:"context"`
	QueryText string  `json:"queryText"`
	Start     float64 `json:"start"`
	// Finish int64 `json:"finish"`
	Outcome string `json:"outcome"`
	// OutcomeReason string `json:"outcomeReason"`
	// Username string `json:"username"`
	// InputRecords int `json:"inputRecords"`
	// InputBytes int `json:"inputBytes"`
	// OutputRecords int `json:"outputRecords"`
	// OutputBytes int `json:"outputBytes"`
	// RequestType string `json:"requestType"`
	QueryType string `json:"queryType"`
	// ParentsList []any `json:"parentsList"`
	// Accelerated bool `json:"accelerated"`
	// ReflectionRelationships []any `json:"reflectionRelationships"`
	QueryCost float64 `json:"queryCost"`
	// QueueName string `json:"queueName"`
	// PoolWaitTime int `json:"poolWaitTime"`
	// PendingTime int `json:"pendingTime"`
	// MetadataRetrievalTime int `json:"metadataRetrievalTime"`
	// PlanningTime int `json:"planningTime"`
	// EngineStartTime int `json:"engineStartTime"`
	// QueuedTime int `json:"queuedTime"`
	ExecutionPlanningTime float64 `json:"executionPlanningTime"`
	// StartingTime int `json:"startingTime"`
	RunningTime float64 `json:"runningTime"`
}
func CollectJobHistoryJSON ¶ added in v0.5.0
func CollectJobHistoryJSON(jobhistoryjsons []string) []QueriesRow
func CollectQueriesJSON ¶
func CollectQueriesJSON(queriesjsons []string) []QueriesRow
func GetHighCostJobs ¶
func GetHighCostJobs(queriesrows []QueriesRow, limit int) []QueriesRow
func GetRecentErrorJobs ¶
func GetRecentErrorJobs(queriesrows []QueriesRow, limit int) []QueriesRow
func GetSlowExecJobs ¶
func GetSlowExecJobs(queriesrows []QueriesRow, limit int) []QueriesRow
func GetSlowPlanningJobs ¶
func GetSlowPlanningJobs(queriesrows []QueriesRow, limit int) []QueriesRow
func ReadGzFile ¶
func ReadGzFile(filename string) ([]QueriesRow, error)
func ReadHistoryJobsJSONFile ¶ added in v0.5.0
func ReadHistoryJobsJSONFile(filename string) ([]QueriesRow, error)
func ReadJSONFile ¶
func ReadJSONFile(filename string) ([]QueriesRow, error)
type Row ¶ added in v0.5.0
type Row struct {
	JobID                       string  `json:"job_id"`
	Status                      string  `json:"status"`
	QueryType                   string  `json:"query_type"`
	UserName                    string  `json:"user_name"`
	QueriedDatasets             any     `json:"queried_datasets"`
	ScannedDatasets             any     `json:"scanned_datasets"`
	ExecutionCPUTimeNs          int     `json:"execution_cpu_time_ns"`
	AttemptCount                int     `json:"attempt_count"`
	SubmittedTs                 string  `json:"submitted_ts"`
	AttemptStartedTs            string  `json:"attempt_started_ts"`
	MetadataRetrievalTs         any     `json:"metadata_retrieval_ts"`
	PlanningStartTs             any     `json:"planning_start_ts"`
	QueryEnqueuedTs             any     `json:"query_enqueued_ts"`
	EngineStartTs               any     `json:"engine_start_ts"`
	ExecutionPlanningStartTs    any     `json:"execution_planning_start_ts"`
	ExecutionStartTs            any     `json:"execution_start_ts"`
	FinalStateTs                string  `json:"final_state_ts"`
	SubmittedEpoch              int64   `json:"submitted_epoch"`
	AttemptStartedEpoch         int64   `json:"attempt_started_epoch"`
	MetadataRetrievalEpoch      int     `json:"metadata_retrieval_epoch"`
	PlanningStartEpoch          int     `json:"planning_start_epoch"`
	QueryEnqueuedEpoch          int     `json:"query_enqueued_epoch"`
	EngineStartEpoch            int     `json:"engine_start_epoch"`
	ExecutionPlanningStartEpoch int     `json:"execution_planning_start_epoch,omitempty"`
	ExecutionStartEpoch         int     `json:"execution_start_epoch"`
	FinalStateEpoch             int64   `json:"final_state_epoch"`
	PlannerEstimatedCost        float64 `json:"planner_estimated_cost"`
	RowsScanned                 int     `json:"rows_scanned"`
	BytesScanned                int     `json:"bytes_scanned"`
	RowsReturned                int     `json:"rows_returned"`
	BytesReturned               int     `json:"bytes_returned"`
	Accelerated                 bool    `json:"accelerated"`
	QueueName                   any     `json:"queue_name"`
	Engine                      any     `json:"engine"`
	ExecutionNodes              any     `json:"execution_nodes"`
	MemoryAvailable             int     `json:"memory_available"`
	ErrorMsg                    string  `json:"error_msg"`
	Query                       string  `json:"query"`
	QueryChunks                 any     `json:"query_chunks"`
	ReflectionMatches           any     `json:"reflection_matches"`
	StartingTs                  any     `json:"starting_ts"`
	StartingEpoch               int     `json:"starting_epoch"`
	ExecutionPlanningStart      int64   `json:"execution_planning_start_,omitempty"`
}
Click to show internal directories.
Click to hide internal directories.