sqlutil

package
v0.236.0
Published: Jun 17, 2024 License: Apache-2.0 Imports: 15 Imported by: 41

Documentation

Overview

Package sqlutil provides multiple utilities for working with SQL data sources.

Index

Constants

const STRING = "string"

Variables

var (
	// NullStringConverter creates a *string using the scan type of `sql.NullString`
	NullStringConverter = Converter{
		Name:          "nullable string converter",
		InputScanType: reflect.TypeOf(sql.NullString{}),
		InputTypeName: "STRING",
		FrameConverter: FrameConverter{
			FieldType: data.FieldTypeNullableString,
			ConverterFunc: func(n interface{}) (interface{}, error) {
				v := n.(*sql.NullString)

				if !v.Valid {
					return (*string)(nil), nil
				}

				f := v.String
				return &f, nil
			},
		},
	}

	// NullDecimalConverter creates a *float64 using the scan type of `sql.NullFloat64`
	NullDecimalConverter = Converter{
		Name:          "NULLABLE decimal converter",
		InputScanType: reflect.TypeOf(sql.NullFloat64{}),
		InputTypeName: "DOUBLE",
		FrameConverter: FrameConverter{
			FieldType: data.FieldTypeNullableFloat64,
			ConverterFunc: func(n interface{}) (interface{}, error) {
				v := n.(*sql.NullFloat64)

				if !v.Valid {
					return (*float64)(nil), nil
				}

				f := v.Float64
				return &f, nil
			},
		},
	}

	// NullInt64Converter creates a *int64 using the scan type of `sql.NullInt64`
	NullInt64Converter = Converter{
		Name:          "NULLABLE int64 converter",
		InputScanType: reflect.TypeOf(sql.NullInt64{}),
		InputTypeName: "INTEGER",
		FrameConverter: FrameConverter{
			FieldType: data.FieldTypeNullableInt64,
			ConverterFunc: func(n interface{}) (interface{}, error) {
				v := n.(*sql.NullInt64)

				if !v.Valid {
					return (*int64)(nil), nil
				}

				f := v.Int64
				return &f, nil
			},
		},
	}

	// NullInt32Converter creates a *int32 using the scan type of `sql.NullInt32`
	NullInt32Converter = Converter{
		Name:          "NULLABLE int32 converter",
		InputScanType: reflect.TypeOf(sql.NullInt32{}),
		InputTypeName: "INTEGER",
		FrameConverter: FrameConverter{
			FieldType: data.FieldTypeNullableInt32,
			ConverterFunc: func(n interface{}) (interface{}, error) {
				v := n.(*sql.NullInt32)

				if !v.Valid {
					return (*int32)(nil), nil
				}

				f := v.Int32
				return &f, nil
			},
		},
	}

	// NullInt16Converter creates a *int16 using the scan type of `sql.NullInt16`
	NullInt16Converter = Converter{
		Name:          "NULLABLE int16 converter",
		InputScanType: reflect.TypeOf(sql.NullInt16{}),
		InputTypeName: "INTEGER",
		FrameConverter: FrameConverter{
			FieldType: data.FieldTypeNullableInt16,
			ConverterFunc: func(n any) (any, error) {
				v := n.(*sql.NullInt16)

				if !v.Valid {
					return (*int16)(nil), nil
				}

				f := v.Int16
				return &f, nil
			},
		},
	}

	// NullTimeConverter creates a *time.Time using the scan type of `sql.NullTime`
	NullTimeConverter = Converter{
		Name:          "NULLABLE time.Time converter",
		InputScanType: reflect.TypeOf(sql.NullTime{}),
		InputTypeName: "TIMESTAMP",
		FrameConverter: FrameConverter{
			FieldType: data.FieldTypeNullableTime,
			ConverterFunc: func(n interface{}) (interface{}, error) {
				v := n.(*sql.NullTime)

				if !v.Valid {
					return (*time.Time)(nil), nil
				}

				f := v.Time
				return &f, nil
			},
		},
	}

	// NullBoolConverter creates a *bool using the scan type of `sql.NullBool`
	NullBoolConverter = Converter{
		Name:          "nullable bool converter",
		InputScanType: reflect.TypeOf(sql.NullBool{}),
		InputTypeName: "BOOLEAN",
		FrameConverter: FrameConverter{
			FieldType: data.FieldTypeNullableBool,
			ConverterFunc: func(n interface{}) (interface{}, error) {
				v := n.(*sql.NullBool)

				if !v.Valid {
					return (*bool)(nil), nil
				}

				return &v.Bool, nil
			},
		},
	}

	// NullByteConverter creates a *string using the scan type of `sql.NullByte`
	NullByteConverter = Converter{
		Name:          "nullable byte converter",
		InputScanType: reflect.TypeOf(sql.NullByte{}),
		InputTypeName: "BYTE",
		FrameConverter: FrameConverter{
			FieldType: data.FieldTypeNullableString,
			ConverterFunc: func(n any) (any, error) {
				v := n.(*sql.NullByte)

				if !v.Valid {
					return (*string)(nil), nil
				}

				val := string(v.Byte)
				return &val, nil
			},
		},
	}
)
var DefaultMacros = Macros{
	"interval":    macroInterval,
	"interval_ms": macroIntervalMS,
	"timeFilter":  macroTimeFilter,
	"timeFrom":    macroTimeFrom,
	"timeGroup":   macroTimeGroup,
	"timeTo":      macroTimeTo,
	"table":       macroTable,
	"column":      macroColumn,
}
var (
	// ErrorBadArgumentCount is returned from macros when the wrong number of arguments were provided
	ErrorBadArgumentCount = errors.New("unexpected number of arguments")
)
var ErrorJSON = errors.New("error unmarshaling query JSON to the Query Model")
var IntOrFloatToNullableFloat64 = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableFloat64,
	Converter: func(v interface{}) (interface{}, error) {
		var ptr *float64
		if v == nil {
			return ptr, nil
		}

		switch val := v.(type) {
		case float64:
			return &val, nil
		case float32:
			fval := float64(val)
			return &fval, nil
		case int:
			fval := float64(val)
			return &fval, nil
		case int8:
			fval := float64(val)
			return &fval, nil
		case int16:
			fval := float64(val)
			return &fval, nil
		case int32:
			fval := float64(val)
			return &fval, nil
		case int64:
			fval := float64(val)
			return &fval, nil
		case uint:
			fval := float64(val)
			return &fval, nil
		case uint8:
			fval := float64(val)
			return &fval, nil
		case uint16:
			fval := float64(val)
			return &fval, nil
		case uint32:
			fval := float64(val)
			return &fval, nil
		case uint64:
			fval := float64(val)
			return &fval, nil
		}

		return ptr, toConversionError("int or float", v)
	},
}

IntOrFloatToNullableFloat64 returns an error if the input is not a variation of int or float.

NullConverters is a map of data type names (from reflect.TypeOf(...).String()) to Converters. Converters supplied here are used as defaults for fields that do not have a supplied Converter.

var TimeToNullableTime = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableTime,
	Converter: func(v interface{}) (interface{}, error) {
		if v == nil {
			return nil, nil
		}
		val, ok := v.(time.Time)
		if ok {
			return &val, nil
		}
		return v, toConversionError("time", v)
	},
}

TimeToNullableTime returns an error if the input is not a time

Functions

func Append added in v0.94.0

func Append(frame *data.Frame, row []interface{}, converters ...Converter) error

Append appends the row to the dataframe, using the converters to convert the scanned value into a value that can be put into a data.Frame

func DefaultConverterFunc added in v0.94.0

func DefaultConverterFunc(t reflect.Type) func(in interface{}) (interface{}, error)

DefaultConverterFunc assumes that the scanned value, in, is already a type that can be put into a dataframe.

func ErrorFrameFromQuery added in v0.148.0

func ErrorFrameFromQuery(query *Query) data.Frames

ErrorFrameFromQuery returns error frames with empty data and meta fields.

func FrameFromRows

func FrameFromRows(rows *sql.Rows, rowLimit int64, converters ...Converter) (*data.Frame, error)

FrameFromRows returns a new Frame populated with the data from rows. The field types will be nullable ([]*T) if the SQL column is nullable or if the nullable property is unknown. Otherwise, the field types will be non-nullable ([]T) types.

The number of rows scanned is limited to rowLimit. If rowLimit is reached, a data.Notice with a warning severity will be attached to the frame. If rowLimit is less than 0, there is no limit.

Fields will be named to match the names of the SQL columns.

A converter must be supplied in order to support data types that are scanned from sql.Rows but not supported in data.Frame. The converter defines what type to use for scanning, what type to place in the data frame, and a function for converting from one to the other. If you find yourself here after upgrading, you can continue to use your StringConverters by converting them with the `ToConverters` function.
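
For example, the common path can be sketched as follows. This is a hedged illustration rather than a documented example: the driver, DSN, table, and column names are placeholders, and the import paths assume the Grafana plugin SDK for Go.

package main

import (
	"database/sql"
	"log"

	_ "github.com/go-sql-driver/mysql" // hypothetical driver choice

	"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
)

func main() {
	db, err := sql.Open("mysql", "user:pass@tcp(localhost:3306)/example") // placeholder DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	rows, err := db.Query("SELECT time, host, value FROM metrics") // placeholder query
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	// Scan at most 10000 rows; the nullable converters map NULL columns to
	// nil pointers instead of failing the scan.
	frame, err := sqlutil.FrameFromRows(rows, 10000,
		sqlutil.NullTimeConverter, sqlutil.NullStringConverter, sqlutil.NullDecimalConverter)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("frame has %d fields and %d rows", len(frame.Fields), frame.Rows())
}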

func Interpolate added in v0.148.0

func Interpolate(query *Query, macros Macros) (string, error)

Interpolate returns an interpolated query string given a backend.DataQuery
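
A minimal sketch of interpolation, assuming the Grafana plugin SDK import paths and the conventional $__ macro prefix used by Grafana SQL data sources; the table and column names are placeholders, and the exact SQL each macro produces depends on its implementation.

package main

import (
	"fmt"
	"log"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
)

func main() {
	q := &sqlutil.Query{
		RawSQL: "SELECT $__column FROM $__table WHERE $__timeFilter(time)",
		Table:  "metrics", // placeholder
		Column: "value",   // placeholder
		TimeRange: backend.TimeRange{
			From: time.Now().Add(-time.Hour),
			To:   time.Now(),
		},
		Interval: time.Minute,
	}

	sqlText, err := sqlutil.Interpolate(q, sqlutil.DefaultMacros)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(sqlText) // the raw SQL with each macro replaced by a concrete fragment
}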

func NewFrame added in v0.94.0

func NewFrame(columns []string, converters ...Converter) *data.Frame

NewFrame creates a new data.Frame with empty fields given the columns and converters

func ResampleWideFrame added in v0.222.0

func ResampleWideFrame(f *data.Frame, fillMissing *data.FillMissing, timeRange backend.TimeRange, interval time.Duration) (*data.Frame, error)

ResampleWideFrame resamples the provided time-series data.Frame. This is needed when the selected query interval doesn't match the intervals of the time-series field in the data.Frame, which therefore needs to be resampled. Deprecated: only used in legacy situations; for new projects, use dataplane-based solutions.

Types

type Converter added in v0.94.0

type Converter struct {
	// Name is the name of the converter that is used to distinguish them when debugging or parsing log output
	Name string

	// InputScanType is the type that is used when (*sql.Rows).Scan(...) is called.
	// Some drivers require certain data types to be used when scanning data from sql rows, and this type should reflect that.
	InputScanType reflect.Type

	// InputTypeName is the case-sensitive name that must match the type that this converter matches
	InputTypeName string

	// InputTypeRegex will be used if not nil instead of InputTypeName
	InputTypeRegex *regexp.Regexp

	// InputColumnName is the case-sensitive name that must match the column that this converter matches
	InputColumnName string

	// FrameConverter defines how to convert the scanned value into a value that can be put into a dataframe
	FrameConverter FrameConverter

	// try to determine the type
	Dynamic bool
	// contains filtered or unexported fields
}

Converter is used to convert known types returned in sql.Row to a type usable in a dataframe.
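
As an illustrative sketch (not part of the package), here is a Converter for columns that a driver scans into sql.RawBytes, stored in the frame as a nullable string. The InputTypeName "VARBINARY" is an assumed driver-reported column type name, and the import paths assume the Grafana plugin SDK for Go.

package example

import (
	"database/sql"
	"reflect"

	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
)

// RawBytesConverter is a hypothetical converter: it scans into sql.RawBytes
// and places the value into the frame as a nullable string.
var RawBytesConverter = sqlutil.Converter{
	Name:          "raw bytes to nullable string converter",
	InputScanType: reflect.TypeOf(sql.RawBytes{}),
	InputTypeName: "VARBINARY", // assumed column type name reported by the driver
	FrameConverter: sqlutil.FrameConverter{
		FieldType: data.FieldTypeNullableString,
		ConverterFunc: func(in interface{}) (interface{}, error) {
			v := in.(*sql.RawBytes) // `in` is always a pointer to InputScanType
			if v == nil || *v == nil {
				return (*string)(nil), nil
			}
			s := string(*v)
			return &s, nil
		},
	},
}

Like the built-in nullable converters above, such a Converter can be passed to FrameFromRows, NewFrame, or Append.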

func NewDefaultConverter added in v0.94.0

func NewDefaultConverter(name string, nullable bool, t reflect.Type) Converter

NewDefaultConverter creates a Converter that assumes that the value is scannable into a String, and placed into the dataframe as a nullable string.

func ToConverters added in v0.94.0

func ToConverters(s ...StringConverter) []Converter

ToConverters creates a slice of Converters from a slice of StringConverters

type Field added in v0.134.0

type Field struct {
	// contains filtered or unexported fields
}

type FormatQueryOption added in v0.148.0

type FormatQueryOption uint32

FormatQueryOption defines how the user has chosen to represent the data

const (
	// FormatOptionTimeSeries formats the query results as a timeseries using "WideToLong"
	FormatOptionTimeSeries FormatQueryOption = iota
	// FormatOptionTable formats the query results as a table using "LongToWide"
	FormatOptionTable
	// FormatOptionLogs sets the preferred visualization to logs
	FormatOptionLogs
	// FormatOptionTrace sets the preferred visualization to trace
	FormatOptionTrace
	// FormatOptionMulti formats the query results as a timeseries using "LongToMulti"
	FormatOptionMulti
)

type FrameConverter added in v0.94.0

type FrameConverter struct {
	// FieldType is the type that is created for the dataframe field.
	// The returned value from `ConverterFunc` should match this type, otherwise the data package will panic.
	FieldType data.FieldType
	// ConverterFunc defines how to convert the scanned `InputScanType` to the supplied `FieldType`.
	// `in` is always supplied as a pointer, as it is scanned as a pointer, even if `InputScanType` is not a pointer.
	// For example, if `InputScanType` is `string`, then `in` is `*string`
	ConverterFunc func(in interface{}) (interface{}, error)
	// ConvertWithColumn is the same as ConverterFunc, but allows passing the column type
	// useful when column attributes are needed during conversion
	ConvertWithColumn func(in interface{}, col sql.ColumnType) (interface{}, error)
}

FrameConverter defines how to convert the scanned value into a value that can be put into a dataframe (OutputFieldType)

func StringFrameConverter added in v0.94.0

func StringFrameConverter(s StringConverter) FrameConverter

StringFrameConverter creates a FrameConverter from a StringConverter

type MacroFunc added in v0.148.0

type MacroFunc func(*Query, []string) (string, error)

MacroFunc defines a signature for applying a query macro. Query macro implementations are defined by users/consumers of this package.

type Macros added in v0.148.0

type Macros map[string]MacroFunc

Macros is a map of macro name to MacroFunc. The name must be regex friendly.
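
A hedged sketch of extending the default macro set with a user-defined macro; the macro name, its output, and the merging approach are illustrative assumptions rather than behavior documented by this package.

package example

import (
	"strconv"

	"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
)

// EpochFromMacro is a hypothetical macro that expands to the query's start
// time as a Unix epoch in seconds. It expects no arguments.
func EpochFromMacro(q *sqlutil.Query, args []string) (string, error) {
	if len(args) > 0 && args[0] != "" {
		return "", sqlutil.ErrorBadArgumentCount
	}
	return strconv.FormatInt(q.TimeRange.From.Unix(), 10), nil
}

// AllMacros returns the default macros plus the custom one, for use with Interpolate.
func AllMacros() sqlutil.Macros {
	m := sqlutil.Macros{"epochFrom": EpochFromMacro}
	for name, fn := range sqlutil.DefaultMacros {
		m[name] = fn
	}
	return m
}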

type Query added in v0.148.0

type Query struct {
	RawSQL         string            `json:"rawSql"`
	Format         FormatQueryOption `json:"format"`
	ConnectionArgs json.RawMessage   `json:"connectionArgs"`

	RefID         string            `json:"-"`
	Interval      time.Duration     `json:"-"`
	TimeRange     backend.TimeRange `json:"-"`
	MaxDataPoints int64             `json:"-"`
	FillMissing   *data.FillMissing `json:"fillMode,omitempty"`

	// Macros
	Schema string `json:"schema,omitempty"`
	Table  string `json:"table,omitempty"`
	Column string `json:"column,omitempty"`
}

Query is the model that represents the query that users submit from the panel/query editor. For the sake of backwards compatibility, when making changes to this type, ensure that changes are only additive.

func GetQuery added in v0.148.0

func GetQuery(query backend.DataQuery) (*Query, error)

GetQuery returns a Query object given a backend.DataQuery using json.Unmarshal
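
A minimal sketch of calling GetQuery inside a backend QueryData handler, assuming the Grafana plugin SDK's backend package; error handling is abbreviated.

package example

import (
	"context"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
)

// QueryData is a hypothetical handler that unmarshals each backend.DataQuery
// into a sqlutil.Query before interpolating and executing it.
func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()
	for _, dq := range req.Queries {
		q, err := sqlutil.GetQuery(dq)
		if err != nil {
			resp.Responses[dq.RefID] = backend.DataResponse{Error: err}
			continue
		}
		// q.RawSQL, q.TimeRange, q.Interval, and q.Format are now available
		// for interpolation and execution.
		_ = q
	}
	return resp, nil
}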

func (*Query) WithSQL added in v0.148.0

func (q *Query) WithSQL(query string) *Query

WithSQL copies the Query, but with a different RawSQL value. This is mostly useful in the Interpolate function, where the RawSQL value is modified in a loop.

type ResultSetIterator added in v0.178.0

type ResultSetIterator interface {
	NextResultSet() bool
}

type RowConverter added in v0.149.0

type RowConverter struct {
	Row        *ScanRow
	Converters []Converter
}

func MakeScanRow added in v0.94.0

func MakeScanRow(colTypes []*sql.ColumnType, colNames []string, converters ...Converter) (*RowConverter, error)

MakeScanRow creates a new scan row given the column types and names. Applicable converters will substitute the SQL scan type with the one provided by the converter. The list of returned converters is the same length as the SQL columns and corresponds with the columns at the same index (e.g. the value at slice element 3 corresponds with the converter at slice element 3). If no converter is provided for a column whose type does not fit into a dataframe, it is skipped.

func NewRowConverter added in v0.149.0

func NewRowConverter() *RowConverter

func (*RowConverter) NewScannableRow added in v0.149.0

func (r *RowConverter) NewScannableRow() []any
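
Taken together, MakeScanRow, NewScannableRow, NewFrame, and Append allow the row-by-row path to be driven by hand instead of calling FrameFromRows. The following is a hedged sketch under the assumption that rows is an open *sql.Rows; it is not a reproduction of FrameFromRows' internals.

package example

import (
	"database/sql"

	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
)

// frameFromRowsByHand is a hypothetical helper that drives the scan-row
// machinery manually.
func frameFromRowsByHand(rows *sql.Rows, converters ...sqlutil.Converter) (*data.Frame, error) {
	colTypes, err := rows.ColumnTypes()
	if err != nil {
		return nil, err
	}
	colNames, err := rows.Columns()
	if err != nil {
		return nil, err
	}

	rc, err := sqlutil.MakeScanRow(colTypes, colNames, converters...)
	if err != nil {
		return nil, err
	}

	frame := sqlutil.NewFrame(colNames, rc.Converters...)
	for rows.Next() {
		row := rc.NewScannableRow()
		if err := rows.Scan(row...); err != nil {
			return nil, err
		}
		if err := sqlutil.Append(frame, row, rc.Converters...); err != nil {
			return nil, err
		}
	}
	return frame, rows.Err()
}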

type RowIterator added in v0.134.0

type RowIterator interface {
	Next() bool
	Scan(dest ...interface{}) error
}

type Rows added in v0.134.0

type Rows struct {
	// contains filtered or unexported fields
}

func (Rows) Next added in v0.134.0

func (rs Rows) Next() bool

func (Rows) NextResultSet added in v0.178.0

func (rs Rows) NextResultSet() bool

func (Rows) Scan added in v0.134.0

func (rs Rows) Scan(dest []interface{}) error

type ScanRow added in v0.94.0

type ScanRow struct {
	Columns []string
	Types   []reflect.Type
}

A ScanRow is a container for SQL metadata for a single row. The row metadata is used to generate dataframe fields and a slice that can be used with sql.Scan.

func NewScanRow added in v0.94.0

func NewScanRow(length int) *ScanRow

NewScanRow creates a new ScanRow with a length of `length`. Use the `Set` function to manually set elements at specific indices.

func (*ScanRow) Append added in v0.94.0

func (s *ScanRow) Append(name string, colType reflect.Type)

Append adds data to the end of the list of types and columns

func (*ScanRow) NewScannableRow added in v0.94.0

func (s *ScanRow) NewScannableRow() []interface{}

NewScannableRow creates a slice where each element is usable in a call to `(database/sql.Rows).Scan`, i.e. each element is a pointer.

func (*ScanRow) Set added in v0.94.0

func (s *ScanRow) Set(i int, name string, colType reflect.Type)

Set sets the internal data at i

type StringConverter

type StringConverter struct {
	// Name is an optional property that can be used to identify a converter
	Name          string
	InputScanKind reflect.Kind // reflect.Type might better or worse option?
	InputTypeName string

	// Conversion func may be nil to do no additional operations on the string conversion.
	ConversionFunc func(in *string) (*string, error)

	// If the Replacer is not nil, the replacement will be performed.
	Replacer *StringFieldReplacer
}

StringConverter can be used to store types not supported by a Frame into a *string. When scanning, if the Kind of a column's scan type returned by the SQL response matches InputScanKind, then the conversion func will be run on the row. Note: a Converter should be favored over a StringConverter, as not all SQL rows can be scanned into a string. This type is only here for backwards compatibility.

func (StringConverter) ToConverter added in v0.94.0

func (s StringConverter) ToConverter() Converter

ToConverter turns this StringConverter into a Converter, using the ScanType of string
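
A hedged sketch of bridging a legacy StringConverter into the Converter API; the InputTypeName "DECIMAL" and the trimming behavior are illustrative assumptions.

package example

import (
	"reflect"
	"strings"

	"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
)

// TrimmedDecimalText is a hypothetical legacy StringConverter that trims
// whitespace from decimal columns scanned as text.
var TrimmedDecimalText = sqlutil.StringConverter{
	Name:          "trimmed decimal text",
	InputScanKind: reflect.Slice,
	InputTypeName: "DECIMAL", // assumed column type name reported by the driver
	ConversionFunc: func(in *string) (*string, error) {
		if in == nil {
			return nil, nil
		}
		s := strings.TrimSpace(*in)
		return &s, nil
	},
}

// Converters bridges the legacy converter so it can be passed to
// FrameFromRows, NewFrame, or Append.
var Converters = sqlutil.ToConverters(TrimmedDecimalText)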

type StringFieldReplacer

type StringFieldReplacer struct {
	OutputFieldType data.FieldType
	ReplaceFunc     func(in *string) (interface{}, error)
}

StringFieldReplacer is used to replace a *string Field in a Frame. The type returned by the ReplaceFunc must match the type of elements of VectorType. Both properties must be non-nil. Note, a Converter should be favored over a StringConverter as not all SQL rows can be scanned into a string. This type is only here for backwards compatibility.
