Documentation ¶
Index ¶
- Constants
- func Aggregator() *aggregator
- func FilterFromExpr(n *ql.ASTNode) internalFilter
- func InMemoryBuffer() *inmemBuffer
- func Initialized() bool
- func IteratorEncodeJSON(ctx context.Context, w io.Writer, iter Iterator, initTarget func() ValueSetter) (err error)
- func IteratorSorting(iter Iterator) filter.SortExprSet
- func New(log *zap.Logger, inDev bool) (*service, error)
- func NormalizeAttrNames(name string) string
- func PreLoadCursor(ctx context.Context, iter Iterator, limit uint, reverse bool, r ValueGetter) (out *filter.PagingCursor, err error)
- func RegisterConnector(fn ConnectorFn, tt ...string)
- func RegisterDriver(d Driver)
- func SensitivityLevelIndex(levels ...SensitivityLevel) *sensitivityLevelIndex
- func Service() *service
- func SetGlobal(svc *service, err error)
- type Adder
- type Aggregate
- func (def *Aggregate) Analysis() map[string]OpAnalysis
- func (def *Aggregate) Analyze(ctx context.Context) (err error)
- func (def *Aggregate) Attributes() [][]AttributeMapping
- func (def *Aggregate) Identifier() string
- func (def *Aggregate) Optimize(reqFilter internalFilter) (rspFilter internalFilter, err error)
- func (def *Aggregate) Sources() []string
- type AggregateAttr
- type Alteration
- type AlterationSet
- type Attribute
- type AttributeAdd
- type AttributeCodecType
- type AttributeDelete
- type AttributeMapping
- type AttributeReEncode
- type AttributeReType
- type AttributeSet
- type AttributeType
- type Buffer
- type Codec
- type CodecAlias
- type CodecPlain
- type CodecRecordValueSetJSON
- type Connection
- type ConnectionCloser
- type ConnectionConfig
- type ConnectionParams
- type ConnectionWrap
- type ConnectorFn
- type Datasource
- func (def *Datasource) Analysis() map[string]OpAnalysis
- func (def *Datasource) Analyze(ctx context.Context) (err error)
- func (def *Datasource) Attributes() [][]AttributeMapping
- func (def *Datasource) Identifier() string
- func (def *Datasource) Optimize(req internalFilter) (res internalFilter, err error)
- func (def *Datasource) Sources() []string
- type Driver
- type DriverConnectionConfig
- type DriverConnectionParam
- type FullService
- type Index
- type IndexField
- type IndexFieldModifier
- type IndexFieldNulls
- type IndexFieldSort
- type IndexSet
- type Issue
- type Iterator
- type Join
- func (def *Join) Analysis() map[string]OpAnalysis
- func (def *Join) Analyze(ctx context.Context) (err error)
- func (def *Join) Attributes() [][]AttributeMapping
- func (def *Join) Identifier() string
- func (def *Join) Optimize(req internalFilter) (res internalFilter, err error)
- func (def *Join) Sources() []string
- type JoinPredicate
- type Link
- func (def *Link) Analysis() map[string]OpAnalysis
- func (def *Link) Analyze(ctx context.Context) (err error)
- func (def *Link) Attributes() [][]AttributeMapping
- func (def *Link) Identifier() string
- func (def *Link) Optimize(req internalFilter) (res internalFilter, err error)
- func (def *Link) Sources() []string
- type LinkPredicate
- type MapProperties
- type Model
- type ModelAdd
- type ModelDelete
- type ModelDiff
- type ModelDiffSet
- type ModelRef
- type ModelSet
- func (aa ModelSet) FilterByReferenced(b *Model) (out ModelSet)
- func (mm ModelSet) FindByIdent(ident string) *Model
- func (mm ModelSet) FindByRefs(refs map[string]any) *Model
- func (mm ModelSet) FindByResourceID(resourceID uint64) *Model
- func (mm ModelSet) FindByResourceIdent(resourceType, resourceIdent string) *Model
- type OpAnalysis
- type Operation
- type OperationSet
- func CreateOperations(requested ...Operation) (required OperationSet)
- func DeleteOperations(requested ...Operation) (required OperationSet)
- func FullOperations() (cc OperationSet)
- func LookupOperations(requested ...Operation) (required OperationSet)
- func SearchOperations(requested ...Operation) (required OperationSet)
- func UpdateOperations(requested ...Operation) (required OperationSet)
- func (aa OperationSet) Diff(bb OperationSet) (cc OperationSet)
- func (aa OperationSet) Intersect(bb OperationSet) (cc OperationSet)
- func (aa OperationSet) IsSubset(bb ...Operation) bool
- func (aa OperationSet) IsSuperset(bb ...Operation) bool
- func (aa OperationSet) Union(bb OperationSet) (cc OperationSet)
- type OrderedBuffer
- type PKValues
- type Pipeline
- type PipelineStep
- type Row
- func (r *Row) CountValues() map[string]uint
- func (r *Row) GetValue(name string, pos uint) (any, error)
- func (r *Row) Reset()
- func (r Row) SelectGVal(ctx context.Context, wk string) (interface{}, error)
- func (r *Row) SetValue(name string, pos uint, v any) error
- func (r *Row) String() string
- func (r *Row) WithValue(name string, pos uint, v any) *Row
- type SensitivityLevel
- type SensitivityLevelSet
- type SensitivityLevelUsage
- type SimpleAttr
- type Type
- type TypeBlob
- type TypeBoolean
- type TypeDate
- type TypeEnum
- type TypeGeometry
- type TypeID
- type TypeJSON
- type TypeNumber
- type TypeNumberStoreNativeType
- type TypeRef
- type TypeText
- type TypeTime
- type TypeTimestamp
- type TypeUUID
- type ValueGetter
- type ValueSetter
Constants ¶
const (
    AttributeMissing             modelDiffType = "attributeMissing"
    AttributeTypeMissmatch       modelDiffType = "typeMissmatch"
    AttributeSensitivityMismatch modelDiffType = "sensitivityMismatch"
    AttributeCodecMismatch       modelDiffType = "codecMismatch"
)
const (
    IndexFieldSortDesc IndexFieldSort = -1
    IndexFieldSortAsc  IndexFieldSort = 1

    IndexFieldNullsLast  IndexFieldNulls = -1
    IndexFieldNullsFirst IndexFieldNulls = 1

    IndexFieldModifierLower = "LOWERCASE"
)
const (
    // operation computation indicators
    CostUnknown opCost = iota
    CostFree
    CostCheep
    CostAcceptable
    CostExpensive
    CostInfinite
)
const (
    // dataset size indicators
    SizeUnknown dsSize = iota
    SizeTiny
    SizeSmall
    SizeMedium
    SizeLarge
)
const (
    OpAnalysisIterate   string = "iterate"
    OpAnalysisAggregate string = "aggregate"
    OpAnalysisJoin      string = "join"
)
const (
LinkRefIdent = "$sys.ref"
)
Variables ¶
This section is empty.
Functions ¶
func Aggregator ¶
func Aggregator() *aggregator
Aggregator initializes a new aggregator for the given set of mappings
The aggregator is not goroutine safe; consider defining multiple aggregators and then combining them.
func FilterFromExpr ¶
func InMemoryBuffer ¶
func InMemoryBuffer() *inmemBuffer
InMemoryBuffer initializes a new DAL buffer where the data is kept in memory
func Initialized ¶
func Initialized() bool
func IteratorEncodeJSON ¶
func IteratorEncodeJSON(ctx context.Context, w io.Writer, iter Iterator, initTarget func() ValueSetter) (err error)
IteratorEncodeJSON is a helper function that encodes each item from the iterator as JSON and writes it to the given io.Writer.
The target initialization function is used intentionally to avoid reflection.
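A minimal sketch of how the helper might be called, assuming an Iterator is already in hand and Row is used as the decode target:

    // iter is an Iterator obtained elsewhere (for example from Connection.Search)
    err := IteratorEncodeJSON(ctx, os.Stdout, iter, func() ValueSetter {
        // a fresh Row is created for every item instead of relying on reflection
        return &Row{}
    })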
func IteratorSorting ¶
func IteratorSorting(iter Iterator) filter.SortExprSet
IteratorSorting returns the iterator sorting. @todo this should be moved to the Iterator
func New ¶
New creates a DAL service with the primary connection
It needs an established and working connection to the primary store
func NormalizeAttrNames ¶
NormalizeAttrNames normalizes system identifiers to attribute identifiers. @todo this is temporary until we revisit field referencing
The normalization step must exist since legacy versions allowed multiple identifier variations for the same system field, such as created_at and createdAt.
func PreLoadCursor ¶
func PreLoadCursor(ctx context.Context, iter Iterator, limit uint, reverse bool, r ValueGetter) (out *filter.PagingCursor, err error)
PreLoadCursor preloads the iterator, checks that the value exists, and then returns the cursor. @todo this should be moved to the Iterator
func RegisterConnector ¶
func RegisterConnector(fn ConnectorFn, tt ...string)
RegisterConnector registers a new connector for the given DSN scheme(s)
In case of a duplicate scheme, the latter registration overwrites the prior one
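A hedged sketch of registering a connector; the "example" scheme and the openExampleConnection helper are hypothetical:

    RegisterConnector(func(ctx context.Context, dsn string) (Connection, error) {
        // openExampleConnection is a hypothetical constructor returning a Connection
        return openExampleConnection(ctx, dsn)
    }, "example")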
func RegisterDriver ¶
func RegisterDriver(d Driver)
func SensitivityLevelIndex ¶
func SensitivityLevelIndex(levels ...SensitivityLevel) *sensitivityLevelIndex
Types ¶
type Adder ¶
type Adder interface {
    // Add adds a new ValueGetter to the buffer
    Add(context.Context, ValueGetter) (err error)
}
type Aggregate ¶
type Aggregate struct {
    Ident     string
    RelSource string
    Filter    filter.Filter

    Group         []AggregateAttr
    OutAttributes []AggregateAttr

    SourceAttributes []AttributeMapping

    // contains filtered or unexported fields
}
Aggregate produces a series of aggregated rows from the provided sources based on the specified group.
func (*Aggregate) Analysis ¶
func (def *Aggregate) Analysis() map[string]OpAnalysis
func (*Aggregate) Attributes ¶
func (def *Aggregate) Attributes() [][]AttributeMapping
func (*Aggregate) Identifier ¶
type AggregateAttr ¶
type AggregateAttr struct {
    MultiValue bool
    Key        bool

    // @todo change; temporary for compose service
    RawExpr string

    Identifier string
    Label      string
    Expression *ql.ASTNode
    Type       Type
    Store      Codec
}
AggregateAttr is a simple wrapper to outline aggregated attribute definitions
type Alteration ¶
type Alteration struct {
    ID        uint64
    BatchID   uint64
    DependsOn uint64

    Resource     string
    ResourceType string
    ConnectionID uint64

    AttributeAdd      *AttributeAdd
    AttributeDelete   *AttributeDelete
    AttributeReType   *AttributeReType
    AttributeReEncode *AttributeReEncode
    ModelAdd          *ModelAdd
    ModelDelete       *ModelDelete
}
type AlterationSet ¶
type AlterationSet []*Alteration
func (AlterationSet) Merge ¶
func (aa AlterationSet) Merge(bb AlterationSet) (cc AlterationSet)
Merge merges the two alteration slices
type Attribute ¶
type Attribute struct {
    Ident string `json:"ident"`
    Label string `json:"label"`

    SensitivityLevelID uint64 `json:"sensitivityLevelID"`

    MultiValue bool `json:"multiValue"`

    PrimaryKey bool `json:"primaryKey"`

    // If attribute has SoftDeleteFlag=true we use it
    // when filtering out deleted items
    SoftDeleteFlag bool `json:"softDeleteFlag"`

    // System indicates the attribute was defined by the system
    System bool `json:"system"`

    // Is attribute sortable?
    // Note: all primary keys are sortable
    Sortable bool `json:"sortable"`

    // Can attribute be used in query expression?
    Filterable bool `json:"filterable"`

    // Store describes the strategy the underlying storage system should
    // apply to the underlying value
    Store Codec `json:"store"`

    // Type describes what the value represents and how it should be
    // encoded/decoded
    Type Type `json:"type"`
}
Attribute describes a specific value of the dataset
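A minimal sketch of an attribute definition using only fields listed above; the ident and type settings are illustrative:

    createdAt := &Attribute{
        Ident:      "createdAt",
        Label:      "Created at",
        Sortable:   true,
        Filterable: true,
        Type:       &TypeTimestamp{Timezone: true, Precision: 6},
        Store:      &CodecPlain{},
    }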
func PrimaryAttribute ¶
func (*Attribute) MarshalJSON ¶
func (*Attribute) StoreIdent ¶
func (*Attribute) UnmarshalJSON ¶
func (*Attribute) WithMultiValue ¶
func (*Attribute) WithSoftDelete ¶
type AttributeAdd ¶
type AttributeAdd struct {
Attr *Attribute `json:"attr"`
}
type AttributeCodecType ¶
type AttributeCodecType string
const (
    AttributeCodecPlain              AttributeCodecType = "corteza::dal:attribute-codec:plain"
    AttributeCodecRecordValueSetJSON AttributeCodecType = "corteza::dal:attribute-codec:record-value-set-json"
    AttributeCodecAlias              AttributeCodecType = "corteza::dal:attribute-codec:alias"
)
type AttributeDelete ¶
type AttributeDelete struct {
Attr *Attribute `json:"attr"`
}
type AttributeMapping ¶
type AttributeMapping interface {
    Identifier() string
    Expression() (expression string)
    Source() (ident string)
    Properties() MapProperties
}
AttributeMapping outlines specific attributes within a pipeline. @todo reconsider this interface; potentially remove it or split it up
type AttributeReEncode ¶
func (AttributeReEncode) MarshalJSON ¶
func (a AttributeReEncode) MarshalJSON() ([]byte, error)
func (*AttributeReEncode) UnmarshalJSON ¶
func (a *AttributeReEncode) UnmarshalJSON(data []byte) (err error)
type AttributeReType ¶
func (AttributeReType) MarshalJSON ¶
func (a AttributeReType) MarshalJSON() ([]byte, error)
func (*AttributeReType) UnmarshalJSON ¶
func (a *AttributeReType) UnmarshalJSON(data []byte) (err error)
type AttributeSet ¶
type AttributeSet []*Attribute
func (AttributeSet) FindByIdent ¶
func (aa AttributeSet) FindByIdent(ident string) *Attribute
func (AttributeSet) FindByStoreIdent ¶
func (aa AttributeSet) FindByStoreIdent(ident string) *Attribute
type AttributeType ¶
type AttributeType string
const (
    AttributeTypeID        AttributeType = "corteza::dal:attribute-type:id"
    AttributeTypeRef       AttributeType = "corteza::dal:attribute-type:ref"
    AttributeTypeTimestamp AttributeType = "corteza::dal:attribute-type:timestamp"
    AttributeTypeTime      AttributeType = "corteza::dal:attribute-type:time"
    AttributeTypeDate      AttributeType = "corteza::dal:attribute-type:date"
    AttributeTypeNumber    AttributeType = "corteza::dal:attribute-type:number"
    AttributeTypeText      AttributeType = "corteza::dal:attribute-type:text"
    AttributeTypeBoolean   AttributeType = "corteza::dal:attribute-type:boolean"
    AttributeTypeEnum      AttributeType = "corteza::dal:attribute-type:enum"
    AttributeTypeGeometry  AttributeType = "corteza::dal:attribute-type:geometry"
    AttributeTypejson      AttributeType = "corteza::dal:attribute-type:json"
    AttributeTypeBlob      AttributeType = "corteza::dal:attribute-type:blob"
    AttributeTypeUUID      AttributeType = "corteza::dal:attribute-type:uuid"
)
type Buffer ¶
type Buffer interface {
    // Seek moves the index pointer to the specified location
    // After the Seek call, a Next() call is required
    Seek(context.Context, int) error

    // Len returns the number of elements in the buffer
    Len() int

    Iterator
    Adder
}
Buffer provides a place where you can buffer the data provided by DAL. @note Buffers are currently primarily used for testing
func InMemoryBufferWith ¶
func InMemoryBufferWith(ctx context.Context, vv ...ValueGetter) (Buffer, error)
InMemoryBufferWith returns a new buffer with the given value getters
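A minimal sketch of buffering a couple of rows and iterating over them; the attribute name "id" is illustrative:

    r1 := (&Row{}).WithValue("id", 0, uint64(1))
    r2 := (&Row{}).WithValue("id", 0, uint64(2))

    buf, err := InMemoryBufferWith(ctx, r1, r2)
    if err != nil {
        return err
    }

    for buf.Next(ctx) {
        out := &Row{}
        if err = buf.Scan(out); err != nil {
            return err
        }
        // work with out ...
    }
    return buf.Err()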
type Codec ¶
type Codec interface {
    Type() AttributeCodecType
    SingleValueOnly() bool
}
Codec defines how values for a specific model attribute are retrieved or stored
If an attribute does not have a store strategy set, the store driver should use the attribute name to determine the source/destination of the value (table column, JSON document property).
type CodecAlias ¶
type CodecAlias struct {
Ident string
}
CodecAlias covers the case when the value is not stored under the attribute's own ident (for example, under a different column of an SQL table)
The value of CodecAlias.Ident is used as the base, and the value of the Attribute.Ident holding the CodecAlias is used as the alias:
Attribute{Ident: "foo", Store: CodecAlias{Ident: "bar"}} => "bar" as "foo"
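In Go terms the example above roughly reads:

    attr := &Attribute{
        Ident: "foo",
        Store: &CodecAlias{Ident: "bar"}, // value lives under "bar", exposed as "foo"
    }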
func (*CodecAlias) SingleValueOnly ¶
func (*CodecAlias) SingleValueOnly() bool
func (*CodecAlias) Type ¶
func (*CodecAlias) Type() AttributeCodecType
type CodecPlain ¶
type CodecPlain struct{}
func (*CodecPlain) SingleValueOnly ¶
func (*CodecPlain) SingleValueOnly() bool
func (*CodecPlain) Type ¶
func (*CodecPlain) Type() AttributeCodecType
type CodecRecordValueSetJSON ¶
type CodecRecordValueSetJSON struct {
Ident string
}
CodecRecordValueSetJSON defines that values are encoded/decoded into a simple JSON document of the shape { [_: string]: Array<unknown> }
This only covers Attribute{Ident: "foo", Store: CodecRecordValueSetJSON{Ident: "bar"}} => "bar"->'foo'->0
func (*CodecRecordValueSetJSON) SingleValueOnly ¶
func (*CodecRecordValueSetJSON) SingleValueOnly() bool
func (*CodecRecordValueSetJSON) Type ¶
func (*CodecRecordValueSetJSON) Type() AttributeCodecType
type Connection ¶
type Connection interface {
    // Models returns all the models the underlying connection already supports
    //
    // This is useful when adding support for new models since we can find out
    // what can work out of the box.
    Models(context.Context) (ModelSet, error)

    // Operations returns all of the operations the given store supports
    Operations() OperationSet

    // Can returns true if this store can handle the given operations
    Can(operations ...Operation) bool

    // Create stores the given data into the underlying database
    Create(ctx context.Context, m *Model, rr ...ValueGetter) error

    // Update updates the given value in the underlying connection
    Update(ctx context.Context, m *Model, r ValueGetter) error

    // Lookup returns one bit of data
    Lookup(context.Context, *Model, ValueGetter, ValueSetter) error

    // Search returns an iterator which can be used to access all of the bits
    Search(context.Context, *Model, filter.Filter) (Iterator, error)

    // Analyze returns the operation analysis the connection can perform for the model
    Analyze(ctx context.Context, m *Model) (map[string]OpAnalysis, error)

    // Aggregate returns the iterator with aggregated data from the base model
    Aggregate(ctx context.Context, m *Model, f filter.Filter, groupBy []AggregateAttr, aggrExpr []AggregateAttr, having *ql.ASTNode) (i Iterator, _ error)

    // Delete deletes the given value
    Delete(ctx context.Context, m *Model, pkv ValueGetter) error

    // Truncate deletes all the data for the given model
    Truncate(ctx context.Context, m *Model) error

    // CreateModel adds support for the given models to the underlying database
    //
    // The operation returns an error if any of the models already exists.
    CreateModel(context.Context, ...*Model) error

    // DeleteModel removes support for the given model from the underlying database
    DeleteModel(context.Context, ...*Model) error

    // UpdateModel requests metadata changes to the existing model
    //
    // Only metadata (such as idents) are affected; attributes can not be changed here
    UpdateModel(ctx context.Context, old *Model, new *Model) error

    // AssertSchemaAlterations returns a new set of Alterations based on what the underlying
    // schema already provides -- it discards alterations for column additions that already exist, etc.
    AssertSchemaAlterations(ctx context.Context, sch *Model, aa ...*Alteration) ([]*Alteration, error)

    // ApplyAlteration applies the given alterations to the underlying schema
    ApplyAlteration(ctx context.Context, sch *Model, aa ...*Alteration) []error
}
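A minimal sketch of driving a Connection directly, assuming conn (a Connection) and m (a *Model) are already in hand:

    if !conn.Can(Create, Lookup) {
        return fmt.Errorf("store does not support the required operations")
    }

    // insert a single row
    row := (&Row{}).WithValue("id", 0, uint64(42))
    if err := conn.Create(ctx, m, row); err != nil {
        return err
    }

    // read it back, using the same row as the lookup value getter
    out := &Row{}
    if err := conn.Lookup(ctx, m, row, out); err != nil {
        return err
    }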
type ConnectionCloser ¶
type ConnectionConfig ¶
type ConnectionConfig struct {
    SensitivityLevelID uint64
    Label              string

    // When model does not specify the ident (table name for example), fallback to this
    ModelIdent string

    // when a new model is added on a connection, its ident
    // is verified against this regexp
    //
    // ident is considered valid if it matches one of the expressions
    // or if the list of checks is empty
    ModelIdentCheck []*regexp.Regexp
}
type ConnectionParams ¶
type ConnectionWrap ¶
type ConnectionWrap struct {
    ID     uint64
    Config ConnectionConfig

    // contains filtered or unexported fields
}
func MakeConnection ¶
func MakeConnection(ID uint64, conn Connection, p ConnectionParams, c ConnectionConfig) *ConnectionWrap
MakeConnection makes and returns a new connection (wrap)
type ConnectorFn ¶
type ConnectorFn func(ctx context.Context, dsn string) (Connection, error)
type Datasource ¶
type Datasource struct {
    Ident         string
    Filter        filter.Filter
    ModelRef      ModelRef
    OutAttributes []AttributeMapping

    // contains filtered or unexported fields
}
Datasource is a simple passthrough step for underlying datasources. It exists primarily to make operations consistent.
func (*Datasource) Analysis ¶
func (def *Datasource) Analysis() map[string]OpAnalysis
func (*Datasource) Attributes ¶
func (def *Datasource) Attributes() [][]AttributeMapping
func (*Datasource) Identifier ¶
func (def *Datasource) Identifier() string
func (*Datasource) Optimize ¶
func (def *Datasource) Optimize(req internalFilter) (res internalFilter, err error)
func (*Datasource) Sources ¶
func (def *Datasource) Sources() []string
type Driver ¶
type Driver struct {
    Type       string                 `json:"type"`
    Connection DriverConnectionConfig `json:"connection"`
    Operations OperationSet           `json:"operations"`
}
type DriverConnectionConfig ¶
type DriverConnectionConfig struct {
    Type   string                  `json:"type"`
    Params []DriverConnectionParam `json:"params"`
}
func NewDSNDriverConnectionConfig ¶
func NewDSNDriverConnectionConfig() DriverConnectionConfig
type DriverConnectionParam ¶
type FullService ¶
type FullService interface {
    Drivers() (drivers []Driver)

    ReplaceSensitivityLevel(levels ...SensitivityLevel) (err error)
    RemoveSensitivityLevel(levelIDs ...uint64) (err error)

    GetConnectionByID(connectionID uint64) *ConnectionWrap
    ReplaceConnection(ctx context.Context, cw *ConnectionWrap, isDefault bool) (err error)
    RemoveConnection(ctx context.Context, ID uint64) (err error)

    SearchModels(ctx context.Context) (out ModelSet, err error)
    ReplaceModel(ctx context.Context, currentAlts []*Alteration, model *Model) (newAlts []*Alteration, err error)
    RemoveModel(ctx context.Context, connectionID, ID uint64) (err error)
    FindModelByResourceID(connectionID uint64, resourceID uint64) *Model
    FindModelByResourceIdent(connectionID uint64, resourceType, resourceIdent string) *Model
    FindModelByIdent(connectionID uint64, ident string) *Model

    Create(ctx context.Context, mf ModelRef, operations OperationSet, rr ...ValueGetter) (err error)
    Update(ctx context.Context, mf ModelRef, operations OperationSet, rr ...ValueGetter) (err error)
    Search(ctx context.Context, mf ModelRef, operations OperationSet, f filter.Filter) (iter Iterator, err error)
    Lookup(ctx context.Context, mf ModelRef, operations OperationSet, lookup ValueGetter, dst ValueSetter) (err error)
    Delete(ctx context.Context, mf ModelRef, operations OperationSet, vv ...ValueGetter) (err error)
    Truncate(ctx context.Context, mf ModelRef, operations OperationSet) (err error)

    Run(ctx context.Context, pp Pipeline) (iter Iterator, err error)
    Dryrun(ctx context.Context, pp Pipeline) (err error)

    ApplyAlteration(ctx context.Context, alts ...*Alteration) (errs []error, err error)
    ReloadModel(ctx context.Context, currentAlts []*Alteration, model *Model) (newAlts []*Alteration, err error)

    SearchConnectionIssues(connectionID uint64) (out []Issue)
    SearchModelIssues(resourceID uint64) (out []Issue)
    SearchResourceIssues(resourceType, resource string) (out []Issue)
}
type Index ¶
type Index struct {
    Ident  string
    Type   string
    Unique bool

    Fields []*IndexField

    Predicate string
}
type IndexField ¶
type IndexField struct {
    AttributeIdent string
    Modifiers      []IndexFieldModifier
    Sort           IndexFieldSort
    Nulls          IndexFieldNulls
}
type IndexFieldModifier ¶
type IndexFieldModifier string
type IndexFieldNulls ¶
type IndexFieldNulls int
type IndexFieldSort ¶
type IndexFieldSort int
type Iterator ¶
type Iterator interface {
    Next(ctx context.Context) bool
    More(uint, ValueGetter) error
    Err() error
    Scan(ValueSetter) error
    Close() error

    BackCursor(ValueGetter) (*filter.PagingCursor, error)
    ForwardCursor(ValueGetter) (*filter.PagingCursor, error)
}
Iterator provides an interface for loading data from the underlying store
type Join ¶
type Join struct {
    Ident    string
    RelLeft  string
    RelRight string

    // @todo allow multiple join predicates; for now (for easier indexing)
    // only allow one (this is the same as we had before)
    On JoinPredicate

    Filter filter.Filter

    OutAttributes   []AttributeMapping
    LeftAttributes  []AttributeMapping
    RightAttributes []AttributeMapping

    // contains filtered or unexported fields
}
Join produces a series of joined rows from the provided sources based on the JoinPredicate.
The join step produces an SQL left join-like output where all right rows have a corresponding left row.
func (*Join) Analysis ¶
func (def *Join) Analysis() map[string]OpAnalysis
func (*Join) Attributes ¶
func (def *Join) Attributes() [][]AttributeMapping
func (*Join) Identifier ¶
type JoinPredicate ¶
JoinPredicate determines the attributes the two datasets should get joined on
type Link ¶
type Link struct {
    Ident    string
    RelLeft  string
    RelRight string

    // @todo allow multiple link predicates; for now (for easier indexing)
    // only allow one (this is the same as we had before)
    On LinkPredicate

    Filter filter.Filter

    OutLeftAttributes  []AttributeMapping
    OutRightAttributes []AttributeMapping
    LeftAttributes     []AttributeMapping
    RightAttributes    []AttributeMapping

    // contains filtered or unexported fields
}
Link produces a series of left and corresponding right rows based on the provided sources and the LinkPredicate.
The Link step produces an SQL left join-like output where left and right rows are served separately and the left rows are not duplicated.
func (*Link) Analysis ¶
func (def *Link) Analysis() map[string]OpAnalysis
func (*Link) Attributes ¶
func (def *Link) Attributes() [][]AttributeMapping
func (*Link) Identifier ¶
type LinkPredicate ¶
LinkPredicate determines the attributes the two datasets should get joined on
type MapProperties ¶
type MapProperties struct {
    Label               string
    IsPrimary           bool
    IsSystem            bool
    IsFilterable        bool
    IsSortable          bool
    IsMultivalue        bool
    MultivalueDelimiter string
    Nullable            bool
    Type                Type
}
MapProperties describes the attribute, such as its type and constraints
type Model ¶
type Model struct {
    ConnectionID uint64 `json:"connectionID"`
    Ident        string `json:"ident"`
    Label        string `json:"label"`

    Resource     string `json:"resource"`
    ResourceID   uint64 `json:"resourceID"`
    ResourceType string `json:"resourceType"`

    // Refs is an arbitrary map to identify a model
    // @todo consider reworking this; I'm not the biggest fan
    Refs map[string]any `json:"refs"`

    SensitivityLevelID uint64 `json:"sensitivityLevelID"`

    Attributes AttributeSet `json:"attributes"`

    Constraints map[string][]any `json:"constraints"`
    Indexes     IndexSet         `json:"indexes"`
}
Model describes the underlying data and its shape
func (Model) Compare ¶
Compare the two models
This only checks model metadata; the attributes are excluded.
func (*Model) Diff ¶
func (a *Model) Diff(b *Model) (out ModelDiffSet)
Diff calculates the diff between models a and b where a is used as base
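A minimal sketch of turning a model diff into alterations, where a is the base model and b is the asserted one:

    dd := a.Diff(b)
    for _, alt := range dd.Alterations() {
        // each alteration carries one concrete change,
        // e.g. alt.AttributeAdd or alt.AttributeReType
        _ = alt
    }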
func (Model) HasAttribute ¶
HasAttribute returns true when the model includes the specified attribute
type ModelDelete ¶
type ModelDelete struct {
Model *Model `json:"model"`
}
type ModelDiff ¶
type ModelDiff struct {
    Type modelDiffType

    // Original will be nil when a new attribute is being added
    Original *Attribute

    // Asserted will be nil when an existing attribute is being removed
    Asserted *Attribute
}
ModelDiff defines one identified mismatch between two models
type ModelDiffSet ¶
type ModelDiffSet []*ModelDiff
func (ModelDiffSet) Alterations ¶
func (dd ModelDiffSet) Alterations() (out []*Alteration)
type ModelRef ¶
type ModelRef struct {
    ConnectionID uint64

    ResourceID uint64

    ResourceType string
    Resource     string

    Refs map[string]any
}
ModelRef is used to retrieve a model from the DAL based on given params
type ModelSet ¶
type ModelSet []*Model
func (ModelSet) FilterByReferenced ¶
FilterByReferenced returns all of the models that reference b
func (ModelSet) FindByIdent ¶
func (ModelSet) FindByRefs ¶
FindByRefs returns the first Model that matches the given refs
func (ModelSet) FindByResourceID ¶
func (ModelSet) FindByResourceIdent ¶
type OpAnalysis ¶
type OpAnalysis struct {
    ScanCost   opCost
    SearchCost opCost
    FilterCost opCost
    SortCost   opCost

    OutputSize dsSize
}
type Operation ¶
type Operation string
const (
    Create  Operation = "corteza::dal:operation:create"
    Update  Operation = "corteza::dal:operation:update"
    Delete  Operation = "corteza::dal:operation:delete"
    Search  Operation = "corteza::dal:operation:search"
    Lookup  Operation = "corteza::dal:operation:lookup"
    Paging  Operation = "corteza::dal:operation:paging"
    Sorting Operation = "corteza::dal:operation:sorting"
)
type OperationSet ¶
type OperationSet []Operation
func CreateOperations ¶
func CreateOperations(requested ...Operation) (required OperationSet)
CreateOperations returns only requested operations used for Create operations
func DeleteOperations ¶
func DeleteOperations(requested ...Operation) (required OperationSet)
DeleteOperations returns only requested operations used for delete operations
func FullOperations ¶
func FullOperations() (cc OperationSet)
FullOperations returns all base system defined operations
func LookupOperations ¶
func LookupOperations(requested ...Operation) (required OperationSet)
LookupOperations returns only requested operations used for Lookup operations
func SearchOperations ¶
func SearchOperations(requested ...Operation) (required OperationSet)
SearchOperations returns only requested operations used for Search operations
func UpdateOperations ¶
func UpdateOperations(requested ...Operation) (required OperationSet)
UpdateOperations returns only requested operations used for Update operations
func (OperationSet) Diff ¶
func (aa OperationSet) Diff(bb OperationSet) (cc OperationSet)
Diff calculates the difference between the two operation sets
The diff uses aa as base
func (OperationSet) Intersect ¶
func (aa OperationSet) Intersect(bb OperationSet) (cc OperationSet)
Intersect returns the intersection between the two sets
func (OperationSet) IsSubset ¶
func (aa OperationSet) IsSubset(bb ...Operation) bool
IsSubset checks if all aa operations are inside bb
func (OperationSet) IsSuperset ¶
func (aa OperationSet) IsSuperset(bb ...Operation) bool
IsSuperset is the inverse of IsSubset: it checks if all bb operations are inside aa
func (OperationSet) Union ¶
func (aa OperationSet) Union(bb OperationSet) (cc OperationSet)
Union returns the union of the two sets
Duplicates are omitted
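A minimal sketch of combining the helpers above to check whether a store covers a paged, sorted search (the concrete operations are illustrative):

    supported := FullOperations()
    required := SearchOperations(Paging, Sorting)

    // true when every required operation is covered by the supported set
    canSearch := supported.IsSuperset(required...)
    _ = canSearch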
type OrderedBuffer ¶
type OrderedBuffer interface {
    Buffer

    // InOrder makes the buffer provide the stored data in the specified order
    InOrder(ss ...*filter.SortExpr) (err error)
}
OrderedBuffer provides the buffered data in the specified order
type PKValues ¶
func (PKValues) CountValues ¶
type Pipeline ¶
type Pipeline []PipelineStep
Pipeline defines a series of steps performed over the data
General outline of how a pipeline is used (a minimal sketch follows the list):
- Populate the pipeline with steps to define the desired outcome
- Analyze the pipeline to collect statistics and assist optimizers
- Run structure optimization which may reorder/drop steps
- Run step optimization which may re-configure individual steps, such as pushing parts of a filter to a lower level, requesting a specific sort order, and so on
- Use the pipeline as an iterator to pull data from it.
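A minimal sketch of a two-step pipeline, assuming svc satisfies FullService and that the connection/resource identifiers are illustrative:

    pp := Pipeline{
        &Datasource{
            Ident:    "base",
            ModelRef: ModelRef{ConnectionID: 1, Resource: "example:resource"},
        },
        &Aggregate{
            Ident:     "totals",
            RelSource: "base",
            // Group, OutAttributes and SourceAttributes would be populated here
        },
    }

    iter, err := svc.Run(ctx, pp)
    if err != nil {
        return err
    }
    defer iter.Close()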
type PipelineStep ¶
type PipelineStep interface {
    Identifier() string
    Sources() []string
    Attributes() [][]AttributeMapping

    Analyze(ctx context.Context) error
    Analysis() map[string]OpAnalysis
}
PipelineStep defines an operation performed over the data in the pipeline
type Row ¶
type Row struct {
// contains filtered or unexported fields
}
Row is a generic implementation for ValueGetter and ValueSetter
Primarily used within DAL pipeline execution steps, but may also be used outside.
func (*Row) CountValues ¶
func (*Row) Reset ¶
func (r *Row) Reset()
Reset clears out the row so the same instance can be reused where possible
Important: Reset only clears out the counters and does not re-init/clear out the underlying values. Don't iterate over the values directly; use the counters.
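A minimal sketch of populating, reading and reusing a Row; the attribute names are illustrative:

    r := &Row{}
    _ = r.SetValue("name", 0, "Jane")
    _ = r.SetValue("name", 1, "Janet") // multi-value attributes use increasing positions

    v, _ := r.GetValue("name", 0) // "Jane"
    n := r.CountValues()["name"]  // 2

    r.Reset() // counters are cleared; the instance can now be reused for the next row
    _, _ = v, n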
func (Row) SelectGVal ¶
type SensitivityLevel ¶
func MakeSensitivityLevel ¶
func MakeSensitivityLevel(ID uint64, level int, handle string) SensitivityLevel
MakeSensitivityLevel prepares a new sensitivity level
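A minimal sketch; the IDs, levels and handles are illustrative:

    public := MakeSensitivityLevel(1, 1, "public")
    private := MakeSensitivityLevel(2, 2, "private")

    // idx now indexes the given levels
    idx := SensitivityLevelIndex(public, private)
    _ = idx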
type SensitivityLevelSet ¶
type SensitivityLevelSet []SensitivityLevel
func (SensitivityLevelSet) Len ¶
func (a SensitivityLevelSet) Len() int
func (SensitivityLevelSet) Less ¶
func (a SensitivityLevelSet) Less(i, j int) bool
func (SensitivityLevelSet) Swap ¶
func (a SensitivityLevelSet) Swap(i, j int)
type SensitivityLevelUsage ¶
type SensitivityLevelUsage struct {
// contains filtered or unexported fields
}
func (SensitivityLevelUsage) Empty ¶
func (u SensitivityLevelUsage) Empty() bool
type SimpleAttr ¶
type SimpleAttr struct {
    Ident string
    Expr  string
    Src   string
    Props MapProperties
}
@todo consider reworking this
func (SimpleAttr) Expression ¶
func (sa SimpleAttr) Expression() (expression string)
func (SimpleAttr) Identifier ¶
func (sa SimpleAttr) Identifier() string
func (SimpleAttr) Properties ¶
func (sa SimpleAttr) Properties() MapProperties
func (SimpleAttr) Source ¶
func (sa SimpleAttr) Source() (ident string)
type TypeBlob ¶
type TypeBlob struct {
    // Nullable bool
}
TypeBlob stores/returns the data as-is
func (TypeBlob) IsNullable ¶
func (TypeBlob) Type ¶
func (t TypeBlob) Type() AttributeType
type TypeBoolean ¶
TypeBoolean
func (TypeBoolean) IsNullable ¶
func (t TypeBoolean) IsNullable() bool
func (TypeBoolean) Type ¶
func (t TypeBoolean) Type() AttributeType
type TypeDate ¶
TypeDate handles date coding
Encoding/decoding might be different depending on
- underlying store (and dialect)
- value codec (raw, jsonb...)
func (TypeDate) IsNullable ¶
func (TypeDate) Type ¶
func (t TypeDate) Type() AttributeType
type TypeEnum ¶
TypeEnum
func (TypeEnum) IsNullable ¶
func (TypeEnum) Type ¶
func (t TypeEnum) Type() AttributeType
type TypeGeometry ¶
type TypeGeometry struct {
    // Nullable bool
}
TypeGeometry
func (TypeGeometry) IsNullable ¶
func (t TypeGeometry) IsNullable() bool
func (TypeGeometry) Type ¶
func (t TypeGeometry) Type() AttributeType
type TypeID ¶
TypeID handles ID (uint64) coding
Encoding/decoding might be different depending on
- underlying store (and dialect)
- value codec (raw, json ...)
It is always a Corteza ID
func (TypeID) IsNullable ¶
func (TypeID) Type ¶
func (t TypeID) Type() AttributeType
type TypeJSON ¶
TypeJSON handles coding of arbitrary data into a JSON structure; not to be confused with encodedField
Encoding/decoding might be different depending on
- underlying store (and dialect)
- value codec (raw, json ...)
func (TypeJSON) IsNullable ¶
func (TypeJSON) Type ¶
func (t TypeJSON) Type() AttributeType
type TypeNumber ¶
type TypeNumber struct {
    Precision int
    Scale     int
    Nullable  bool

    HasDefault   bool
    DefaultValue float64

    Meta map[string]any
}
TypeNumber handles number coding
Encoding/decoding might be different depending on
- underlying store (and dialect)
- value codec (raw, jsonb...)
func (TypeNumber) IsNullable ¶
func (t TypeNumber) IsNullable() bool
func (TypeNumber) Type ¶
func (t TypeNumber) Type() AttributeType
type TypeNumberStoreNativeType ¶
type TypeNumberStoreNativeType string
type TypeRef ¶
type TypeRef struct {
    // defaults to ID
    RefAttribute string
    RefModel     *ModelRef

    Nullable bool

    HasDefault   bool
    DefaultValue uint64
}
TypeRef handles ID (uint64) coding + reference info
Encoding/decoding might be different depending on
- underlying store (and dialect)
- value codec (raw, json ...)
func (TypeRef) IsNullable ¶
func (TypeRef) Type ¶
func (t TypeRef) Type() AttributeType
type TypeText ¶
TypeText handles string coding
Encoding/decoding might be different depending on
- underlying store (and dialect)
- value codec (raw, jsonb...)
func (TypeText) IsNullable ¶
func (TypeText) Type ¶
func (t TypeText) Type() AttributeType
type TypeTime ¶
type TypeTime struct {
    Timezone          bool
    TimezonePrecision bool
    Precision         int
    Nullable          bool

    DefaultCurrentTimestamp bool
}
TypeTime handles time coding
Encoding/decoding might be different depending on
- underlying store (and dialect)
- value codec (raw, json ...)
func (TypeTime) IsNullable ¶
func (TypeTime) Type ¶
func (t TypeTime) Type() AttributeType
type TypeTimestamp ¶
type TypeTimestamp struct {
    Timezone  bool
    Precision int
    Nullable  bool

    DefaultCurrentTimestamp bool
}
TypeTimestamp handles timestamp coding
Encoding/decoding might be different depending on
- underlying store (and dialect)
- value codec (raw, json ...)
func (TypeTimestamp) IsNullable ¶
func (t TypeTimestamp) IsNullable() bool
func (TypeTimestamp) Type ¶
func (t TypeTimestamp) Type() AttributeType
type TypeUUID ¶
type TypeUUID struct {
Nullable bool
}
func (TypeUUID) IsNullable ¶
func (TypeUUID) Type ¶
func (t TypeUUID) Type() AttributeType
type ValueGetter ¶
Source Files ¶
- aggregator.go
- alteration.go
- attribute_codec.go
- attribute_types.go
- buffer.go
- conn.go
- def_aggregate.go
- def_datasource.go
- def_join.go
- def_link.go
- diff.go
- driver.go
- errors.go
- exec_aggregate.go
- exec_join_left.go
- exec_link_left.go
- filter.go
- inmem_buffer.go
- issues.go
- iterator.go
- model.go
- normalizer.go
- operations.go
- optimization.go
- optimization_clobber.go
- pipeline.go
- rel_index.go
- rel_index_buffer.go
- runner.go
- runner_gval.go
- sensitivity_level.go
- service.go
- utils.go