Documentation ¶
Index ¶
- Constants
- Variables
- func BlockCompactionRead(ctx context.Context, location objectio.Location, deletes []int64, ...) (*batch.Batch, error)
- func BlockPrefetch(idxes []uint16, service fileservice.FileService, ...) error
- func BlockRead(ctx context.Context, info *pkgcatalog.BlockInfo, inputDeletes []int64, ...) (*batch.Batch, error)
- func BlockReadInner(ctx context.Context, info *pkgcatalog.BlockInfo, inputDeleteRows []int64, ...) (result *batch.Batch, err error)
- func DecodeCheckpointMetadataFileName(name string) (start, end types.TS)
- func DecodeGCMetadataFileName(name string) (start, end types.TS, ext string)
- func EncodeCheckpointMetadataFileName(dir, prefix string, start, end types.TS) string
- func EncodeGCMetadataFileName(dir, prefix string, start, end types.TS) string
- func EncodeLocation(name objectio.ObjectName, extent objectio.Extent, rows uint32, id uint16) objectio.Location
- func EncodeLocationFromString(info string) (objectio.Location, error)
- func ExportSelectivityString() string
- func FindIntervalForBlock(rowids []types.Rowid, id *types.Blockid) (start int, end int)
- func GetObjectSizeWithBlocks(blocks []objectio.BlockObject) (uint32, error)
- func LoadColumns(ctx context.Context, cols []uint16, typs []types.Type, ...) (bat *batch.Batch, err error)
- func LoadColumnsData(ctx context.Context, metaType objectio.DataMetaType, cols []uint16, ...) (bat *batch.Batch, err error)
- func LoadOneBlock(ctx context.Context, fs fileservice.FileService, key objectio.Location, ...) (*batch.Batch, error)
- func LoadTombstoneColumns(ctx context.Context, cols []uint16, typs []types.Type, ...) (bat *batch.Batch, err error)
- func Prefetch(idxes []uint16, ids []uint16, service fileservice.FileService, ...) error
- func PrefetchFile(service fileservice.FileService, name string) error
- func PrefetchMeta(service fileservice.FileService, key objectio.Location) error
- func PrefetchTombstone(idxes []uint16, ids []uint16, service fileservice.FileService, ...) error
- func PrefetchWithMerged(params PrefetchParams) error
- func ReadBlockDelete(ctx context.Context, deltaloc objectio.Location, fs fileservice.FileService) (bat *batch.Batch, isPersistedByCN bool, err error)
- func ReadByFilter(ctx context.Context, info *pkgcatalog.BlockInfo, inputDeletes []int64, ...) (sels []int32, err error)
- func RecordBlockSelectivity(hit, total int)
- func RecordColumnSelectivity(hit, total int)
- func RecordReadDel(total, read, bisect time.Duration)
- func RecordReadFilterSelectivity(hit, total int)
- func ResetPipeline()
- func Start()
- func Stop()
- type BlockReader
- func (r *BlockReader) GetName() string
- func (r *BlockReader) GetObjectName() *objectio.ObjectName
- func (r *BlockReader) GetObjectReader() *objectio.ObjectReader
- func (r *BlockReader) LoadAllBF(ctx context.Context) (objectio.BloomFilter, uint32, error)
- func (r *BlockReader) LoadAllBlocks(ctx context.Context, m *mpool.MPool) ([]objectio.BlockObject, error)
- func (r *BlockReader) LoadAllColumns(ctx context.Context, idxs []uint16, m *mpool.MPool) ([]*batch.Batch, error)
- func (r *BlockReader) LoadColumns(ctx context.Context, cols []uint16, typs []types.Type, blk uint16, ...) (bat *batch.Batch, err error)
- func (r *BlockReader) LoadObjectMeta(ctx context.Context, m *mpool.MPool) (objectio.ObjectDataMeta, error)
- func (r *BlockReader) LoadOneBF(ctx context.Context, blk uint16) (objectio.StaticFilter, uint32, error)
- func (r *BlockReader) LoadOneSubColumns(ctx context.Context, cols []uint16, typs []types.Type, dataType uint16, ...) (bat *batch.Batch, err error)
- func (r *BlockReader) LoadSubColumns(ctx context.Context, cols []uint16, typs []types.Type, blk uint16, ...) (bats []*batch.Batch, err error)
- func (r *BlockReader) LoadZoneMap(ctx context.Context, seqnums []uint16, block objectio.BlockObject, ...) ([]objectio.ZoneMap, error)
- func (r *BlockReader) LoadZoneMaps(ctx context.Context, seqnums []uint16, id uint16, m *mpool.MPool) ([]objectio.ZoneMap, error)
- type BlockWriter
- func (w *BlockWriter) GetName() objectio.ObjectName
- func (w *BlockWriter) GetObjectStats() []objectio.ObjectStats
- func (w *BlockWriter) SetAppendable()
- func (w *BlockWriter) SetPrimaryKey(idx uint16)
- func (w *BlockWriter) SetSortKey(idx uint16)
- func (w *BlockWriter) Stats() objectio.ObjectStats
- func (w *BlockWriter) String(blocks []objectio.BlockObject) string
- func (w *BlockWriter) Sync(ctx context.Context) ([]objectio.BlockObject, objectio.Extent, error)
- func (w *BlockWriter) WriteBatch(batch *batch.Batch) (objectio.BlockObject, error)
- func (w *BlockWriter) WriteSubBatch(batch *batch.Batch, dataType objectio.DataMetaType) (objectio.BlockObject, int, error)
- func (w *BlockWriter) WriteTombstoneBatch(batch *batch.Batch) (objectio.BlockObject, error)
- type FetchFunc
- type IOJobFactory
- type IoPipeline
- type ObjectColumnMetasBuilder
- func (b *ObjectColumnMetasBuilder) AddPKData(data containers.Vector)
- func (b *ObjectColumnMetasBuilder) AddRowCnt(rows int)
- func (b *ObjectColumnMetasBuilder) Build() (uint32, []objectio.ColumnMeta)
- func (b *ObjectColumnMetasBuilder) GetPKData() []containers.Vector
- func (b *ObjectColumnMetasBuilder) GetTotalRow() uint32
- func (b *ObjectColumnMetasBuilder) InspectVector(idx int, vec containers.Vector, isPK bool)
- func (b *ObjectColumnMetasBuilder) SetPKNdv(idx uint16, ndv uint32)
- func (b *ObjectColumnMetasBuilder) UpdateZm(idx int, zm index.ZM)
- type Option
- type PrefetchFunc
- type PrefetchParams
- type ReadFilter
Constants ¶
const (
    AsyncIo = 1
    SyncIo  = 2
)

const (
    JTLoad tasks.JobType = 200 + iota
    JTFlush
)

const (
    CheckpointExt = "ckp"
    GCFullExt     = "fgc"
)
Variables ¶
var IoModel = SyncIo
Functions ¶
func BlockCompactionRead ¶
func BlockPrefetch ¶
func BlockPrefetch(idxes []uint16, service fileservice.FileService, infos [][]*pkgcatalog.BlockInfo) error
BlockPrefetch is the interface for CN to schedule a read-ahead of block data: idxes lists the columns that should be read ahead, service is the file service to read from, and infos groups the blocks to prefetch as [s3 object name][block].
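Below is a minimal sketch of a read-ahead call from the CN side. The helper name and the import paths are assumptions based on the matrixone repository layout; only the documented BlockPrefetch signature is taken from this page.

package example

import (
    pkgcatalog "github.com/matrixorigin/matrixone/pkg/catalog"
    "github.com/matrixorigin/matrixone/pkg/fileservice"
    "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/blockio"
)

// prefetchColumns asks the IO pipeline to read ahead the given column
// indexes for every block in infos (grouped as [object][block]).
func prefetchColumns(
    fs fileservice.FileService,
    infos [][]*pkgcatalog.BlockInfo,
    cols []uint16,
) error {
    // BlockPrefetch only schedules the read-ahead with the IO pipeline.
    return blockio.BlockPrefetch(cols, fs, infos)
}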
func BlockRead ¶
func BlockRead( ctx context.Context, info *pkgcatalog.BlockInfo, inputDeletes []int64, columns []uint16, colTypes []types.Type, ts timestamp.Timestamp, filterSeqnums []uint16, filterColTypes []types.Type, filter ReadFilter, fs fileservice.FileService, mp *mpool.MPool, vp engine.VectorPool, ) (*batch.Batch, error)
BlockRead reads block data from storage and applies deletes according to the given timestamp. The caller must make sure the metaloc is not empty.
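A sketch of an unfiltered read under a snapshot timestamp follows. The helper name and import paths are assumptions; passing nil for the filter seqnums, filter column types, and ReadFilter is also an assumption meaning "no filter", while the remaining arguments follow the documented signature.

package example

import (
    "context"

    pkgcatalog "github.com/matrixorigin/matrixone/pkg/catalog"
    "github.com/matrixorigin/matrixone/pkg/common/mpool"
    "github.com/matrixorigin/matrixone/pkg/container/batch"
    "github.com/matrixorigin/matrixone/pkg/container/types"
    "github.com/matrixorigin/matrixone/pkg/fileservice"
    "github.com/matrixorigin/matrixone/pkg/pb/timestamp"
    "github.com/matrixorigin/matrixone/pkg/vm/engine"
    "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/blockio"
)

// readBlockVisible reads the requested columns of one block as of ts and
// applies the deletes already collected by the caller. No ReadFilter is
// supplied, so all visible rows are returned.
func readBlockVisible(
    ctx context.Context,
    info *pkgcatalog.BlockInfo,
    deletes []int64,
    cols []uint16,
    colTypes []types.Type,
    ts timestamp.Timestamp,
    fs fileservice.FileService,
    mp *mpool.MPool,
    vp engine.VectorPool,
) (*batch.Batch, error) {
    // The caller must guarantee that info carries a non-empty metaloc.
    return blockio.BlockRead(
        ctx, info, deletes, cols, colTypes, ts,
        nil, nil, nil, // no filter seqnums, filter column types, or ReadFilter
        fs, mp, vp,
    )
}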
func BlockReadInner ¶
func BlockReadInner( ctx context.Context, info *pkgcatalog.BlockInfo, inputDeleteRows []int64, columns []uint16, colTypes []types.Type, ts types.TS, selectRows []int32, fs fileservice.FileService, mp *mpool.MPool, vp engine.VectorPool, ) (result *batch.Batch, err error)
func EncodeLocation ¶
func EncodeLocation( name objectio.ObjectName, extent objectio.Extent, rows uint32, id uint16) objectio.Location
EncodeLocation generates a metaloc from the given object name, extent, row count, and block id.
func EncodeLocationFromString ¶
EncodeLocationFromString generates a metaloc from an info string.
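A small sketch of decoding a metaloc string and using the resulting location to prefetch object metadata; the helper name and import paths are assumptions, while EncodeLocationFromString and PrefetchMeta are used as documented on this page.

package example

import (
    "github.com/matrixorigin/matrixone/pkg/fileservice"
    "github.com/matrixorigin/matrixone/pkg/objectio"
    "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/blockio"
)

// warmMetaFromString parses a textual metaloc and schedules a metadata
// prefetch for the object it points at.
func warmMetaFromString(fs fileservice.FileService, metaloc string) (objectio.Location, error) {
    loc, err := blockio.EncodeLocationFromString(metaloc)
    if err != nil {
        return objectio.Location{}, err
    }
    // PrefetchMeta only enqueues the read of the object's meta extent.
    if err := blockio.PrefetchMeta(fs, loc); err != nil {
        return objectio.Location{}, err
    }
    return loc, nil
}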
func ExportSelectivityString ¶
func ExportSelectivityString() string
func FindIntervalForBlock ¶ added in v1.0.0
func GetObjectSizeWithBlocks ¶
func GetObjectSizeWithBlocks(blocks []objectio.BlockObject) (uint32, error)
func LoadColumns ¶
func LoadColumnsData ¶ added in v1.0.0
func LoadOneBlock ¶ added in v1.1.0
func LoadOneBlock( ctx context.Context, fs fileservice.FileService, key objectio.Location, metaType objectio.DataMetaType, ) (*batch.Batch, error)
func LoadTombstoneColumns ¶ added in v1.0.0
func Prefetch ¶
func Prefetch(idxes []uint16, ids []uint16, service fileservice.FileService, key objectio.Location) error
func PrefetchFile ¶
func PrefetchFile(service fileservice.FileService, name string) error
func PrefetchMeta ¶
func PrefetchMeta(service fileservice.FileService, key objectio.Location) error
func PrefetchTombstone ¶ added in v1.0.0
func PrefetchTombstone(idxes []uint16, ids []uint16, service fileservice.FileService, key objectio.Location) error
func PrefetchWithMerged ¶
func PrefetchWithMerged(params PrefetchParams) error
PrefetchWithMerged issues a prefetch for which the caller has already merged the block information that needs to be prefetched (see PrefetchParams).
func ReadBlockDelete ¶ added in v1.0.0
func ReadBlockDelete(ctx context.Context, deltaloc objectio.Location, fs fileservice.FileService) (bat *batch.Batch, isPersistedByCN bool, err error)
func ReadByFilter ¶ added in v1.0.0
func ReadByFilter( ctx context.Context, info *pkgcatalog.BlockInfo, inputDeletes []int64, columns []uint16, colTypes []types.Type, ts types.TS, filter ReadFilter, fs fileservice.FileService, mp *mpool.MPool, ) (sels []int32, err error)
func RecordBlockSelectivity ¶
func RecordBlockSelectivity(hit, total int)
func RecordColumnSelectivity ¶
func RecordColumnSelectivity(hit, total int)
func RecordReadDel ¶ added in v1.0.0
func RecordReadFilterSelectivity ¶
func RecordReadFilterSelectivity(hit, total int)
func ResetPipeline ¶
func ResetPipeline()
Types ¶
type BlockReader ¶
type BlockReader struct {
// contains filtered or unexported fields
}
func NewFileReader ¶
func NewFileReader(service fileservice.FileService, name string) (*BlockReader, error)
func NewFileReaderNoCache ¶
func NewFileReaderNoCache(service fileservice.FileService, name string) (*BlockReader, error)
func NewObjectReader ¶
func NewObjectReader( service fileservice.FileService, key objectio.Location, opts ...objectio.ReaderOptionFunc, ) (*BlockReader, error)
func (*BlockReader) GetName ¶
func (r *BlockReader) GetName() string
func (*BlockReader) GetObjectName ¶
func (r *BlockReader) GetObjectName() *objectio.ObjectName
func (*BlockReader) GetObjectReader ¶
func (r *BlockReader) GetObjectReader() *objectio.ObjectReader
func (*BlockReader) LoadAllBF ¶
func (r *BlockReader) LoadAllBF( ctx context.Context, ) (objectio.BloomFilter, uint32, error)
func (*BlockReader) LoadAllBlocks ¶
func (r *BlockReader) LoadAllBlocks(ctx context.Context, m *mpool.MPool) ([]objectio.BlockObject, error)
func (*BlockReader) LoadAllColumns ¶
func (*BlockReader) LoadColumns ¶
func (r *BlockReader) LoadColumns( ctx context.Context, cols []uint16, typs []types.Type, blk uint16, m *mpool.MPool, ) (bat *batch.Batch, err error)
LoadColumns needs typs to generate columns; if the target table has had no schema change, nil can be passed.
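A sketch of loading a few columns from one block via NewObjectReader, assuming the matrixone import paths; typs is passed as nil on the assumption that the target table has had no schema change, as the comment above allows, and the helper name is hypothetical.

package example

import (
    "context"

    "github.com/matrixorigin/matrixone/pkg/common/mpool"
    "github.com/matrixorigin/matrixone/pkg/container/batch"
    "github.com/matrixorigin/matrixone/pkg/fileservice"
    "github.com/matrixorigin/matrixone/pkg/objectio"
    "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/blockio"
)

// loadBlockColumns opens the object referenced by loc and loads the given
// column indexes of block blk into a batch.
func loadBlockColumns(
    ctx context.Context,
    fs fileservice.FileService,
    loc objectio.Location,
    blk uint16,
    cols []uint16,
    mp *mpool.MPool,
) (*batch.Batch, error) {
    r, err := blockio.NewObjectReader(fs, loc)
    if err != nil {
        return nil, err
    }
    // nil typs: rely on the column types recorded in the object itself,
    // which is safe when the table schema has not changed.
    return r.LoadColumns(ctx, cols, nil, blk, mp)
}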
func (*BlockReader) LoadObjectMeta ¶
func (r *BlockReader) LoadObjectMeta(ctx context.Context, m *mpool.MPool) (objectio.ObjectDataMeta, error)
func (*BlockReader) LoadOneBF ¶
func (r *BlockReader) LoadOneBF( ctx context.Context, blk uint16, ) (objectio.StaticFilter, uint32, error)
func (*BlockReader) LoadOneSubColumns ¶ added in v1.0.0
func (r *BlockReader) LoadOneSubColumns( ctx context.Context, cols []uint16, typs []types.Type, dataType uint16, blk uint16, m *mpool.MPool, ) (bat *batch.Batch, err error)
LoadOneSubColumns needs typs to generate columns; if the target table has had no schema change, nil can be passed.
func (*BlockReader) LoadSubColumns ¶ added in v1.0.0
func (r *BlockReader) LoadSubColumns( ctx context.Context, cols []uint16, typs []types.Type, blk uint16, m *mpool.MPool, ) (bats []*batch.Batch, err error)
LoadSubColumns needs typs to generate columns; if the target table has had no schema change, nil can be passed.
func (*BlockReader) LoadZoneMap ¶
type BlockWriter ¶
type BlockWriter struct {
// contains filtered or unexported fields
}
func NewBlockWriter ¶
func NewBlockWriter(fs fileservice.FileService, name string) (*BlockWriter, error)
func NewBlockWriterNew ¶
func NewBlockWriterNew(fs fileservice.FileService, name objectio.ObjectName, schemaVer uint32, seqnums []uint16) (*BlockWriter, error)
seqnums are the sequence numbers of the columns in the batches written by `WriteBatch`; `WriteBatchWithoutIndex` ignores them.
func (*BlockWriter) GetName ¶
func (w *BlockWriter) GetName() objectio.ObjectName
func (*BlockWriter) GetObjectStats ¶ added in v1.1.0
func (w *BlockWriter) GetObjectStats() []objectio.ObjectStats
func (*BlockWriter) SetAppendable ¶ added in v1.1.0
func (w *BlockWriter) SetAppendable()
func (*BlockWriter) SetPrimaryKey ¶
func (w *BlockWriter) SetPrimaryKey(idx uint16)
func (*BlockWriter) SetSortKey ¶ added in v1.1.0
func (w *BlockWriter) SetSortKey(idx uint16)
func (*BlockWriter) Stats ¶ added in v1.1.0
func (w *BlockWriter) Stats() objectio.ObjectStats
func (*BlockWriter) String ¶
func (w *BlockWriter) String( blocks []objectio.BlockObject) string
func (*BlockWriter) Sync ¶
func (w *BlockWriter) Sync(ctx context.Context) ([]objectio.BlockObject, objectio.Extent, error)
func (*BlockWriter) WriteBatch ¶
func (w *BlockWriter) WriteBatch(batch *batch.Batch) (objectio.BlockObject, error)
WriteBatch writes a batch whose schema is described by the seqnums passed to NewBlockWriterNew.
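A sketch of the write path, assuming the matrixone import paths: the caller supplies an already-built objectio.ObjectName, a schema version, the seqnums matching the batches' columns, and the batches to persist; the helper name is hypothetical.

package example

import (
    "context"

    "github.com/matrixorigin/matrixone/pkg/container/batch"
    "github.com/matrixorigin/matrixone/pkg/fileservice"
    "github.com/matrixorigin/matrixone/pkg/objectio"
    "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/blockio"
)

// writeBlocks persists each batch as one block of a new object and then
// flushes the object with Sync.
func writeBlocks(
    ctx context.Context,
    fs fileservice.FileService,
    name objectio.ObjectName,
    schemaVer uint32,
    seqnums []uint16,
    pkIdx uint16,
    bats []*batch.Batch,
) ([]objectio.BlockObject, objectio.Extent, error) {
    w, err := blockio.NewBlockWriterNew(fs, name, schemaVer, seqnums)
    if err != nil {
        return nil, objectio.Extent{}, err
    }
    // Declare which column is the primary key.
    w.SetPrimaryKey(pkIdx)

    // Each WriteBatch call appends one block whose schema is described
    // by the seqnums passed to NewBlockWriterNew.
    for _, bat := range bats {
        if _, err := w.WriteBatch(bat); err != nil {
            return nil, objectio.Extent{}, err
        }
    }
    // Sync writes the buffered blocks and the object metadata to storage.
    return w.Sync(ctx)
}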
func (*BlockWriter) WriteSubBatch ¶ added in v1.0.0
func (w *BlockWriter) WriteSubBatch(batch *batch.Batch, dataType objectio.DataMetaType) (objectio.BlockObject, int, error)
func (*BlockWriter) WriteTombstoneBatch ¶ added in v1.0.0
func (w *BlockWriter) WriteTombstoneBatch(batch *batch.Batch) (objectio.BlockObject, error)
type IoPipeline ¶
type IoPipeline struct {
// contains filtered or unexported fields
}
func NewIOPipeline ¶
func NewIOPipeline( opts ...Option, ) *IoPipeline
func (*IoPipeline) Fetch ¶
func (p *IoPipeline) Fetch( ctx context.Context, params fetchParams, ) (res any, err error)
func (*IoPipeline) Prefetch ¶
func (p *IoPipeline) Prefetch(params PrefetchParams) (err error)
func (*IoPipeline) Start ¶
func (p *IoPipeline) Start()
func (*IoPipeline) Stop ¶
func (p *IoPipeline) Stop()
type ObjectColumnMetasBuilder ¶
type ObjectColumnMetasBuilder struct {
// contains filtered or unexported fields
}
func NewObjectColumnMetasBuilder ¶
func NewObjectColumnMetasBuilder(colIdx int) *ObjectColumnMetasBuilder
func (*ObjectColumnMetasBuilder) AddPKData ¶
func (b *ObjectColumnMetasBuilder) AddPKData(data containers.Vector)
func (*ObjectColumnMetasBuilder) AddRowCnt ¶
func (b *ObjectColumnMetasBuilder) AddRowCnt(rows int)
func (*ObjectColumnMetasBuilder) Build ¶
func (b *ObjectColumnMetasBuilder) Build() (uint32, []objectio.ColumnMeta)
func (*ObjectColumnMetasBuilder) GetPKData ¶
func (b *ObjectColumnMetasBuilder) GetPKData() []containers.Vector
func (*ObjectColumnMetasBuilder) GetTotalRow ¶
func (b *ObjectColumnMetasBuilder) GetTotalRow() uint32
func (*ObjectColumnMetasBuilder) InspectVector ¶
func (b *ObjectColumnMetasBuilder) InspectVector(idx int, vec containers.Vector, isPK bool)
func (*ObjectColumnMetasBuilder) SetPKNdv ¶
func (b *ObjectColumnMetasBuilder) SetPKNdv(idx uint16, ndv uint32)
type Option ¶
type Option func(*IoPipeline)
func WithFetchParallism ¶
func WithJobFactory ¶
func WithJobFactory(factory IOJobFactory) Option
func WithPrefetchParallism ¶
type PrefetchFunc ¶
type PrefetchFunc = func(params PrefetchParams) error
type PrefetchParams ¶ added in v1.0.0
type PrefetchParams struct {
// contains filtered or unexported fields
}
PrefetchParams is the parameter of IoPipeline.Prefetch. It provides a merge capability: the prefetch requests for multiple blocks of the same object/file can be combined into a single PrefetchParams.
func BuildPrefetchParams ¶
func BuildPrefetchParams(service fileservice.FileService, key objectio.Location) (PrefetchParams, error)
func (*PrefetchParams) AddBlock ¶ added in v1.0.0
func (p *PrefetchParams) AddBlock(idxes []uint16, ids []uint16)
func (*PrefetchParams) AddBlockWithType ¶ added in v1.0.0
func (p *PrefetchParams) AddBlockWithType(idxes []uint16, ids []uint16, dataType uint16)
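A sketch of the merged prefetch flow described above, assuming the matrixone import paths: one PrefetchParams is built for an object, the requested blocks are merged into it with AddBlock, and the merged request is handed to PrefetchWithMerged. The helper name is hypothetical.

package example

import (
    "github.com/matrixorigin/matrixone/pkg/fileservice"
    "github.com/matrixorigin/matrixone/pkg/objectio"
    "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/blockio"
)

// prefetchMergedBlocks schedules a single prefetch request covering the
// given column indexes for several blocks of one object.
func prefetchMergedBlocks(
    fs fileservice.FileService,
    loc objectio.Location,
    cols []uint16,
    blockIDs []uint16,
) error {
    params, err := blockio.BuildPrefetchParams(fs, loc)
    if err != nil {
        return err
    }
    // Merge all requested blocks of this object into one request, so the
    // pipeline can issue their IO together.
    params.AddBlock(cols, blockIDs)
    return blockio.PrefetchWithMerged(params)
}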