Versions in this module Expand all Collapse all v1 v1.7.9 Oct 27, 2019 Changes in this version + const DefaultCacheMaxMemorySize + const DefaultCacheSnapshotMemorySize + const DefaultCacheSnapshotWriteColdDuration + const DefaultCompactFullWriteColdDuration + const DefaultCompactThroughput + const DefaultCompactThroughputBurst + const DefaultEngine + const DefaultIndex + const DefaultMaxConcurrentCompactions + const DefaultMaxIndexLogFileSize + const DefaultMaxPointsPerBlock + const DefaultMaxSeriesPerDatabase + const DefaultMaxValuesPerTag + const DefaultSeriesIDSetCacheSize + const DefaultSeriesPartitionCompactThreshold + const EOF + const InmemIndexName + const SeriesEntryFlagSize + const SeriesEntryHeaderSize + const SeriesEntryInsertFlag + const SeriesEntryTombstoneFlag + const SeriesFileDirectory + const SeriesFilePartitionN + const SeriesIDSize + const SeriesIndexElemSize + const SeriesIndexHeaderSize + const SeriesIndexLoadFactor + const SeriesIndexMagic + const SeriesIndexVersion + const SeriesSegmentHeaderSize + const SeriesSegmentMagic + const SeriesSegmentVersion + const TSI1IndexName + var ErrEngineClosed = errors.New("engine is closed") + var ErrFieldNotFound = errors.New("field not found") + var ErrFieldOverflow = errors.New("field overflow") + var ErrFieldTypeConflict = errors.New("field type conflict") + var ErrFieldUnmappedID = errors.New("field ID not mapped") + var ErrFormatNotFound = errors.New("format not found") + var ErrIndexClosing = errors.New("index is closing") + var ErrInvalidSeriesIndex = errors.New("invalid series index") + var ErrInvalidSeriesPartitionID = errors.New("tsdb: invalid series partition id") + var ErrInvalidSeriesSegment = errors.New("invalid series segment") + var ErrInvalidSeriesSegmentVersion = errors.New("invalid series segment version") + var ErrMultipleIndexTypes = errors.New(...) 
+ var ErrSeriesFileClosed = errors.New("tsdb: series file closed") + var ErrSeriesPartitionClosed = errors.New("tsdb: series partition closed") + var ErrSeriesPartitionCompactionCancelled = errors.New("tsdb: series partition compaction cancelled") + var ErrSeriesSegmentNotWritable = errors.New("series segment not writable") + var ErrShardDeletion = errors.New("shard is being deleted") + var ErrShardDisabled = errors.New("shard is disabled") + var ErrShardNotFound = fmt.Errorf("shard not found") + var ErrShardNotIdle = errors.New("shard not idle") + var ErrStoreClosed = fmt.Errorf("store is closed") + var ErrUnknownEngineFormat = errors.New("unknown engine format") + var ErrUnknownFieldType = errors.New("unknown field type") + var ErrUnknownFieldsFormat = errors.New("unknown field index format") + var NewInmemIndex func(name string, sfile *SeriesFile) (interface{}, error) + func AppendSeriesEntry(dst []byte, flag uint8, id uint64, key []byte) []byte + func AppendSeriesKey(dst []byte, name []byte, tags models.Tags) []byte + func CompareSeriesKeys(a, b []byte) int + func GenerateSeriesKeys(names [][]byte, tagsSlice []models.Tags) [][]byte + func IsValidSeriesEntryFlag(flag byte) bool + func IsValidSeriesSegmentFilename(filename string) bool + func JoinSeriesOffset(segmentID uint16, pos uint32) int64 + func MakeTagsKey(keys []string, tags models.Tags) []byte + func MarshalTags(tags map[string]string) []byte + func NewFieldKeysIterator(sh *Shard, opt query.IteratorOptions) (query.Iterator, error) + func NewMeasurementSliceIterator(names [][]byte) *measurementSliceIterator + func NewSeriesPointIterator(indexSet IndexSet, opt query.IteratorOptions) (_ query.Iterator, err error) + func NewSeriesQueryAdapterIterator(sfile *SeriesFile, itr SeriesIDIterator, fieldset *MeasurementFieldSet, ...) 
query.Iterator + func NewShardError(id uint64, err error) error + func NewTagKeySliceIterator(keys [][]byte) *tagKeySliceIterator + func NewTagKeysIterator(sh *Shard, opt query.IteratorOptions) (query.Iterator, error) + func NewTagValueSliceIterator(values [][]byte) *tagValueSliceIterator + func ParseSeriesKey(data []byte) (name []byte, tags models.Tags) + func ParseSeriesKeyInto(data []byte, dstTags models.Tags) ([]byte, models.Tags) + func ParseSeriesSegmentFilename(filename string) (uint16, error) + func ReadAllSeriesIDIterator(itr SeriesIDIterator) ([]uint64, error) + func ReadSeriesEntry(data []byte) (flag uint8, id uint64, key []byte, sz int64) + func ReadSeriesKey(data []byte) (key, remainder []byte) + func ReadSeriesKeyFromSegments(a []*SeriesSegment, offset int64) []byte + func ReadSeriesKeyLen(data []byte) (sz int, remainder []byte) + func ReadSeriesKeyMeasurement(data []byte) (name, remainder []byte) + func ReadSeriesKeyTag(data []byte) (key, value, remainder []byte) + func ReadSeriesKeyTagN(data []byte) (n int, remainder []byte) + func RegisterEngine(name string, fn NewEngineFunc) + func RegisterIndex(name string, fn NewIndexFunc) + func RegisteredEngines() []string + func RegisteredIndexes() []string + func SeriesKeySize(name []byte, tags models.Tags) int + func SeriesKeysSize(names [][]byte, tagsSlice []models.Tags) int + func SeriesSegmentSize(id uint16) uint32 + func SplitSeriesOffset(offset int64) (segmentID uint16, pos uint32) + type BooleanArray = cursors.BooleanArray + func NewBooleanArrayLen(sz int) *BooleanArray + type BooleanArrayCursor = cursors.BooleanArrayCursor + type CompactionPlannerCreator func(cfg Config) interface{} + type Config struct + CacheMaxMemorySize toml.Size + CacheSnapshotMemorySize toml.Size + CacheSnapshotWriteColdDuration toml.Duration + CompactFullWriteColdDuration toml.Duration + CompactThroughput toml.Size + CompactThroughputBurst toml.Size + Dir string + Engine string + Index string + MaxConcurrentCompactions int + 
MaxIndexLogFileSize toml.Size + MaxSeriesPerDatabase int + MaxValuesPerTag int + QueryLogEnabled bool + SeriesIDSetCacheSize int + TSMWillNeed bool + TraceLoggingEnabled bool + ValidateKeys bool + WALDir string + WALFsyncDelay toml.Duration + func NewConfig() Config + func (c *Config) Validate() error + func (c Config) Diagnostics() (*diagnostics.Diagnostics, error) + type Cursor = cursors.Cursor + type CursorIterator = cursors.CursorIterator + type CursorIterators = cursors.CursorIterators + func CreateCursorIterators(ctx context.Context, shards []*Shard) (CursorIterators, error) + type CursorRequest = cursors.CursorRequest + type CursorStats = cursors.CursorStats + type Engine interface + Backup func(w io.Writer, basePath string, since time.Time) error + Close func() error + CreateCursorIterator func(ctx context.Context) (CursorIterator, error) + CreateIterator func(ctx context.Context, measurement string, opt query.IteratorOptions) (query.Iterator, error) + CreateSeriesIfNotExists func(key, name []byte, tags models.Tags) error + CreateSeriesListIfNotExists func(keys, names [][]byte, tags []models.Tags) error + CreateSnapshot func() (string, error) + DeleteMeasurement func(name []byte) error + DeleteSeriesRange func(itr SeriesIterator, min, max int64) error + DeleteSeriesRangeWithPredicate func(itr SeriesIterator, ...) 
error + Digest func() (io.ReadCloser, int64, error) + DiskSize func() int64 + Export func(w io.Writer, basePath string, start time.Time, end time.Time) error + ForEachMeasurementName func(fn func(name []byte) error) error + Free func() error + HasTagKey func(name, key []byte) (bool, error) + Import func(r io.Reader, basePath string) error + IsIdle func() bool + IteratorCost func(measurement string, opt query.IteratorOptions) (query.IteratorCost, error) + LastModified func() time.Time + LoadMetadataIndex func(shardID uint64, index Index) error + MeasurementExists func(name []byte) (bool, error) + MeasurementFieldSet func() *MeasurementFieldSet + MeasurementFields func(measurement []byte) *MeasurementFields + MeasurementNamesByRegex func(re *regexp.Regexp) ([][]byte, error) + MeasurementTagKeysByExpr func(name []byte, expr influxql.Expr) (map[string]struct{}, error) + MeasurementsSketches func() (estimator.Sketch, estimator.Sketch, error) + Open func() error + Restore func(r io.Reader, basePath string) error + ScheduleFullCompaction func() error + SeriesN func() int64 + SeriesSketches func() (estimator.Sketch, estimator.Sketch, error) + SetCompactionsEnabled func(enabled bool) + SetEnabled func(enabled bool) + Statistics func(tags map[string]string) []models.Statistic + TagKeyCardinality func(name, key []byte) int + WithLogger func(*zap.Logger) + WritePoints func(points []models.Point) error + func NewEngine(id uint64, i Index, path string, walPath string, sfile *SeriesFile, ...) 
(Engine, error) + type EngineFormat int + const TSM1Format + type EngineOptions struct + CompactionDisabled bool + CompactionLimiter limiter.Fixed + CompactionPlannerCreator CompactionPlannerCreator + CompactionThroughputLimiter limiter.Rate + Config Config + DatabaseFilter func(database string) bool + EngineVersion string + FieldValidator FieldValidator + FileStoreObserver FileStoreObserver + IndexVersion string + InmemIndex interface{} + MonitorDisabled bool + OnNewEngine func(Engine) + OpenLimiter limiter.Fixed + RetentionPolicyFilter func(database, rp string) bool + SeriesIDSets SeriesIDSets + ShardFilter func(database, rp string, id uint64) bool + ShardID uint64 + WALEnabled bool + func NewEngineOptions() EngineOptions + type Field struct + ID uint8 + Name string + Type influxql.DataType + type FieldCreate struct + Field *Field + Measurement []byte + type FieldValidator interface + Validate func(mf *MeasurementFields, point models.Point) error + type FileStoreObserver interface + FileFinishing func(path string) error + FileUnlinking func(path string) error + type FloatArray = cursors.FloatArray + func NewFloatArrayLen(sz int) *FloatArray + type FloatArrayCursor = cursors.FloatArrayCursor + type Index interface + Bytes func() int + Close func() error + CreateSeriesIfNotExists func(key, name []byte, tags models.Tags) error + CreateSeriesListIfNotExists func(keys, names [][]byte, tags []models.Tags) error + Database func() string + DiskSizeBytes func() int64 + DropMeasurement func(name []byte) error + DropMeasurementIfSeriesNotExist func(name []byte) (bool, error) + DropSeries func(seriesID uint64, key []byte, cascade bool) error + DropSeriesGlobal func(key []byte) error + FieldSet func() *MeasurementFieldSet + ForEachMeasurementName func(fn func(name []byte) error) error + HasTagKey func(name, key []byte) (bool, error) + HasTagValue func(name, key, value []byte) (bool, error) + InitializeSeries func(keys, names [][]byte, tags []models.Tags) error + 
MeasurementExists func(name []byte) (bool, error) + MeasurementIterator func() (MeasurementIterator, error) + MeasurementNamesByRegex func(re *regexp.Regexp) ([][]byte, error) + MeasurementSeriesIDIterator func(name []byte) (SeriesIDIterator, error) + MeasurementTagKeysByExpr func(name []byte, expr influxql.Expr) (map[string]struct{}, error) + MeasurementsSketches func() (estimator.Sketch, estimator.Sketch, error) + Open func() error + Rebuild func() + SeriesIDSet func() *SeriesIDSet + SeriesN func() int64 + SeriesSketches func() (estimator.Sketch, estimator.Sketch, error) + SetFieldName func(measurement []byte, name string) + SetFieldSet func(fs *MeasurementFieldSet) + TagKeyCardinality func(name, key []byte) int + TagKeyIterator func(name []byte) (TagKeyIterator, error) + TagKeySeriesIDIterator func(name, key []byte) (SeriesIDIterator, error) + TagValueIterator func(name, key []byte) (TagValueIterator, error) + TagValueSeriesIDIterator func(name, key, value []byte) (SeriesIDIterator, error) + Type func() string + UniqueReferenceID func() uintptr + WithLogger func(*zap.Logger) + func MustOpenIndex(id uint64, database, path string, seriesIDSet *SeriesIDSet, sfile *SeriesFile, ...) Index + func NewIndex(id uint64, database, path string, seriesIDSet *SeriesIDSet, sfile *SeriesFile, ...) 
(Index, error) + type IndexFormat int + const InMemFormat + const TSI1Format + type IndexSet struct + Indexes []Index + SeriesFile *SeriesFile + func (is IndexSet) Database() string + func (is IndexSet) DedupeInmemIndexes() IndexSet + func (is IndexSet) ForEachMeasurementTagKey(name []byte, fn func(key []byte) error) error + func (is IndexSet) HasField(measurement []byte, field string) bool + func (is IndexSet) HasInmemIndex() bool + func (is IndexSet) HasTagKey(name, key []byte) (bool, error) + func (is IndexSet) HasTagValue(name, key, value []byte) (bool, error) + func (is IndexSet) MatchTagValueSeriesIDIterator(name, key []byte, value *regexp.Regexp, matches bool) (SeriesIDIterator, error) + func (is IndexSet) MeasurementIterator() (MeasurementIterator, error) + func (is IndexSet) MeasurementNamesByExpr(auth query.Authorizer, expr influxql.Expr) ([][]byte, error) + func (is IndexSet) MeasurementSeriesByExprIterator(name []byte, expr influxql.Expr) (SeriesIDIterator, error) + func (is IndexSet) MeasurementSeriesIDIterator(name []byte) (SeriesIDIterator, error) + func (is IndexSet) MeasurementSeriesKeysByExpr(name []byte, expr influxql.Expr) ([][]byte, error) + func (is IndexSet) MeasurementTagKeyValuesByExpr(auth query.Authorizer, name []byte, keys []string, expr influxql.Expr, ...) 
([][]string, error) + func (is IndexSet) MeasurementTagKeysByExpr(name []byte, expr influxql.Expr) (map[string]struct{}, error) + func (is IndexSet) TagKeyHasAuthorizedSeries(auth query.Authorizer, name, tagKey []byte) (bool, error) + func (is IndexSet) TagKeyIterator(name []byte) (TagKeyIterator, error) + func (is IndexSet) TagKeySeriesIDIterator(name, key []byte) (SeriesIDIterator, error) + func (is IndexSet) TagSets(sfile *SeriesFile, name []byte, opt query.IteratorOptions) ([]*query.TagSet, error) + func (is IndexSet) TagValueIterator(name, key []byte) (TagValueIterator, error) + func (is IndexSet) TagValueSeriesIDIterator(name, key, value []byte) (SeriesIDIterator, error) + func (is IndexSet) TagValuesByKeyAndExpr(auth query.Authorizer, name []byte, keys []string, expr influxql.Expr, ...) ([]map[string]struct{}, error) + type IntegerArray = cursors.IntegerArray + func NewIntegerArrayLen(sz int) *IntegerArray + type IntegerArrayCursor = cursors.IntegerArrayCursor + type KeyValue struct + Key string + Value string + type KeyValues []KeyValue + func (a KeyValues) Len() int + func (a KeyValues) Less(i, j int) bool + func (a KeyValues) Swap(i, j int) + type LimitError struct + Reason string + func (e *LimitError) Error() string + type MeasurementFieldSet struct + func NewMeasurementFieldSet(path string) (*MeasurementFieldSet, error) + func (fs *MeasurementFieldSet) Bytes() int + func (fs *MeasurementFieldSet) CreateFieldsIfNotExists(name []byte) *MeasurementFields + func (fs *MeasurementFieldSet) Delete(name string) + func (fs *MeasurementFieldSet) DeleteWithLock(name string, fn func() error) error + func (fs *MeasurementFieldSet) Fields(name []byte) *MeasurementFields + func (fs *MeasurementFieldSet) FieldsByString(name string) *MeasurementFields + func (fs *MeasurementFieldSet) IsEmpty() bool + func (fs *MeasurementFieldSet) Save() error + type MeasurementFields struct + func NewMeasurementFields() *MeasurementFields + func (m *MeasurementFields) 
CreateFieldIfNotExists(name []byte, typ influxql.DataType) error + func (m *MeasurementFields) Field(name string) *Field + func (m *MeasurementFields) FieldBytes(name []byte) *Field + func (m *MeasurementFields) FieldKeys() []string + func (m *MeasurementFields) FieldN() int + func (m *MeasurementFields) FieldSet() map[string]influxql.DataType + func (m *MeasurementFields) ForEachField(fn func(name string, typ influxql.DataType) bool) + func (m *MeasurementFields) HasField(name string) bool + type MeasurementIterator interface + Close func() error + Next func() ([]byte, error) + func MergeMeasurementIterators(itrs ...MeasurementIterator) MeasurementIterator + type MeasurementIterators []MeasurementIterator + func (a MeasurementIterators) Close() (err error) + type NewEngineFunc func(id uint64, i Index, path string, walPath string, sfile *SeriesFile, ...) Engine + type NewIndexFunc func(id uint64, database, path string, seriesIDSet *SeriesIDSet, sfile *SeriesFile, ...) Index + type PartialWriteError struct + Dropped int + DroppedKeys [][]byte + Reason string + func (e PartialWriteError) Error() string + type PointBatcher struct + func NewPointBatcher(sz int, bp int, d time.Duration) *PointBatcher + func (b *PointBatcher) Flush() + func (b *PointBatcher) In() chan<- models.Point + func (b *PointBatcher) Out() <-chan []models.Point + func (b *PointBatcher) Start() + func (b *PointBatcher) Stats() *PointBatcherStats + func (b *PointBatcher) Stop() + type PointBatcherStats struct + BatchTotal uint64 + PointTotal uint64 + SizeTotal uint64 + TimeoutTotal uint64 + type SeriesCursor interface + Close func() error + Next func() (*SeriesCursorRow, error) + type SeriesCursorRequest struct + Measurements MeasurementIterator + type SeriesCursorRow struct + Name []byte + Tags models.Tags + func (r *SeriesCursorRow) Compare(other *SeriesCursorRow) int + type SeriesElem interface + Deleted func() bool + Expr func() influxql.Expr + Name func() []byte + Tags func() models.Tags + type 
SeriesFile struct + Logger *zap.Logger + func NewSeriesFile(path string) *SeriesFile + func (f *SeriesFile) Close() (err error) + func (f *SeriesFile) CreateSeriesListIfNotExists(names [][]byte, tagsSlice []models.Tags) ([]uint64, error) + func (f *SeriesFile) DeleteSeriesID(id uint64) error + func (f *SeriesFile) DisableCompactions() + func (f *SeriesFile) EnableCompactions() + func (f *SeriesFile) HasSeries(name []byte, tags models.Tags, buf []byte) bool + func (f *SeriesFile) IsDeleted(id uint64) bool + func (f *SeriesFile) Open() error + func (f *SeriesFile) Partitions() []*SeriesPartition + func (f *SeriesFile) Path() string + func (f *SeriesFile) Retain() func() + func (f *SeriesFile) Series(id uint64) ([]byte, models.Tags) + func (f *SeriesFile) SeriesCount() uint64 + func (f *SeriesFile) SeriesID(name []byte, tags models.Tags, buf []byte) uint64 + func (f *SeriesFile) SeriesIDIterator() SeriesIDIterator + func (f *SeriesFile) SeriesIDPartition(id uint64) *SeriesPartition + func (f *SeriesFile) SeriesIDPartitionID(id uint64) int + func (f *SeriesFile) SeriesKey(id uint64) []byte + func (f *SeriesFile) SeriesKeyPartition(key []byte) *SeriesPartition + func (f *SeriesFile) SeriesKeyPartitionID(key []byte) int + func (f *SeriesFile) SeriesKeys(ids []uint64) [][]byte + func (f *SeriesFile) SeriesKeysPartitionIDs(keys [][]byte) []int + func (f *SeriesFile) SeriesPartitionPath(i int) string + func (f *SeriesFile) Wait() + type SeriesIDElem struct + Expr influxql.Expr + SeriesID uint64 + type SeriesIDElems []SeriesIDElem + func (a SeriesIDElems) Len() int + func (a SeriesIDElems) Less(i, j int) bool + func (a SeriesIDElems) Swap(i, j int) + type SeriesIDIterator interface + Close func() error + Next func() (SeriesIDElem, error) + func DifferenceSeriesIDIterators(itr0, itr1 SeriesIDIterator) SeriesIDIterator + func FilterUndeletedSeriesIDIterator(sfile *SeriesFile, itr SeriesIDIterator) SeriesIDIterator + func IntersectSeriesIDIterators(itr0, itr1 SeriesIDIterator) 
SeriesIDIterator + func MergeSeriesIDIterators(itrs ...SeriesIDIterator) SeriesIDIterator + func UnionSeriesIDIterators(itr0, itr1 SeriesIDIterator) SeriesIDIterator + type SeriesIDIterators []SeriesIDIterator + func (a SeriesIDIterators) Close() (err error) + type SeriesIDSet struct + func NewSeriesIDSet(a ...uint64) *SeriesIDSet + func (s *SeriesIDSet) Add(id uint64) + func (s *SeriesIDSet) AddMany(ids ...uint64) + func (s *SeriesIDSet) AddNoLock(id uint64) + func (s *SeriesIDSet) And(other *SeriesIDSet) *SeriesIDSet + func (s *SeriesIDSet) AndNot(other *SeriesIDSet) *SeriesIDSet + func (s *SeriesIDSet) Bytes() int + func (s *SeriesIDSet) Cardinality() uint64 + func (s *SeriesIDSet) Clear() + func (s *SeriesIDSet) ClearNoLock() + func (s *SeriesIDSet) Clone() *SeriesIDSet + func (s *SeriesIDSet) CloneNoLock() *SeriesIDSet + func (s *SeriesIDSet) Contains(id uint64) bool + func (s *SeriesIDSet) ContainsNoLock(id uint64) bool + func (s *SeriesIDSet) Diff(other *SeriesIDSet) + func (s *SeriesIDSet) Equals(other *SeriesIDSet) bool + func (s *SeriesIDSet) ForEach(f func(id uint64)) + func (s *SeriesIDSet) ForEachNoLock(f func(id uint64)) + func (s *SeriesIDSet) Iterator() SeriesIDSetIterable + func (s *SeriesIDSet) Merge(others ...*SeriesIDSet) + func (s *SeriesIDSet) MergeInPlace(other *SeriesIDSet) + func (s *SeriesIDSet) Remove(id uint64) + func (s *SeriesIDSet) RemoveNoLock(id uint64) + func (s *SeriesIDSet) Slice() []uint64 + func (s *SeriesIDSet) String() string + func (s *SeriesIDSet) UnmarshalBinary(data []byte) error + func (s *SeriesIDSet) UnmarshalBinaryUnsafe(data []byte) error + func (s *SeriesIDSet) WriteTo(w io.Writer) (int64, error) + type SeriesIDSetIterable interface + HasNext func() bool + Next func() uint32 + type SeriesIDSetIterator interface + SeriesIDSet func() *SeriesIDSet + func NewSeriesIDSetIterator(ss *SeriesIDSet) SeriesIDSetIterator + func NewSeriesIDSetIterators(itrs []SeriesIDIterator) []SeriesIDSetIterator + type SeriesIDSets interface 
+ ForEach func(f func(ids *SeriesIDSet)) error + type SeriesIDSliceIterator struct + func NewSeriesIDSliceIterator(ids []uint64) *SeriesIDSliceIterator + func (itr *SeriesIDSliceIterator) Close() error + func (itr *SeriesIDSliceIterator) Next() (SeriesIDElem, error) + func (itr *SeriesIDSliceIterator) SeriesIDSet() *SeriesIDSet + type SeriesIndex struct + func NewSeriesIndex(path string) *SeriesIndex + func (idx *SeriesIndex) Clone() *SeriesIndex + func (idx *SeriesIndex) Close() (err error) + func (idx *SeriesIndex) Count() uint64 + func (idx *SeriesIndex) Delete(id uint64) + func (idx *SeriesIndex) FindIDByNameTags(segments []*SeriesSegment, name []byte, tags models.Tags, buf []byte) uint64 + func (idx *SeriesIndex) FindIDBySeriesKey(segments []*SeriesSegment, key []byte) uint64 + func (idx *SeriesIndex) FindIDListByNameTags(segments []*SeriesSegment, names [][]byte, tagsSlice []models.Tags, buf []byte) (ids []uint64, ok bool) + func (idx *SeriesIndex) FindOffsetByID(id uint64) int64 + func (idx *SeriesIndex) InMemCount() uint64 + func (idx *SeriesIndex) Insert(key []byte, id uint64, offset int64) + func (idx *SeriesIndex) IsDeleted(id uint64) bool + func (idx *SeriesIndex) OnDiskCount() uint64 + func (idx *SeriesIndex) Open() (err error) + func (idx *SeriesIndex) Recover(segments []*SeriesSegment) error + type SeriesIndexHeader struct + Capacity int64 + Count uint64 + IDOffsetMap struct{ ... } + KeyIDMap struct{ ... 
} + MaxOffset int64 + MaxSeriesID uint64 + Version uint8 + func NewSeriesIndexHeader() SeriesIndexHeader + func ReadSeriesIndexHeader(data []byte) (hdr SeriesIndexHeader, err error) + func (hdr *SeriesIndexHeader) WriteTo(w io.Writer) (n int64, err error) + type SeriesIterator interface + Close func() error + Next func() (SeriesElem, error) + func NewSeriesIteratorAdapter(sfile *SeriesFile, itr SeriesIDIterator) SeriesIterator + type SeriesPartition struct + CompactThreshold int + Logger *zap.Logger + func NewSeriesPartition(id int, path string) *SeriesPartition + func (p *SeriesPartition) AppendSeriesIDs(a []uint64) []uint64 + func (p *SeriesPartition) Close() (err error) + func (p *SeriesPartition) Compacting() bool + func (p *SeriesPartition) CreateSeriesListIfNotExists(keys [][]byte, keyPartitionIDs []int, ids []uint64) error + func (p *SeriesPartition) DeleteSeriesID(id uint64) error + func (p *SeriesPartition) DisableCompactions() + func (p *SeriesPartition) EnableCompactions() + func (p *SeriesPartition) FindIDBySeriesKey(key []byte) uint64 + func (p *SeriesPartition) ID() int + func (p *SeriesPartition) IndexPath() string + func (p *SeriesPartition) IsDeleted(id uint64) bool + func (p *SeriesPartition) Open() error + func (p *SeriesPartition) Path() string + func (p *SeriesPartition) Series(id uint64) ([]byte, models.Tags) + func (p *SeriesPartition) SeriesCount() uint64 + func (p *SeriesPartition) SeriesKey(id uint64) []byte + type SeriesPartitionCompactor struct + func NewSeriesPartitionCompactor() *SeriesPartitionCompactor + func (c *SeriesPartitionCompactor) Compact(p *SeriesPartition) error + type SeriesSegment struct + func CloneSeriesSegments(a []*SeriesSegment) []*SeriesSegment + func CreateSeriesSegment(id uint16, path string) (*SeriesSegment, error) + func FindSegment(a []*SeriesSegment, id uint16) *SeriesSegment + func NewSeriesSegment(id uint16, path string) *SeriesSegment + func (s *SeriesSegment) AppendSeriesIDs(a []uint64) []uint64 + func (s 
*SeriesSegment) CanWrite(data []byte) bool + func (s *SeriesSegment) Clone() *SeriesSegment + func (s *SeriesSegment) Close() (err error) + func (s *SeriesSegment) CloseForWrite() (err error) + func (s *SeriesSegment) Data() []byte + func (s *SeriesSegment) Flush() error + func (s *SeriesSegment) ForEachEntry(fn func(flag uint8, id uint64, offset int64, key []byte) error) error + func (s *SeriesSegment) ID() uint16 + func (s *SeriesSegment) InitForWrite() (err error) + func (s *SeriesSegment) MaxSeriesID() uint64 + func (s *SeriesSegment) Open() error + func (s *SeriesSegment) Size() int64 + func (s *SeriesSegment) Slice(pos uint32) []byte + func (s *SeriesSegment) WriteLogEntry(data []byte) (offset int64, err error) + type SeriesSegmentHeader struct + Version uint8 + func NewSeriesSegmentHeader() SeriesSegmentHeader + func ReadSeriesSegmentHeader(data []byte) (hdr SeriesSegmentHeader, err error) + func (hdr *SeriesSegmentHeader) WriteTo(w io.Writer) (n int64, err error) + type Shard struct + CompactionDisabled bool + EnableOnOpen bool + func NewShard(id uint64, path string, walPath string, sfile *SeriesFile, opt EngineOptions) *Shard + func (s *Shard) Backup(w io.Writer, basePath string, since time.Time) error + func (s *Shard) Close() error + func (s *Shard) CreateCursorIterator(ctx context.Context) (CursorIterator, error) + func (s *Shard) CreateIterator(ctx context.Context, m *influxql.Measurement, opt query.IteratorOptions) (query.Iterator, error) + func (s *Shard) CreateSeriesCursor(ctx context.Context, req SeriesCursorRequest, cond influxql.Expr) (SeriesCursor, error) + func (s *Shard) CreateSnapshot() (string, error) + func (s *Shard) Database() string + func (s *Shard) DeleteMeasurement(name []byte) error + func (s *Shard) DeleteSeriesRange(itr SeriesIterator, min, max int64) error + func (s *Shard) DeleteSeriesRangeWithPredicate(itr SeriesIterator, ...) 
error + func (s *Shard) Digest() (io.ReadCloser, int64, error) + func (s *Shard) DiskSize() (int64, error) + func (s *Shard) Engine() (Engine, error) + func (s *Shard) Export(w io.Writer, basePath string, start time.Time, end time.Time) error + func (s *Shard) FieldDimensions(measurements []string) (fields map[string]influxql.DataType, dimensions map[string]struct{}, err error) + func (s *Shard) ForEachMeasurementName(fn func(name []byte) error) error + func (s *Shard) Free() error + func (s *Shard) ID() uint64 + func (s *Shard) Import(r io.Reader, basePath string) error + func (s *Shard) Index() (Index, error) + func (s *Shard) IndexType() string + func (s *Shard) IsIdle() bool + func (s *Shard) LastModified() time.Time + func (s *Shard) MeasurementExists(name []byte) (bool, error) + func (s *Shard) MeasurementFields(name []byte) *MeasurementFields + func (s *Shard) MeasurementNamesByRegex(re *regexp.Regexp) ([][]byte, error) + func (s *Shard) MeasurementTagKeyValuesByExpr(auth query.Authorizer, name []byte, key []string, expr influxql.Expr, ...) 
([][]string, error) + func (s *Shard) MeasurementTagKeysByExpr(name []byte, expr influxql.Expr) (map[string]struct{}, error) + func (s *Shard) MeasurementsSketches() (estimator.Sketch, estimator.Sketch, error) + func (s *Shard) Open() error + func (s *Shard) Path() string + func (s *Shard) Restore(r io.Reader, basePath string) error + func (s *Shard) RetentionPolicy() string + func (s *Shard) ScheduleFullCompaction() error + func (s *Shard) SeriesFile() (*SeriesFile, error) + func (s *Shard) SeriesN() int64 + func (s *Shard) SeriesSketches() (estimator.Sketch, estimator.Sketch, error) + func (s *Shard) SetCompactionsEnabled(enabled bool) + func (s *Shard) SetEnabled(enabled bool) + func (s *Shard) Statistics(tags map[string]string) []models.Statistic + func (s *Shard) TagKeyCardinality(name, key []byte) int + func (s *Shard) WithLogger(log *zap.Logger) + func (s *Shard) WritePoints(points []models.Point) error + func (s *Shard) WriteTo(w io.Writer) (int64, error) + type ShardError struct + Err error + func (e ShardError) Error() string + type ShardGroup interface + CreateIterator func(ctx context.Context, measurement *influxql.Measurement, ...) 
(query.Iterator, error) + ExpandSources func(sources influxql.Sources) (influxql.Sources, error) + FieldDimensions func(measurements []string) (fields map[string]influxql.DataType, dimensions map[string]struct{}, err error) + FieldKeysByMeasurement func(name []byte) []string + IteratorCost func(measurement string, opt query.IteratorOptions) (query.IteratorCost, error) + MapType func(measurement, field string) influxql.DataType + MeasurementsByRegex func(re *regexp.Regexp) []string + type ShardStatistics struct + BytesWritten int64 + DiskBytes int64 + FieldsCreated int64 + WritePointsDropped int64 + WritePointsErr int64 + WritePointsOK int64 + WriteReq int64 + WriteReqErr int64 + WriteReqOK int64 + type Shards []*Shard + func (a Shards) CallType(name string, args []influxql.DataType) (influxql.DataType, error) + func (a Shards) CreateIterator(ctx context.Context, measurement *influxql.Measurement, ...) (query.Iterator, error) + func (a Shards) CreateSeriesCursor(ctx context.Context, req SeriesCursorRequest, cond influxql.Expr) (_ SeriesCursor, err error) + func (a Shards) ExpandSources(sources influxql.Sources) (influxql.Sources, error) + func (a Shards) FieldDimensions(measurements []string) (fields map[string]influxql.DataType, dimensions map[string]struct{}, err error) + func (a Shards) FieldKeysByMeasurement(name []byte) []string + func (a Shards) IteratorCost(measurement string, opt query.IteratorOptions) (query.IteratorCost, error) + func (a Shards) Len() int + func (a Shards) Less(i, j int) bool + func (a Shards) MapType(measurement, field string) influxql.DataType + func (a Shards) MeasurementsByRegex(re *regexp.Regexp) []string + func (a Shards) Swap(i, j int) + type Store struct + EngineOptions EngineOptions + Logger *zap.Logger + SeriesFileMaxSize int64 + func NewStore(path string) *Store + func (s *Store) BackupShard(id uint64, since time.Time, w io.Writer) error + func (s *Store) Close() error + func (s *Store) CreateShard(database, retentionPolicy 
string, shardID uint64, enabled bool) error + func (s *Store) CreateShardSnapshot(id uint64) (string, error) + func (s *Store) Databases() []string + func (s *Store) DeleteDatabase(name string) error + func (s *Store) DeleteMeasurement(database, name string) error + func (s *Store) DeleteRetentionPolicy(database, name string) error + func (s *Store) DeleteSeries(database string, sources []influxql.Source, condition influxql.Expr) error + func (s *Store) DeleteShard(shardID uint64) error + func (s *Store) DiskSize() (int64, error) + func (s *Store) ExpandSources(sources influxql.Sources) (influxql.Sources, error) + func (s *Store) ExportShard(id uint64, start time.Time, end time.Time, w io.Writer) error + func (s *Store) ImportShard(id uint64, r io.Reader) error + func (s *Store) IndexBytes() int + func (s *Store) MeasurementNames(auth query.Authorizer, database string, cond influxql.Expr) ([][]byte, error) + func (s *Store) MeasurementSeriesCounts(database string) (measuments int, series int) + func (s *Store) MeasurementsCardinality(database string) (int64, error) + func (s *Store) MeasurementsSketches(database string) (estimator.Sketch, estimator.Sketch, error) + func (s *Store) Open() error + func (s *Store) Path() string + func (s *Store) RestoreShard(id uint64, r io.Reader) error + func (s *Store) SeriesCardinality(database string) (int64, error) + func (s *Store) SeriesSketches(database string) (estimator.Sketch, estimator.Sketch, error) + func (s *Store) SetShardEnabled(shardID uint64, enabled bool) error + func (s *Store) Shard(id uint64) *Shard + func (s *Store) ShardDigest(id uint64) (io.ReadCloser, int64, error) + func (s *Store) ShardGroup(ids []uint64) ShardGroup + func (s *Store) ShardIDs() []uint64 + func (s *Store) ShardN() int + func (s *Store) ShardRelativePath(id uint64) (string, error) + func (s *Store) Shards(ids []uint64) []*Shard + func (s *Store) Statistics(tags map[string]string) []models.Statistic + func (s *Store) TagKeys(auth 
query.Authorizer, shardIDs []uint64, cond influxql.Expr) ([]TagKeys, error) + func (s *Store) TagValues(auth query.Authorizer, shardIDs []uint64, cond influxql.Expr) ([]TagValues, error) + func (s *Store) WithLogger(log *zap.Logger) + func (s *Store) WriteToShard(shardID uint64, points []models.Point) error + type StringArray = cursors.StringArray + func NewStringArrayLen(sz int) *StringArray + type StringArrayCursor = cursors.StringArrayCursor + type TagKeyIterator interface + Close func() error + Next func() ([]byte, error) + func MergeTagKeyIterators(itrs ...TagKeyIterator) TagKeyIterator + type TagKeyIterators []TagKeyIterator + func (a TagKeyIterators) Close() (err error) + type TagKeys struct + Keys []string + Measurement string + type TagKeysSlice []TagKeys + func (a TagKeysSlice) Len() int + func (a TagKeysSlice) Less(i, j int) bool + func (a TagKeysSlice) Swap(i, j int) + type TagValueIterator interface + Close func() error + Next func() ([]byte, error) + func MergeTagValueIterators(itrs ...TagValueIterator) TagValueIterator + type TagValueIterators []TagValueIterator + func (a TagValueIterators) Close() (err error) + type TagValues struct + Measurement string + Values []KeyValue + type TagValuesSlice []TagValues + func (a TagValuesSlice) Len() int + func (a TagValuesSlice) Less(i, j int) bool + func (a TagValuesSlice) Swap(i, j int) + type UnsignedArray = cursors.UnsignedArray + func NewUnsignedArrayLen(sz int) *UnsignedArray + type UnsignedArrayCursor = cursors.UnsignedArrayCursor