Documentation ¶
Overview ¶
An LRU cache aimed at high concurrency
Index ¶
- Variables
- type Cache
- func (c *Cache) Clear()
- func (c *Cache) Delete(key string) bool
- func (c *Cache) DeleteFunc(matches func(key string, item *Item) bool) int
- func (c *Cache) DeletePrefix(prefix string) int
- func (c *Cache) Fetch(key string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
- func (c *Cache) ForEachFunc(matches func(key string, item *Item) bool)
- func (c *Cache) Get(key string) *Item
- func (c *Cache) GetDropped() int
- func (c *Cache) ItemCount() int
- func (c *Cache) Replace(key string, value interface{}) bool
- func (c *Cache) Set(key string, value interface{}, duration time.Duration)
- func (c *Cache) SetMaxSize(size int64)
- func (c *Cache) Stop()
- func (c *Cache) TrackingGet(key string) TrackedItem
- func (c *Cache) TrackingSet(key string, value interface{}, duration time.Duration) TrackedItem
- type Configuration
- func (c *Configuration) Buckets(count uint32) *Configuration
- func (c *Configuration) DeleteBuffer(size uint32) *Configuration
- func (c *Configuration) GetsPerPromote(count int32) *Configuration
- func (c *Configuration) ItemsToPrune(count uint32) *Configuration
- func (c *Configuration) MaxSize(max int64) *Configuration
- func (c *Configuration) OnDelete(callback func(item *Item)) *Configuration
- func (c *Configuration) PromoteBuffer(size uint32) *Configuration
- func (c *Configuration) Track() *Configuration
- type Item
- type LayeredCache
- func (c *LayeredCache) Clear()
- func (c *LayeredCache) Delete(primary, secondary string) bool
- func (c *LayeredCache) DeleteAll(primary string) bool
- func (c *LayeredCache) DeleteFunc(primary string, matches func(key string, item *Item) bool) int
- func (c *LayeredCache) DeletePrefix(primary, prefix string) int
- func (c *LayeredCache) Fetch(primary, secondary string, duration time.Duration, ...) (*Item, error)
- func (c *LayeredCache) ForEachFunc(primary string, matches func(key string, item *Item) bool)
- func (c *LayeredCache) Get(primary, secondary string) *Item
- func (c *LayeredCache) GetDropped() int
- func (c *LayeredCache) GetOrCreateSecondaryCache(primary string) *SecondaryCache
- func (c *LayeredCache) ItemCount() int
- func (c *LayeredCache) Replace(primary, secondary string, value interface{}) bool
- func (c *LayeredCache) Set(primary, secondary string, value interface{}, duration time.Duration)
- func (c *LayeredCache) SetMaxSize(size int64)
- func (c *LayeredCache) Stop()
- func (c *LayeredCache) TrackingGet(primary, secondary string) TrackedItem
- func (c *LayeredCache) TrackingSet(primary, secondary string, value interface{}, duration time.Duration) TrackedItem
- type SecondaryCache
- func (s *SecondaryCache) Delete(secondary string) bool
- func (s *SecondaryCache) Fetch(secondary string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
- func (s *SecondaryCache) Get(secondary string) *Item
- func (s *SecondaryCache) Replace(secondary string, value interface{}) bool
- func (s *SecondaryCache) Set(secondary string, value interface{}, duration time.Duration) *Item
- func (c *SecondaryCache) TrackingGet(secondary string) TrackedItem
- type Sized
- type TrackedItem
Constants ¶
This section is empty.
Variables ¶
var NilTracked = new(nilItem)
Functions ¶
This section is empty.
Types ¶
type Cache ¶
type Cache struct {
	*Configuration
	// contains filtered or unexported fields
}
func New ¶
func New(config *Configuration) *Cache
Create a new cache with the specified configuration. See ccache.Configure() for creating a configuration.
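An illustrative sketch of creating and using a cache; the module path (github.com/karlseguin/ccache/v2) and the key and value shown are assumptions for the example, not part of this API.

    package main

    import (
        "fmt"
        "time"

        "github.com/karlseguin/ccache/v2" // assumed module path for this interface{}-based API
    )

    func main() {
        // Cap the cache's size at 1000; see Configure() for the other options.
        cache := ccache.New(ccache.Configure().MaxSize(1000))
        defer cache.Stop()

        cache.Set("user:4", "goku", 10*time.Minute)
        if item := cache.Get("user:4"); item != nil {
            fmt.Println(item.Value().(string)) // values are stored as interface{}
        }
    }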
func (*Cache) Delete ¶
func (c *Cache) Delete(key string) bool
Remove the item from the cache, return true if the item was present, false otherwise.
func (*Cache) DeleteFunc ¶ added in v2.0.7
func (c *Cache) DeleteFunc(matches func(key string, item *Item) bool) int
Deletes all items for which the matches func evaluates to true.
func (*Cache) DeletePrefix ¶
func (c *Cache) DeletePrefix(prefix string) int
func (*Cache) Fetch ¶
func (c *Cache) Fetch(key string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
Attempts to get the value from the cache and calls fetch on a miss (missing or stale item). If fetch returns an error, no value is cached and the error is returned back to the caller.
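A sketch of the fetch-on-miss pattern; loadUser is a hypothetical stand-in for whatever expensive lookup is being cached, and the module path is assumed.

    package main

    import (
        "fmt"
        "time"

        "github.com/karlseguin/ccache/v2" // assumed module path
    )

    // loadUser is a hypothetical slow lookup whose result should be cached.
    func loadUser(id string) (interface{}, error) {
        return "user record " + id, nil
    }

    func main() {
        cache := ccache.New(ccache.Configure())
        defer cache.Stop()

        // Returns the cached item if present and fresh; otherwise runs the
        // callback, caches its result for a minute, and returns it.
        item, err := cache.Fetch("user:4", time.Minute, func() (interface{}, error) {
            return loadUser("4")
        })
        if err != nil {
            fmt.Println("lookup failed:", err) // callback error: nothing was cached
            return
        }
        fmt.Println(item.Value())
    }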
func (*Cache) ForEachFunc ¶ added in v2.0.8
func (c *Cache) ForEachFunc(matches func(key string, item *Item) bool)
func (*Cache) Get ¶
func (c *Cache) Get(key string) *Item
Get an item from the cache. Returns nil if the item wasn't found. This can return an expired item. Use item.Expired() to see if the item is expired and item.TTL() to see how long until the item expires (which will be negative for an already expired item).
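Because Get can hand back a stale entry, a common pattern is to check Expired() (or TTL()) before trusting the value; a brief sketch with an assumed key and module path:

    package main

    import (
        "fmt"
        "time"

        "github.com/karlseguin/ccache/v2" // assumed module path
    )

    func main() {
        cache := ccache.New(ccache.Configure())
        defer cache.Stop()

        cache.Set("session:abc", "token", time.Second)
        time.Sleep(2 * time.Second)

        switch item := cache.Get("session:abc"); {
        case item == nil:
            fmt.Println("not in the cache")
        case item.Expired():
            fmt.Println("stale; expired", -item.TTL(), "ago") // TTL() is negative once expired
        default:
            fmt.Println("fresh value:", item.Value())
        }
    }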
func (*Cache) GetDropped ¶
func (c *Cache) GetDropped() int
Gets the number of items removed from the cache due to memory pressure since the last time GetDropped was called.
func (*Cache) Replace ¶
func (c *Cache) Replace(key string, value interface{}) bool
Replace the value if it exists; does not set it if it doesn't. Returns true if the item existed and was replaced, false otherwise. Replace does not reset the item's TTL.
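A short sketch of Replace's conditional behavior, using assumed keys and module path:

    package main

    import (
        "fmt"
        "time"

        "github.com/karlseguin/ccache/v2" // assumed module path
    )

    func main() {
        cache := ccache.New(ccache.Configure())
        defer cache.Stop()

        cache.Set("power", 9000, time.Minute)
        fmt.Println(cache.Replace("power", 9001)) // true: value swapped, TTL left alone
        fmt.Println(cache.Replace("absent", 1))   // false: key not present, nothing stored
    }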
func (*Cache) SetMaxSize ¶
func (c *Cache) SetMaxSize(size int64)
Sets a new max size. This can result in a GC being run if the new maximum size is smaller than the cached size.
func (*Cache) Stop ¶
func (c *Cache) Stop()
Stops the background worker. Operations performed on the cache after Stop is called are likely to panic
func (*Cache) TrackingGet ¶
func (c *Cache) TrackingGet(key string) TrackedItem
Used when the cache was created with the Track() configuration option. Avoid otherwise
func (*Cache) TrackingSet ¶ added in v2.0.7
func (c *Cache) TrackingSet(key string, value interface{}, duration time.Duration) TrackedItem
Used when the cache was created with the Track() configuration option. Sets the item, and returns a tracked reference to it.
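A sketch of the tracking workflow: the cache has to be built with Track(), and every TrackedItem should be released once it is no longer needed. The keys and module path here are assumptions.

    package main

    import (
        "fmt"
        "time"

        "github.com/karlseguin/ccache/v2" // assumed module path
    )

    func main() {
        // Tracking only has an effect when the cache is configured with Track().
        cache := ccache.New(ccache.Configure().Track())
        defer cache.Stop()

        item := cache.TrackingSet("config", "v1", time.Minute)
        fmt.Println(item.Value())
        item.Release() // until released, the entry is protected from eviction

        // A miss returns NilTracked; releasing it is a safe no-op.
        cache.TrackingGet("missing").Release()
    }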
type Configuration ¶
type Configuration struct {
// contains filtered or unexported fields
}
func Configure ¶
func Configure() *Configuration
Creates a configuration object with sensible defaults. Use this as the start of the fluent configuration, e.g. ccache.New(ccache.Configure().MaxSize(10000)).
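A sketch of chaining several options off Configure(); the particular values are arbitrary, the module path is assumed, and any of the options documented below can be mixed into the same chain.

    package main

    import (
        "github.com/karlseguin/ccache/v2" // assumed module path
    )

    func main() {
        config := ccache.Configure().
            MaxSize(10000).    // maximum size of the cache
            ItemsToPrune(100). // items removed per pruning pass when memory is low
            Buckets(32).       // must be a power of 2
            GetsPerPromote(5)  // Gets required before an item is promoted
        cache := ccache.New(config)
        defer cache.Stop()
    }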
func (*Configuration) Buckets ¶
func (c *Configuration) Buckets(count uint32) *Configuration
Keys are hashed modulo the bucket count to provide greater concurrency (every set requires a write lock on the bucket). Must be a power of 2 (1, 2, 4, 8, 16, ...) [16]
func (*Configuration) DeleteBuffer ¶
func (c *Configuration) DeleteBuffer(size uint32) *Configuration
The size of the queue for items which should be deleted. If the queue fills up, calls to Delete() will block
func (*Configuration) GetsPerPromote ¶
func (c *Configuration) GetsPerPromote(count int32) *Configuration
Given a large cache with a high read/write ratio, it's usually unnecessary to promote an item on every Get. GetsPerPromote specifies the number of Gets a key must have before being promoted [3]
func (*Configuration) ItemsToPrune ¶
func (c *Configuration) ItemsToPrune(count uint32) *Configuration
The number of items to prune when memory is low [500]
func (*Configuration) MaxSize ¶
func (c *Configuration) MaxSize(max int64) *Configuration
The max size for the cache [5000]
func (*Configuration) OnDelete ¶
func (c *Configuration) OnDelete(callback func(item *Item)) *Configuration
OnDelete allows setting a callback function to react to item deletion. This is typically used to clean up resources, such as calling Close() on cached objects that require some kind of tear-down.
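A sketch of using OnDelete to tear down resources when an entry leaves the cache; the cachedConn type, its Close method, and the module path are hypothetical assumptions.

    package main

    import (
        "fmt"
        "time"

        "github.com/karlseguin/ccache/v2" // assumed module path
    )

    // cachedConn is a hypothetical resource that needs explicit cleanup.
    type cachedConn struct{ name string }

    func (c *cachedConn) Close() { fmt.Println("closing", c.name) }

    func main() {
        config := ccache.Configure().OnDelete(func(item *ccache.Item) {
            // Called when an item is deleted or evicted; release its resources here.
            if conn, ok := item.Value().(*cachedConn); ok {
                conn.Close()
            }
        })
        cache := ccache.New(config)
        defer cache.Stop()

        cache.Set("conn:1", &cachedConn{name: "conn:1"}, time.Minute)
        cache.Delete("conn:1")

        // Deletes are queued for the background worker (see DeleteBuffer), so give
        // it a moment to run the callback before the program exits.
        time.Sleep(10 * time.Millisecond)
    }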
func (*Configuration) PromoteBuffer ¶
func (c *Configuration) PromoteBuffer(size uint32) *Configuration
The size of the queue for items which should be promoted. If the queue fills up, promotions are skipped [1024]
func (*Configuration) Track ¶
func (c *Configuration) Track() *Configuration
By turning tracking on and using the cache's TrackingGet, the cache won't evict items which you haven't called Release() on. It's a simple reference counter.
type LayeredCache ¶
type LayeredCache struct {
	*Configuration
	// contains filtered or unexported fields
}
func Layered ¶
func Layered(config *Configuration) *LayeredCache
Create a new layered cache with the specified configuration. See ccache.Configure() for creating a configuration.
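A sketch of the layered cache's two-level keys; the primary/secondary scheme shown (a user id with several per-user entries) and the module path are only illustrations.

    package main

    import (
        "fmt"
        "time"

        "github.com/karlseguin/ccache/v2" // assumed module path
    )

    func main() {
        cache := ccache.Layered(ccache.Configure())
        defer cache.Stop()

        // Several secondary entries live under a single primary key.
        cache.Set("user:4", "profile", "goku's profile", time.Minute)
        cache.Set("user:4", "avatar:small", "goku-32.png", time.Minute)

        if item := cache.Get("user:4", "avatar:small"); item != nil {
            fmt.Println(item.Value())
        }

        // Drop everything belonging to the primary key in one call.
        cache.DeleteAll("user:4")
    }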
func (*LayeredCache) Delete ¶
func (c *LayeredCache) Delete(primary, secondary string) bool
Remove the item from the cache, return true if the item was present, false otherwise.
func (*LayeredCache) DeleteAll ¶
func (c *LayeredCache) DeleteAll(primary string) bool
Deletes all items that share the same primary key
func (*LayeredCache) DeleteFunc ¶ added in v2.0.7
func (c *LayeredCache) DeleteFunc(primary string, matches func(key string, item *Item) bool) int
Deletes all items that share the same primary key and for which the matches func evaluates to true.
func (*LayeredCache) DeletePrefix ¶ added in v2.0.7
func (c *LayeredCache) DeletePrefix(primary, prefix string) int
Deletes all items that share the same primary key and prefix.
func (*LayeredCache) Fetch ¶
func (c *LayeredCache) Fetch(primary, secondary string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
Attempts to get the value from the cache and calls fetch on a miss. If fetch returns an error, no value is cached and the error is returned back to the caller.
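The same fetch-on-miss pattern as Cache.Fetch, keyed by the primary/secondary pair; renderAvatar is a hypothetical loader and the module path is assumed.

    package main

    import (
        "fmt"
        "time"

        "github.com/karlseguin/ccache/v2" // assumed module path
    )

    // renderAvatar is a hypothetical expensive operation keyed by user and size.
    func renderAvatar(user, size string) (interface{}, error) {
        return user + "-" + size + ".png", nil
    }

    func main() {
        cache := ccache.Layered(ccache.Configure())
        defer cache.Stop()

        item, err := cache.Fetch("user:4", "small", time.Minute, func() (interface{}, error) {
            return renderAvatar("user:4", "small")
        })
        if err != nil {
            fmt.Println("render failed:", err)
            return
        }
        fmt.Println(item.Value())
    }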
func (*LayeredCache) ForEachFunc ¶ added in v2.0.8
func (c *LayeredCache) ForEachFunc(primary string, matches func(key string, item *Item) bool)
func (*LayeredCache) Get ¶
func (c *LayeredCache) Get(primary, secondary string) *Item
Get an item from the cache. Returns nil if the item wasn't found. This can return an expired item. Use item.Expired() to see if the item is expired and item.TTL() to see how long until the item expires (which will be negative for an already expired item).
func (*LayeredCache) GetDropped ¶
func (c *LayeredCache) GetDropped() int
Gets the number of items removed from the cache due to memory pressure since the last time GetDropped was called
func (*LayeredCache) GetOrCreateSecondaryCache ¶
func (c *LayeredCache) GetOrCreateSecondaryCache(primary string) *SecondaryCache
Get the secondary cache for a given primary key. This operation will never return nil. In the case where the primary key does not exist, a new, underlying, empty bucket will be created and returned.
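A sketch of working against the secondary cache directly once it has been pulled out of the layered cache; the keys and module path are assumptions.

    package main

    import (
        "fmt"
        "time"

        "github.com/karlseguin/ccache/v2" // assumed module path
    )

    func main() {
        cache := ccache.Layered(ccache.Configure())
        defer cache.Stop()

        // Never nil: an empty bucket is created for an unknown primary key.
        userCache := cache.GetOrCreateSecondaryCache("user:4")
        userCache.Set("profile", "goku's profile", time.Minute)

        if item := userCache.Get("profile"); item != nil {
            fmt.Println(item.Value())
        }

        // The same underlying bucket backs both views, so the entry is also
        // reachable through the layered cache.
        fmt.Println(cache.Get("user:4", "profile") != nil)
    }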
func (*LayeredCache) ItemCount ¶
func (c *LayeredCache) ItemCount() int
func (*LayeredCache) Replace ¶
func (c *LayeredCache) Replace(primary, secondary string, value interface{}) bool
Replace the value if it exists; does not set it if it doesn't. Returns true if the item existed and was replaced, false otherwise. Replace does not reset the item's TTL, nor does it alter its position in the LRU.
func (*LayeredCache) Set ¶
func (c *LayeredCache) Set(primary, secondary string, value interface{}, duration time.Duration)
Set the value in the cache for the specified duration
func (*LayeredCache) SetMaxSize ¶
func (c *LayeredCache) SetMaxSize(size int64)
Sets a new max size. This can result in a GC being run if the new maximum size is smaller than the cached size.
func (*LayeredCache) Stop ¶
func (c *LayeredCache) Stop()
func (*LayeredCache) TrackingGet ¶
func (c *LayeredCache) TrackingGet(primary, secondary string) TrackedItem
Used when the cache was created with the Track() configuration option. Avoid otherwise
func (*LayeredCache) TrackingSet ¶ added in v2.0.7
func (c *LayeredCache) TrackingSet(primary, secondary string, value interface{}, duration time.Duration) TrackedItem
Set the value in the cache for the specified duration and return a tracked reference to it.
type SecondaryCache ¶
type SecondaryCache struct {
// contains filtered or unexported fields
}
func (*SecondaryCache) Delete ¶
func (s *SecondaryCache) Delete(secondary string) bool
Delete a secondary key. The semantics are the same as for LayeredCache.Delete
func (*SecondaryCache) Fetch ¶
func (s *SecondaryCache) Fetch(secondary string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
Fetch or set a secondary key. The semantics are the same as for LayeredCache.Fetch
func (*SecondaryCache) Get ¶
func (s *SecondaryCache) Get(secondary string) *Item
Get the secondary key. The semantics are the same as for LayeredCache.Get
func (*SecondaryCache) Replace ¶
func (s *SecondaryCache) Replace(secondary string, value interface{}) bool
Replace a secondary key. The semantics are the same as for LayeredCache.Replace
func (*SecondaryCache) Set ¶
func (s *SecondaryCache) Set(secondary string, value interface{}, duration time.Duration) *Item
Set the secondary key to a value. The semantics are the same as for LayeredCache.Set
func (*SecondaryCache) TrackingGet ¶
func (c *SecondaryCache) TrackingGet(secondary string) TrackedItem
Track a secondary key. The semantics are the same as for LayeredCache.TrackingGet