Documentation ¶
Overview ¶
An LRU cache aimed at high concurrency
Index ¶
- Variables
- type Cache
- func (c *Cache) Clear()
- func (c *Cache) Delete(key string) bool
- func (c *Cache) Fetch(key string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
- func (c *Cache) Get(key string) *Item
- func (c *Cache) ItemCount() int
- func (c *Cache) Replace(key string, value interface{}) bool
- func (c *Cache) Set(key string, value interface{}, duration time.Duration)
- func (c *Cache) Stop()
- func (c *Cache) TrackingGet(key string) TrackedItem
- type Configuration
- func (c *Configuration) Buckets(count uint32) *Configuration
- func (c *Configuration) DeleteBuffer(size uint32) *Configuration
- func (c *Configuration) GetsPerPromote(count int32) *Configuration
- func (c *Configuration) ItemsToPrune(count uint32) *Configuration
- func (c *Configuration) MaxSize(max int64) *Configuration
- func (c *Configuration) OnDelete(callback func(item *Item)) *Configuration
- func (c *Configuration) PromoteBuffer(size uint32) *Configuration
- func (c *Configuration) Track() *Configuration
- type Item
- type LayeredCache
- func (c *LayeredCache) Clear()
- func (c *LayeredCache) Delete(primary, secondary string) bool
- func (c *LayeredCache) DeleteAll(primary string) bool
- func (c *LayeredCache) Fetch(primary, secondary string, duration time.Duration, ...) (*Item, error)
- func (c *LayeredCache) Get(primary, secondary string) *Item
- func (c *LayeredCache) GetOrCreateSecondaryCache(primary string) *SecondaryCache
- func (c *LayeredCache) ItemCount() int
- func (c *LayeredCache) Replace(primary, secondary string, value interface{}) bool
- func (c *LayeredCache) Set(primary, secondary string, value interface{}, duration time.Duration)
- func (c *LayeredCache) Stop()
- func (c *LayeredCache) TrackingGet(primary, secondary string) TrackedItem
- type SecondaryCache
- func (s *SecondaryCache) Delete(secondary string) bool
- func (s *SecondaryCache) Fetch(secondary string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
- func (s *SecondaryCache) Get(secondary string) *Item
- func (s *SecondaryCache) Replace(secondary string, value interface{}) bool
- func (s *SecondaryCache) Set(secondary string, value interface{}, duration time.Duration) *Item
- func (c *SecondaryCache) TrackingGet(secondary string) TrackedItem
- type Sized
- type TrackedItem
Constants ¶
This section is empty.
Variables ¶
var NilTracked = new(nilItem)
Functions ¶
This section is empty.
Types ¶
type Cache ¶
type Cache struct { *Configuration // contains filtered or unexported fields }
func New ¶
func New(config *Configuration) *Cache
Create a new cache with the specified configuration. See ccache.Configure() for creating a configuration.
func (*Cache) Clear ¶
func (c *Cache) Clear()
This isn't thread safe. It's meant to be called from non-concurrent tests.
func (*Cache) Delete ¶
Remove the item from the cache, return true if the item was present, false otherwise.
func (*Cache) Fetch ¶
func (c *Cache) Fetch(key string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
Attempts to get the value from the cache and calls fetch on a miss (missing or stale item). If fetch returns an error, no value is cached and the error is returned back to the caller.
func (*Cache) Get ¶
Get an item from the cache. Returns nil if the item wasn't found. This can return an expired item. Use item.Expired() to see if the item is expired and item.TTL() to see how long until the item expires (which will be negative for an already expired item).
func (*Cache) Replace ¶
Replace the value if it exists, does not set if it doesn't. Returns true if the item existed and was replaced, false otherwise. Replace does not reset the item's TTL.
func (*Cache) Stop ¶
func (c *Cache) Stop()
Stops the background worker. Operations performed on the cache after Stop is called are likely to panic
func (*Cache) TrackingGet ¶
func (c *Cache) TrackingGet(key string) TrackedItem
Used when the cache was created with the Track() configuration option. Avoid otherwise
type Configuration ¶
type Configuration struct {
// contains filtered or unexported fields
}
func Configure ¶
func Configure() *Configuration
Creates a configuration object with sensible defaults. Use this as the start of the fluent configuration, e.g.: ccache.New(ccache.Configure().MaxSize(10000))
func (*Configuration) Buckets ¶
func (c *Configuration) Buckets(count uint32) *Configuration
Keys are hashed modulo the bucket count to provide greater concurrency (every set requires a write lock on the bucket). Must be a power of 2 (1, 2, 4, 8, 16, ...) [16]
func (*Configuration) DeleteBuffer ¶
func (c *Configuration) DeleteBuffer(size uint32) *Configuration
The size of the queue for items which should be deleted. If the queue fills up, calls to Delete() will block
func (*Configuration) GetsPerPromote ¶
func (c *Configuration) GetsPerPromote(count int32) *Configuration
Given a large cache with a high read / write ratio, it's usually unnecessary to promote an item on every Get. GetsPerPromote specifies the number of Gets a key must have before being promoted [3]
func (*Configuration) ItemsToPrune ¶
func (c *Configuration) ItemsToPrune(count uint32) *Configuration
The number of items to prune when memory is low [500]
func (*Configuration) MaxSize ¶
func (c *Configuration) MaxSize(max int64) *Configuration
The max size for the cache [5000]
func (*Configuration) OnDelete ¶
func (c *Configuration) OnDelete(callback func(item *Item)) *Configuration
OnDelete allows setting a callback function to react to item deletion. This typically allows to do a cleanup of resources, such as calling a Close() on cached objects that require some kind of tear-down.
func (*Configuration) PromoteBuffer ¶
func (c *Configuration) PromoteBuffer(size uint32) *Configuration
The size of the queue for items which should be promoted. If the queue fills up, promotions are skipped [1024]
func (*Configuration) Track ¶
func (c *Configuration) Track() *Configuration
By turning tracking on and using the cache's TrackingGet, the cache won't evict items which you haven't called Release() on. It's a simple reference counter.
type LayeredCache ¶
type LayeredCache struct { *Configuration // contains filtered or unexported fields }
func Layered ¶
func Layered(config *Configuration) *LayeredCache
See ccache.Configure() for creating a configuration
func (*LayeredCache) Clear ¶
func (c *LayeredCache) Clear()
This isn't thread safe. It's meant to be called from non-concurrent tests.
func (*LayeredCache) Delete ¶
func (c *LayeredCache) Delete(primary, secondary string) bool
Remove the item from the cache, return true if the item was present, false otherwise.
func (*LayeredCache) DeleteAll ¶
func (c *LayeredCache) DeleteAll(primary string) bool
Deletes all items that share the same primary key
func (*LayeredCache) Fetch ¶
func (c *LayeredCache) Fetch(primary, secondary string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
Attempts to get the value from the cache and calls fetch on a miss. If fetch returns an error, no value is cached and the error is returned back to the caller.
func (*LayeredCache) Get ¶
func (c *LayeredCache) Get(primary, secondary string) *Item
Get an item from the cache. Returns nil if the item wasn't found. This can return an expired item. Use item.Expired() to see if the item is expired and item.TTL() to see how long until the item expires (which will be negative for an already expired item).
func (*LayeredCache) GetOrCreateSecondaryCache ¶
func (c *LayeredCache) GetOrCreateSecondaryCache(primary string) *SecondaryCache
Get the secondary cache for a given primary key. This operation will never return nil. In the case where the primary key does not exist, a new, underlying, empty bucket will be created and returned.
func (*LayeredCache) ItemCount ¶
func (c *LayeredCache) ItemCount() int
func (*LayeredCache) Replace ¶
func (c *LayeredCache) Replace(primary, secondary string, value interface{}) bool
Replace the value if it exists, does not set if it doesn't. Returns true if the item existed and was replaced, false otherwise. Replace does not reset the item's TTL nor does it alter its position in the LRU.
func (*LayeredCache) Set ¶
func (c *LayeredCache) Set(primary, secondary string, value interface{}, duration time.Duration)
Set the value in the cache for the specified duration
func (*LayeredCache) Stop ¶
func (c *LayeredCache) Stop()
func (*LayeredCache) TrackingGet ¶
func (c *LayeredCache) TrackingGet(primary, secondary string) TrackedItem
Used when the cache was created with the Track() configuration option. Avoid otherwise
type SecondaryCache ¶
type SecondaryCache struct {
// contains filtered or unexported fields
}
func (*SecondaryCache) Delete ¶
func (s *SecondaryCache) Delete(secondary string) bool
Delete a secondary key. The semantics are the same as for LayeredCache.Delete
func (*SecondaryCache) Fetch ¶
func (s *SecondaryCache) Fetch(secondary string, duration time.Duration, fetch func() (interface{}, error)) (*Item, error)
Fetch or set a secondary key. The semantics are the same as for LayeredCache.Fetch
func (*SecondaryCache) Get ¶
func (s *SecondaryCache) Get(secondary string) *Item
Get the secondary key. The semantics are the same as for LayeredCache.Get
func (*SecondaryCache) Replace ¶
func (s *SecondaryCache) Replace(secondary string, value interface{}) bool
Replace a secondary key. The semantics are the same as for LayeredCache.Replace
func (*SecondaryCache) Set ¶
func (s *SecondaryCache) Set(secondary string, value interface{}, duration time.Duration) *Item
Set the secondary key to a value. The semantics are the same as for LayeredCache.Set
func (*SecondaryCache) TrackingGet ¶
func (c *SecondaryCache) TrackingGet(secondary string) TrackedItem
Track a secondary key. The semantics are the same as for LayeredCache.TrackingGet