hot

package module
v0.4.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 20, 2024 License: MIT Imports: 14 Imported by: 0

README

HOT - In-memory caching

tag Go Version GoDoc Build Status Go report Coverage Contributors License

HOT stands for Hot Object Tracker.

A feature-complete and blazing-fast caching library for Go.

💡 Features

  • 🚀 Fast, concurrent
  • 💫 Generics
  • 🗑️ Eviction policies: LRU, LFU, 2Q
  • ⏰ TTL with jitter
  • 🔄 Stale-while-revalidate
  • ❌ Missing key caching
  • 🍕 Sharded cache
  • 🔒 Optional locking
  • 🔗 Chain of data loaders with in-flight deduplication
  • 🌶️ Cache warmup
  • 📦 Batching all the way
  • 🧩 Composable caching strategy
  • 📝 Optional copy on read and/or write
  • 📊 Stat collection

🚀 Install

go get github.com/samber/hot

This library is v0 and follows SemVer strictly.

Some breaking changes might be made to exported APIs before v1.0.0.

🤠 Getting started

GoDoc: https://godoc.org/github.com/samber/hot

Simple LRU cache
import "github.com/samber/hot"

// Available eviction policies: hot.LRU, hot.LFU, hot.TwoQueue, hot.ARC
// Capacity: 100k keys/values
cache := hot.NewHotCache[string, int](hot.LRU, 100_000).
    Build()

cache.Set("hello", 42)
cache.SetMany(map[string]int{"foo": 1, "bar": 2})

values, missing := cache.GetMany([]string{"bar", "baz", "hello"})
// values: {"bar": 2, "hello": 42}
// missing: ["baz"]

value, found, _ := cache.Get("foo")
// value: 1
// found: true
Cache with remote data source

If a value is not available in the in-memory cache, it will be fetched from a database or any data source.

Concurrent calls to loaders are deduplicated by key.

import "github.com/samber/hot"

cache := hot.NewHotCache[string, *User](hot.LRU, 100_000).
    WithLoaders(func(keys []string) (found map[string]*User, err error) {
        rows, err := db.Query("SELECT * FROM users WHERE id IN (?)", keys)
        // ...
        return users, err
    }).
    Build()

user, found, err := cache.Get("user-123")
// might fail if "user-123" is not in cache and loader returns error

// get or create
user, found, err := cache.GetWithLoaders(
    "user-123",
    func(keys []string) (found map[string]*User, err error) {
        rows, err := db.Query("SELECT * FROM users WHERE id IN (?)", keys)
        // ...
        return users, err
    },
    func(keys []string) (found map[string]*User, err error) {
        rows, err := db.Query("INSERT INTO users (id, email) VALUES (?, ?)", id, email)
        // ...
        return users, err
    },
)
// either `err` is not nil, or `found` is true
Cache with expiration
import "github.com/samber/hot"

cache := hot.NewHotCache[string, int](hot.LRU, 100_000).
    WithTTL(1 * time.Minute).     // items will expire after 1 minute
    WithJitter(0.2).              // optional: a 20% variation in cache expiration duration (48s to 72s)
    WithJanitor(1 * time.Minute). // optional: background job will purge expired keys every minute
    Build()

cache.SetWithTTL("foo", 42, 10*time.Second) // shorter TTL for "foo" key

With cache revalidation:

loader := func(keys []string) (found map[string]*User, err error) {
    rows, err := db.Query("SELECT * FROM users WHERE id IN (?)", keys)
    // ...
    return users, err
}

cache := hot.NewHotCache[string, *User](hot.LRU, 100_000).
    WithTTL(1 * time.Minute).
    // Keep delivering cached entries for 5 more seconds, but refresh the value in the background.
    // Keys that are not fetched during the interval will be dropped anyway.
    // A timeout or error in loader will drop keys.
    WithRevalidation(5 * time.Second, loader).
    // On revalidation error, the cache entries are either kept or dropped.
    // Optional (default: drop)
    WithRevalidationErrorPolicy(hot.KeepOnError).
    Build()

If WithRevalidation is used without loaders, the ones provided in WithLoaders() or GetWithLoaders() are used.

🍱 Spec

hot.NewHotCache[K, V](algorithm hot.EvictionAlgorithm, capacity int).
    // Enables cache of missing keys. The missing cache is shared with the main cache.
    WithMissingSharedCache().
    // Enables cache of missing keys. The missing keys are stored in a separate cache.
    WithMissingCache(algorithm hot.EvictionAlgorithm, capacity int).
    // Sets the time-to-live for cache entries
    WithTTL(ttl time.Duration).
    // Sets the time after which the cache entry is considered stale and needs to be revalidated
    // * keys that are not fetched during the interval will be dropped anyway
    // * a timeout or error in loader will drop keys.
    // If no revalidation loader is added, the default loaders or the one used in GetWithLoaders() are used.
    WithRevalidation(stale time.Duration, loaders ...hot.Loader[K, V]).
    // Sets the policy to apply when a revalidation loader returns an error.
    // By default, the key is dropped from the cache.
    WithRevalidationErrorPolicy(policy revalidationErrorPolicy).
    // Randomizes the TTL.
    WithJitter(jitter float64).
    // Enables cache sharding.
    WithSharding(nbr uint64, fn sharded.Hasher[K]).
    // Preloads the cache with the provided data.
    WithWarmUp(fn func() (map[K]V, []K, error)).
    // Disables mutex for the cache and improves internal performances.
    WithoutLocking().
    // Enables the cache janitor.
    WithJanitor().
    // Sets the chain of loaders to use for cache misses.
    WithLoaders(loaders ...hot.Loader[K, V]).
    // Sets the callback to be called when an entry is evicted from the cache.
    // The callback is called synchronously and might block the cache operations if it is slow.
    // This implementation choice is subject to change. Please open an issue to discuss.
    WithEvictionCallback(hook func(key K, value V)).
    // Sets the function to copy the value on read.
    WithCopyOnRead(copyOnRead func(V) V).
    // Sets the function to copy the value on write.
    WithCopyOnWrite(copyOnWrite func(V) V).
    // Returns a HotCache[K, V].
    Build()

Available eviction algorithm:

hot.LRU
hot.LFU
hot.TwoQueue
hot.ARC

Loaders:

func loader(keys []K) (found map[K]V, err error) {
    // ...
}

Shard partitioner:

func hash(key K) uint64 {
    // ...
}

🏛️ Architecture

This project has been split into multiple layers to respect the separation of concerns.

Each cache layer implements the pkg/base.InMemoryCache[K, V] interface. Combining multiple encapsulation has a small cost (~1ns per call), but offers great customization.

We highly recommend using hot.HotCache[K, V] instead of lower layers.

Eviction policies

This project provides multiple eviction policies. Each implements the pkg/base.InMemoryCache[K, V] interface.

They are not protected against concurrent access. If safety is required, encapsulate it into pkg/safe.SafeInMemoryCache[K comparable, V any].

Packages:

  • pkg/lru
  • pkg/lfu
  • pkg/twoqueue
  • pkg/arc

Example:

cache := lru.NewLRUCache[string, *User](100_000)
Concurrent access

The hot.HotCache[K, V] offers protection against concurrent access by default. But in some cases, unnecessary locking might just slow down a program.

Low-level cache layers are not protected by default. Use the following encapsulation to bring safety:

import (
	"github.com/samber/hot/pkg/lru"
	"github.com/samber/hot/pkg/safe"
)

cache := safe.NewSafeInMemoryCache(
    lru.NewLRUCache[string, *User](100_000),
)
Sharded cache

A sharded cache might be useful in two scenarios:

  • highly concurrent application slowed down by cache locking -> 1 lock per shard instead of 1 global lock
  • highly parallel application with no concurrency -> no lock

The sharding key must not be too costly to compute and must offer a nice balance between shards. The hashing function must have func(k K) uint64 signature.

A sharded cache can be created via hot.HotCache[K, V] or using a low-level layer:

import (
    "hash/fnv"
    "github.com/samber/hot/pkg/base"
    "github.com/samber/hot/pkg/lru"
    "github.com/samber/hot/pkg/safe"
    "github.com/samber/hot/pkg/sharded"
)

cache := sharded.NewShardedInMemoryCache(
    1_000, // number of shards
    func() base.InMemoryCache[K, *item[V]] {
        return safe.NewSafeInMemoryCache(
            lru.NewLRUCache[string, *User](100_000),
        )
    },
    func(key string) uint64 {
        h := fnv.New64a()
        h.Write([]byte(key))
        return h.Sum64()
    },
)
Missing key caching

Instead of calling the loader chain every time an invalid key is requested, a "missing cache" can be enabled. Note that it won't protect your app against a DDoS attack with high cardinality keys.

If the missing keys are infrequent, sharing the missing cache with the main cache might be reasonable:

import "github.com/samber/hot"

cache := hot.NewHotCache[string, int](hot.LRU, 100_000).
    WithMissingSharedCache().
    Build()

If the missing keys are frequent, use a dedicated cache to prevent pollution of the main cache:

import "github.com/samber/hot"

cache := hot.NewHotCache[string, int](hot.LRU, 100_000).
    WithMissingCache(hot.LFU, 50_000).
    Build()

🏎️ Benchmark

// TODO: copy here the benchmarks of bench/ directory

// - compare libraries

// - measure encapsulation cost

// - measure lock cost

// - measure ttl cost

// - measure size.Of cost

// - measure stats collection cost

🤝 Contributing

Don't hesitate ;)

# Install some dev dependencies
make tools

# Run tests
make test
# or
make watch-test

👤 Contributors

Contributors

💫 Show your support

Give a ⭐️ if this project helped you!

GitHub Sponsors

📝 License

Copyright © 2024 Samuel Berthe.

This project is MIT licensed.

Documentation

Index

Constants

View Source
const (
	DropOnError revalidationErrorPolicy = iota
	KeepOnError
)

Variables

View Source
var DebounceRevalidationFactor = 0.2

Revalidation is done in batches.

Functions

This section is empty.

Types

type EvictionAlgorithm added in v0.2.0

type EvictionAlgorithm int
const (
	LRU EvictionAlgorithm = iota
	LFU
	TwoQueue
	ARC
)

type HotCache

type HotCache[K comparable, V any] struct {
	// contains filtered or unexported fields
}

func (*HotCache[K, V]) Algorithm

func (c *HotCache[K, V]) Algorithm() (string, string)

Algorithm returns the cache algo.

func (*HotCache[K, V]) Capacity

func (c *HotCache[K, V]) Capacity() (int, int)

Capacity returns the cache capacity.

func (*HotCache[K, V]) Delete

func (c *HotCache[K, V]) Delete(key K) bool

Delete removes a key from the cache.

func (*HotCache[K, V]) DeleteMany

func (c *HotCache[K, V]) DeleteMany(keys []K) map[K]bool

DeleteMany removes many keys from the cache.

func (*HotCache[K, V]) Get

func (c *HotCache[K, V]) Get(key K) (value V, found bool, err error)

Get returns a value from the cache, a boolean indicating whether the key was found and an error when loaders fail.

func (*HotCache[K, V]) GetMany

func (c *HotCache[K, V]) GetMany(keys []K) (values map[K]V, missing []K, err error)

GetMany returns many values from the cache, a slice of missing keys and an error when loaders fail.

func (*HotCache[K, V]) GetManyWithLoaders added in v0.3.2

func (c *HotCache[K, V]) GetManyWithLoaders(keys []K, loaders ...Loader[K, V]) (values map[K]V, missing []K, err error)

GetManyWithLoaders returns many values from the cache, a slice of missing keys and an error when loaders fail.

func (*HotCache[K, V]) GetWithLoaders added in v0.3.2

func (c *HotCache[K, V]) GetWithLoaders(key K, loaders ...Loader[K, V]) (value V, found bool, err error)

GetWithLoaders returns a value from the cache, a boolean indicating whether the key was found and an error when loaders fail.

func (*HotCache[K, V]) Has

func (c *HotCache[K, V]) Has(key K) bool

Has checks if a key exists in the cache. Missing values are not valid, even if cached.

func (*HotCache[K, V]) HasMany

func (c *HotCache[K, V]) HasMany(keys []K) map[K]bool

HasMany checks if keys exist in the cache. Missing values are not valid, even if cached.

func (*HotCache[K, V]) Janitor

func (c *HotCache[K, V]) Janitor()

Janitor runs a background goroutine to clean up the cache.

func (*HotCache[K, V]) Keys

func (c *HotCache[K, V]) Keys() []K

Keys returns all keys in the cache. Missing keys are not included.

func (*HotCache[K, V]) Len

func (c *HotCache[K, V]) Len() int

Len returns the number of items in the cache. Missing items are included.

func (*HotCache[K, V]) MustGet added in v0.2.0

func (c *HotCache[K, V]) MustGet(key K) (value V, found bool)

MustGet returns a value from the cache, a boolean indicating whether the key was found. It panics when loaders fail.

func (*HotCache[K, V]) MustGetMany added in v0.2.0

func (c *HotCache[K, V]) MustGetMany(keys []K) (values map[K]V, missing []K)

MustGetMany returns many values from the cache, a slice of missing keys. It panics when loaders fail.

func (*HotCache[K, V]) MustGetManyWithLoaders added in v0.3.2

func (c *HotCache[K, V]) MustGetManyWithLoaders(keys []K, loaders ...Loader[K, V]) (values map[K]V, missing []K)

MustGetManyWithLoaders returns many values from the cache, a slice of missing keys. It panics when loaders fail.

func (*HotCache[K, V]) MustGetWithLoaders added in v0.3.2

func (c *HotCache[K, V]) MustGetWithLoaders(key K, loaders ...Loader[K, V]) (value V, found bool)

MustGetWithLoaders returns a value from the cache, a boolean indicating whether the key was found. It panics when loaders fail.

func (*HotCache[K, V]) Peek

func (c *HotCache[K, V]) Peek(key K) (value V, ok bool)

Peek is similar to Get, but does not check expiration and does not call loaders/revalidation. Missing values are not returned, even if cached.

func (*HotCache[K, V]) PeekMany

func (c *HotCache[K, V]) PeekMany(keys []K) (map[K]V, []K)

PeekMany is similar to GetMany, but does not check expiration and does not call loaders/revalidation. Missing values are not returned, even if cached.

func (*HotCache[K, V]) Purge

func (c *HotCache[K, V]) Purge()

Purge removes all items from the cache.

func (*HotCache[K, V]) Range

func (c *HotCache[K, V]) Range(f func(K, V) bool)

Range calls a function for each key/value pair in the cache. The callback should be kept short because it is called while holding a read lock. @TODO: loop over missingCache? Use a different callback? Missing values are not included.

func (*HotCache[K, V]) Set

func (c *HotCache[K, V]) Set(key K, v V)

Set adds a value to the cache. If the key already exists, its value is updated. It uses the default ttl or none.

func (*HotCache[K, V]) SetMany

func (c *HotCache[K, V]) SetMany(items map[K]V)

SetMany adds many values to the cache. If the keys already exist, values are updated. It uses the default ttl or none.

func (*HotCache[K, V]) SetManyWithTTL

func (c *HotCache[K, V]) SetManyWithTTL(items map[K]V, ttl time.Duration)

SetManyWithTTL adds many values to the cache. If the keys already exist, values are updated. It uses the given ttl.

func (*HotCache[K, V]) SetMissing

func (c *HotCache[K, V]) SetMissing(key K)

SetMissing adds a key to the `missing` cache. If the key already exists, its value is dropped. It uses the default ttl or none.

func (*HotCache[K, V]) SetMissingMany

func (c *HotCache[K, V]) SetMissingMany(missingKeys []K)

SetMissingMany adds many keys to the cache. If the keys already exist, values are dropped. It uses the default ttl or none.

func (*HotCache[K, V]) SetMissingManyWithTTL

func (c *HotCache[K, V]) SetMissingManyWithTTL(missingKeys []K, ttl time.Duration)

SetMissingManyWithTTL adds many keys to the `missing` cache. If the keys already exist, values are dropped. It uses the given ttl.

func (*HotCache[K, V]) SetMissingWithTTL

func (c *HotCache[K, V]) SetMissingWithTTL(key K, ttl time.Duration)

SetMissingWithTTL adds a key to the `missing` cache. If the key already exists, its value is dropped. It uses the given ttl.

func (*HotCache[K, V]) SetWithTTL

func (c *HotCache[K, V]) SetWithTTL(key K, v V, ttl time.Duration)

SetWithTTL adds a value to the cache. If the key already exists, its value is updated. It uses the given ttl.

func (*HotCache[K, V]) StopJanitor

func (c *HotCache[K, V]) StopJanitor()

func (*HotCache[K, V]) Values

func (c *HotCache[K, V]) Values() []V

Values returns all values in the cache. Missing values are not included.

func (*HotCache[K, V]) WarmUp

func (c *HotCache[K, V]) WarmUp(loader func() (map[K]V, []K, error)) error

WarmUp loads all keys from the loader and sets them in the cache.

type HotCacheConfig

type HotCacheConfig[K comparable, V any] struct {
	// contains filtered or unexported fields
}

func NewHotCache

func NewHotCache[K comparable, V any](algorithm EvictionAlgorithm, capacity int) HotCacheConfig[K, V]

func (HotCacheConfig[K, V]) Build

func (cfg HotCacheConfig[K, V]) Build() *HotCache[K, V]

func (HotCacheConfig[K, V]) WithCopyOnRead

func (cfg HotCacheConfig[K, V]) WithCopyOnRead(copyOnRead func(V) V) HotCacheConfig[K, V]

WithCopyOnRead sets the function to copy the value on read.

func (HotCacheConfig[K, V]) WithCopyOnWrite

func (cfg HotCacheConfig[K, V]) WithCopyOnWrite(copyOnWrite func(V) V) HotCacheConfig[K, V]

WithCopyOnWrite sets the function to copy the value on write.

func (HotCacheConfig[K, V]) WithEvictionCallback added in v0.2.0

func (cfg HotCacheConfig[K, V]) WithEvictionCallback(onEviction base.EvictionCallback[K, V]) HotCacheConfig[K, V]

WithEvictionCallback sets the callback to be called when an entry is evicted from the cache. The callback is called synchronously and might block the cache operations if it is slow. This implementation choice is subject to change. Please open an issue to discuss.

func (HotCacheConfig[K, V]) WithJanitor

func (cfg HotCacheConfig[K, V]) WithJanitor() HotCacheConfig[K, V]

WithJanitor enables the cache janitor.

func (HotCacheConfig[K, V]) WithJitter

func (cfg HotCacheConfig[K, V]) WithJitter(jitter float64) HotCacheConfig[K, V]

WithJitter randomizes the TTL. It must be between 0 and 1.

func (HotCacheConfig[K, V]) WithLoaders

func (cfg HotCacheConfig[K, V]) WithLoaders(loaders ...Loader[K, V]) HotCacheConfig[K, V]

WithLoaders sets the chain of loaders to use for cache misses.

func (HotCacheConfig[K, V]) WithMissingCache

func (cfg HotCacheConfig[K, V]) WithMissingCache(algorithm EvictionAlgorithm, capacity int) HotCacheConfig[K, V]

WithMissingCache enables cache of missing keys. The missing keys are stored in a separate cache.

func (HotCacheConfig[K, V]) WithMissingSharedCache

func (cfg HotCacheConfig[K, V]) WithMissingSharedCache() HotCacheConfig[K, V]

WithMissingSharedCache enables cache of missing keys. The missing cache is shared with the main cache.

func (HotCacheConfig[K, V]) WithRevalidation

func (cfg HotCacheConfig[K, V]) WithRevalidation(stale time.Duration, loaders ...Loader[K, V]) HotCacheConfig[K, V]

WithRevalidation sets the time after which the cache entry is considered stale and needs to be revalidated. Keys that are not fetched during the interval will be dropped anyway. A timeout or error in loader will drop keys. If no revalidation loader is added, the default loaders or the one used in GetWithLoaders() are used.

func (HotCacheConfig[K, V]) WithRevalidationErrorPolicy added in v0.2.0

func (cfg HotCacheConfig[K, V]) WithRevalidationErrorPolicy(policy revalidationErrorPolicy) HotCacheConfig[K, V]

WithRevalidationErrorPolicy sets the policy to apply when a revalidation loader returns an error. By default, the key is dropped from the cache.

func (HotCacheConfig[K, V]) WithSharding

func (cfg HotCacheConfig[K, V]) WithSharding(nbr uint64, fn sharded.Hasher[K]) HotCacheConfig[K, V]

WithSharding enables cache sharding.

func (HotCacheConfig[K, V]) WithTTL

func (cfg HotCacheConfig[K, V]) WithTTL(ttl time.Duration) HotCacheConfig[K, V]

WithTTL sets the time-to-live for cache entries.

func (HotCacheConfig[K, V]) WithWarmUp

func (cfg HotCacheConfig[K, V]) WithWarmUp(fn func() (map[K]V, []K, error)) HotCacheConfig[K, V]

WithWarmUp preloads the cache with the provided data.

func (HotCacheConfig[K, V]) WithoutLocking

func (cfg HotCacheConfig[K, V]) WithoutLocking() HotCacheConfig[K, V]

WithoutLocking disables mutex for the cache and improves internal performances.

type Loader

type Loader[K comparable, V any] func(keys []K) (found map[K]V, err error)

type LoaderChain

type LoaderChain[K comparable, V any] []Loader[K, V]

Directories

Path Synopsis
pkg
arc
lfu
lru

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL