switch options to using a structure instead of functions
paskal committed Aug 4, 2022
1 parent 76023f5 commit 523acfb
Showing 6 changed files with 57 additions and 63 deletions.
6 changes: 6 additions & 0 deletions .github/workflows/ci-v2.yml
@@ -4,7 +4,13 @@ on:
push:
branches:
tags:
paths:
- ".github/workflows/ci-v2.yml"
- "v2/**"
pull_request:
paths:
- ".github/workflows/ci-v2.yml"
- "v2/**"

jobs:
build:
6 changes: 6 additions & 0 deletions .github/workflows/ci.yml
@@ -4,7 +4,13 @@ on:
push:
branches:
tags:
paths-ignore:
- ".github/workflows/ci-v2.yml"
- "v2/**"
pull_request:
paths-ignore:
- ".github/workflows/ci-v2.yml"
- "v2/**"

jobs:
build:
4 changes: 2 additions & 2 deletions README.md
@@ -29,12 +29,12 @@ import (
"fmt"
"time"

"github.com/go-pkgz/expirable-cache"
"github.com/go-pkgz/expirable-cache/v2"
)

func main() {
// make cache with short TTL and 3 max keys
c, _ := cache.NewCache(cache.MaxKeys(3), cache.TTL(time.Millisecond*10))
c := cache.NewCache[string, string]().WithMaxKeys(3).WithTTL(time.Millisecond * 10)

// set value under key1.
// with 0 ttl (last parameter) will use cache-wide setting instead (10ms).
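
For migration context, a hedged, self-contained sketch (not part of this commit) of the README example under the v2 API shown above; the old v1 call is kept in a comment for comparison, and the final check assumes that an entry read after its TTL has passed is reported as missing, which is not shown in the visible part of the diff.

package main

import (
	"fmt"
	"time"

	"github.com/go-pkgz/expirable-cache/v2"
)

func main() {
	// v1: c, _ := cache.NewCache(cache.MaxKeys(3), cache.TTL(time.Millisecond*10))
	// v2: no error to handle, options are chained on the returned Cache
	c := cache.NewCache[string, string]().WithMaxKeys(3).WithTTL(time.Millisecond * 10)

	// ttl of 0 falls back to the cache-wide TTL (10ms here)
	c.Set("key1", "val1", 0)
	if v, ok := c.Get("key1"); ok {
		fmt.Println("got:", v)
	}

	// after the cache-wide TTL has passed, the entry is expected to be gone
	time.Sleep(time.Millisecond * 20)
	if _, ok := c.Get("key1"); !ok {
		fmt.Println("key1 expired")
	}
}
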
30 changes: 11 additions & 19 deletions v2/cache.go
@@ -22,6 +22,7 @@ import (
// Cache defines cache interface
type Cache[K comparable, V any] interface {
fmt.Stringer
options[K, V]
Set(key K, value V, ttl time.Duration)
Get(key K) (V, bool)
Peek(key K) (V, bool)
@@ -41,6 +42,14 @@ type Stats struct {
Added, Evicted int // number of added and evicted records
}

// Settings defines cache settings, all fields are optional
type Settings[K comparable, V any] struct {
TTL time.Duration
MaxKeys int
LRU bool
OnEvicted func(key K, value V)
}

// cacheImpl provides Cache interface implementation.
type cacheImpl[K comparable, V any] struct {
ttl time.Duration
@@ -61,30 +70,13 @@ const noEvictionTTL = time.Hour * 24 * 365 * 10
// Default MaxKeys is unlimited (0).
// Default TTL is 10 years; a sane value for an expirable cache is 5 minutes.
// Default eviction mode is LRC, the appropriate option allows changing it to LRU.
func NewCache[K comparable, V any](options ...*options[K, V]) (Cache[K, V], error) {
res := &cacheImpl[K, V]{
func NewCache[K comparable, V any]() Cache[K, V] {
return &cacheImpl[K, V]{
items: map[K]*list.Element{},
evictList: list.New(),
ttl: noEvictionTTL,
maxKeys: 0,
}

if len(options) > 0 {
if options[0].ttl != nil {
res.ttl = *options[0].ttl
}
if options[0].maxKeys != nil {
res.maxKeys = *options[0].maxKeys
}
if options[0].lru != nil {
res.isLRU = *options[0].lru
}
if options[0].onEvicted != nil {
res.onEvicted = options[0].onEvicted
}
}

return res, nil
}

// Set key, ttl of 0 would use cache-wide TTL
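
Since NewCache no longer returns an error, a zero-configuration cache simply gets the defaults documented above (unlimited keys, the ~10-year noEvictionTTL, LRC eviction), and any of them can be adjusted afterwards because every With* option returns the Cache itself. A minimal sketch, not part of this commit, using only methods visible in this diff:

package main

import (
	"time"

	"github.com/go-pkgz/expirable-cache/v2"
)

func main() {
	// defaults: MaxKeys 0 (unlimited), TTL = noEvictionTTL (~10 years), LRC eviction
	plain := cache.NewCache[string, int]()
	plain.Set("answer", 42, 0) // ttl 0 keeps the cache-wide TTL

	// the same constructor, tuned through the chained options from v2/options.go
	tuned := cache.NewCache[string, int]().
		WithLRU().
		WithMaxKeys(100).
		WithTTL(5 * time.Minute)
	tuned.Set("answer", 42, time.Second) // non-zero ttl overrides the cache-wide TTL for this entry
}
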
28 changes: 11 additions & 17 deletions v2/cache_test.go
@@ -10,8 +10,7 @@ import (
)

func TestCacheNoPurge(t *testing.T) {
lc, err := NewCache[string, string]()
assert.NoError(t, err)
lc := NewCache[string, string]()

lc.Set("key1", "val1", 0)
assert.Equal(t, 1, lc.Len())
@@ -29,9 +28,10 @@ func TestCacheNoPurge(t *testing.T) {

func TestCacheWithDeleteExpired(t *testing.T) {
var evicted []string
lc, err := NewCache[string, string](Options[string, string]().TTL(150 * time.Millisecond).
OnEvicted(func(key string, value string) { evicted = append(evicted, key, value) }))
assert.NoError(t, err)
lc := NewCache[string, string]().WithTTL(150 * time.Millisecond).WithOnEvicted(
func(key string, value string) {
evicted = append(evicted, key, value)
})

lc.Set("key1", "val1", 0)

@@ -68,8 +68,7 @@ func TestCacheWithDeleteExpired(t *testing.T) {
}

func TestCacheWithPurgeEnforcedBySize(t *testing.T) {
lc, err := NewCache[string, string](Options[string, string]().TTL(time.Hour).MaxKeys(10))
assert.NoError(t, err)
lc := NewCache[string, string]().WithTTL(time.Hour).WithMaxKeys(10)

for i := 0; i < 100; i++ {
i := i
@@ -84,8 +83,7 @@ func TestCacheWithPurgeEnforcedBySize(t *testing.T) {
}

func TestCacheConcurrency(t *testing.T) {
lc, err := NewCache[string, string]()
assert.NoError(t, err)
lc := NewCache[string, string]()
wg := sync.WaitGroup{}
wg.Add(1000)
for i := 0; i < 1000; i++ {
@@ -100,9 +98,7 @@ func TestCacheConcurrency(t *testing.T) {

func TestCacheInvalidateAndEvict(t *testing.T) {
var evicted int
opts := Options[string, string]().LRU().OnEvicted(func(_ string, _ string) { evicted++ })
lc, err := NewCache(opts)
assert.NoError(t, err)
lc := NewCache[string, string]().WithLRU().WithOnEvicted(func(_ string, _ string) { evicted++ })

lc.Set("key1", "val1", 0)
lc.Set("key2", "val2", 0)
@@ -132,8 +128,7 @@ func TestCacheInvalidateAndEvict(t *testing.T) {
}

func TestCacheExpired(t *testing.T) {
lc, err := NewCache[string, string](Options[string, string]().TTL(time.Millisecond * 5))
assert.NoError(t, err)
lc := NewCache[string, string]().WithTTL(time.Millisecond * 5)

lc.Set("key1", "val1", 0)
assert.Equal(t, 1, lc.Len())
@@ -159,8 +154,7 @@ func TestCacheExpired(t *testing.T) {
}

func TestCacheRemoveOldest(t *testing.T) {
lc, err := NewCache[string, string](Options[string, string]().LRU().MaxKeys(2))
assert.NoError(t, err)
lc := NewCache[string, string]().WithLRU().WithMaxKeys(2)

lc.Set("key1", "val1", 0)
assert.Equal(t, 1, lc.Len())
@@ -184,7 +178,7 @@ func TestCacheRemoveOldest(t *testing.T) {

func ExampleCache() {
// make cache with short TTL and 3 max keys
cache, _ := NewCache[string, string](Options[string, string]().MaxKeys(3).TTL(time.Millisecond * 10))
cache := NewCache[string, string]().WithMaxKeys(3).WithTTL(time.Millisecond * 10)

// set value under key1.
// with 0 ttl (last parameter) will use cache-wide setting instead (10ms).
46 changes: 21 additions & 25 deletions v2/options.go
@@ -2,39 +2,35 @@ package cache

import "time"

type options[K comparable, V any] struct {
ttl *time.Duration
maxKeys *int
lru *bool
onEvicted func(key K, value V)
type options[K comparable, V any] interface {
WithTTL(ttl time.Duration) Cache[K, V]
WithMaxKeys(maxKeys int) Cache[K, V]
WithLRU() Cache[K, V]
WithOnEvicted(fn func(key K, value V)) Cache[K, V]
}

// Options is a function that created a new empty option.
func Options[K comparable, V any]() *options[K, V] { return &options[K, V]{} } // nolint

// TTL functional option defines TTL for all cache entries.
// WithTTL functional option defines TTL for all cache entries.
// By default, it is set to 10 years; a sane value for an expirable cache might be 5 minutes.
func (o *options[K, V]) TTL(ttl time.Duration) *options[K, V] {
o.ttl = &ttl
return o
func (c *cacheImpl[K, V]) WithTTL(ttl time.Duration) Cache[K, V] {
c.ttl = ttl
return c
}

// MaxKeys functional option defines how many keys to keep.
// WithMaxKeys functional option defines how many keys to keep.
// By default, it is 0, which means unlimited.
func (o *options[K, V]) MaxKeys(maxKeys int) *options[K, V] {
o.maxKeys = &maxKeys
return o
func (c *cacheImpl[K, V]) WithMaxKeys(maxKeys int) Cache[K, V] {
c.maxKeys = maxKeys
return c
}

// LRU sets cache to LRU (Least Recently Used) eviction mode.
func (o *options[K, V]) LRU() *options[K, V] {
v := true
o.lru = &v
return o
// WithLRU sets cache to LRU (Least Recently Used) eviction mode.
func (c *cacheImpl[K, V]) WithLRU() Cache[K, V] {
c.isLRU = true
return c
}

// OnEvicted called automatically for automatically and manually deleted entries
func (o *options[K, V]) OnEvicted(fn func(key K, value V)) *options[K, V] {
o.onEvicted = fn
return o
// WithOnEvicted defines a function which is called automatically for both automatically and manually deleted entries
func (c *cacheImpl[K, V]) WithOnEvicted(fn func(key K, value V)) Cache[K, V] {
c.onEvicted = fn
return c
}
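
The chaining works because the With* methods are implemented on *cacheImpl but declared on the options interface that Cache embeds (see v2/cache.go above): each call returns Cache[K, V], so options can follow one another in any order and remain callable on an already-constructed cache. A hedged sketch of wiring up the eviction callback this way; DeleteExpired is an assumption taken from TestCacheWithDeleteExpired above and is not shown in the visible part of the Cache interface.

package main

import (
	"fmt"
	"time"

	"github.com/go-pkgz/expirable-cache/v2"
)

func main() {
	var evictedKeys []string

	c := cache.NewCache[string, string]().
		WithTTL(150 * time.Millisecond).
		WithOnEvicted(func(key, value string) { evictedKeys = append(evictedKeys, key) })

	c.Set("key1", "val1", 0)
	time.Sleep(200 * time.Millisecond)

	// DeleteExpired is assumed from the test name above; the expectation is that
	// purging the expired entry triggers the OnEvicted callback for it.
	c.DeleteExpired()
	fmt.Println(evictedKeys) // expected: [key1]
}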
