Commit: add cache stats

umputun committed Jan 25, 2019
1 parent 1dd416a commit 222fa65
Showing 5 changed files with 97 additions and 1 deletion.
35 changes: 35 additions & 0 deletions cache_test.go
@@ -316,6 +316,41 @@ func TestCache_Invalidate(t *testing.T) {
	}
}

func TestCache_Stats(t *testing.T) {
	caches := cachesTestList(t)
	for _, c := range caches {
		t.Run(strings.Replace(fmt.Sprintf("%T", c), "*lcw.", "", 1), func(t *testing.T) {
			// fill cache
			for i := 0; i < 100; i++ {
				_, err := c.Get(fmt.Sprintf("key-%d", i), func() (Value, error) {
					return sizedString(fmt.Sprintf("result-%d", i)), nil
				})
				require.Nil(t, err)
			}
			stats := c.Stat()
			assert.Equal(t, CacheStat{Hits: 0, Misses: 100, Keys: 100, Size: 890}, stats)

			_, err := c.Get("key-1", func() (Value, error) {
				return "xyz", nil
			})
			require.NoError(t, err)
			assert.Equal(t, CacheStat{Hits: 1, Misses: 100, Keys: 100, Size: 890}, c.Stat())

			_, err = c.Get("key-1123", func() (Value, error) {
				return sizedString("xyz"), nil
			})
			require.NoError(t, err)
			assert.Equal(t, CacheStat{Hits: 1, Misses: 101, Keys: 101, Size: 893}, c.Stat())

			_, err = c.Get("key-9999", func() (Value, error) {
				return nil, errors.New("err")
			})
			assert.Equal(t, CacheStat{Hits: 1, Misses: 101, Keys: 101, Size: 893, Errors: 1}, c.Stat())
		})

	}
}

type counts interface {
	size() int64 // cache size in bytes
	keys() int // number of keys in cache
15 changes: 15 additions & 0 deletions expirable_cache.go
@@ -11,6 +11,7 @@ import (
// ExpirableCache implements LoadingCache with TTL.
type ExpirableCache struct {
	options
	CacheStat
	currentSize int64
	currKeys    int64
	backend     *cache.Cache
@@ -51,12 +52,15 @@ func NewExpirableCache(opts ...Option) (*ExpirableCache, error) {
func (c *ExpirableCache) Get(key string, fn func() (Value, error)) (data Value, err error) {

	if v, ok := c.backend.Get(key); ok {
		atomic.AddInt64(&c.Hits, 1)
		return v, nil
	}

	if data, err = fn(); err != nil {
		atomic.AddInt64(&c.Errors, 1)
		return data, err
	}
	atomic.AddInt64(&c.Misses, 1)

	if c.allowed(key, data) {
		if s, ok := data.(Sizer); ok {
@@ -94,6 +98,17 @@ func (c *ExpirableCache) Purge() {
	atomic.StoreInt64(&c.currKeys, 0)
}

// Stat returns cache statistics
func (c *ExpirableCache) Stat() CacheStat {
	return CacheStat{
		Hits:   c.Hits,
		Misses: c.Misses,
		Size:   c.size(),
		Keys:   c.keys(),
		Errors: c.Errors,
	}
}

func (c *ExpirableCache) size() int64 {
	return atomic.LoadInt64(&c.currentSize)
}
23 changes: 23 additions & 0 deletions interface.go
@@ -1,5 +1,7 @@
package lcw

import "fmt"

// Value type wraps interface{}
type Value interface{}

@@ -15,6 +17,22 @@ type LoadingCache interface {
	Peek(key string) (Value, bool)
	Invalidate(fn func(key string) bool)
	Purge()
	Stat() CacheStat
}

// CacheStat represents stats values
type CacheStat struct {
	Hits   int64
	Misses int64
	Keys   int
	Size   int64
	Errors int64
}

// String formats cache stats
func (s *CacheStat) String() string {
	return fmt.Sprintf("{hits:%d, misses:%d, ratio:%.1f%%, keys:%d, size:%d, errors:%d}",
		s.Hits, s.Misses, 100*(float64(s.Hits)/float64(s.Hits+s.Misses)), s.Keys, s.Size, s.Errors)
}

// Nop is do-nothing implementation of LoadingCache
@@ -36,3 +54,8 @@ func (n *Nop) Invalidate(fn func(key string) bool) {}

// Purge does nothing for nop cache
func (n *Nop) Purge() {}

// Stat always returns zeros for nop cache
func (n *Nop) Stat() CacheStat {
	return CacheStat{}
}
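Not part of the commit, but for orientation: a minimal caller-side sketch of the new Stat API. The import path and the option-free NewLruCache call are assumptions for illustration, and the expected output is what the counters added above should produce under those assumptions.

package main

import (
	"fmt"
	"log"

	"github.com/go-pkgz/lcw" // assumed import path, not shown in this diff
)

func main() {
	c, err := lcw.NewLruCache() // defaults; size/TTL options omitted for brevity
	if err != nil {
		log.Fatal(err)
	}

	// First Get misses and invokes the loader; second Get is served from cache.
	for i := 0; i < 2; i++ {
		if _, e := c.Get("key", func() (lcw.Value, error) { return "value", nil }); e != nil {
			log.Fatal(e)
		}
	}

	stats := c.Stat()
	// Expected with these assumptions: {hits:1, misses:1, ratio:50.0%, keys:1, size:0, errors:0}
	fmt.Println(stats.String())
}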
9 changes: 8 additions & 1 deletion interface_test.go
@@ -9,7 +9,7 @@ import (

func TestNop_Get(t *testing.T) {
	var coldCalls int32
-	c := NewNopCache()
+	var c LoadingCache = NewNopCache()
	res, err := c.Get("key1", func() (Value, error) {
		atomic.AddInt32(&coldCalls, 1)
		return "result", nil
@@ -25,6 +25,8 @@ func TestNop_Get(t *testing.T) {
	assert.Nil(t, err)
	assert.Equal(t, "result2", res.(string))
	assert.Equal(t, int32(2), atomic.LoadInt32(&coldCalls))

	assert.Equal(t, CacheStat{}, c.Stat())
}

func TestNop_Peek(t *testing.T) {
@@ -41,3 +43,8 @@ func TestNop_Peek(t *testing.T) {
	_, ok := c.Peek("key1")
	assert.False(t, ok)
}

func TestStat_String(t *testing.T) {
	s := CacheStat{Keys: 100, Hits: 60, Misses: 10, Size: 12345, Errors: 5}
	assert.Equal(t, "{hits:60, misses:10, ratio:85.7%, keys:100, size:12345, errors:5}", s.String())
}
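For reference, the asserted ratio follows directly from the String format in interface.go: 100 * 60 / (60 + 10) ≈ 85.7%.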
16 changes: 16 additions & 0 deletions lru_cache.go
@@ -10,6 +10,7 @@ import (
// LruCache wraps lru.LruCache with loading cache Get and size limits
type LruCache struct {
	options
	CacheStat
	backend     *lru.Cache
	currentSize int64
}
@@ -49,13 +50,17 @@ func NewLruCache(opts ...Option) (*LruCache, error) {
func (c *LruCache) Get(key string, fn func() (Value, error)) (data Value, err error) {

	if v, ok := c.backend.Get(key); ok {
		atomic.AddInt64(&c.Hits, 1)
		return v, nil
	}

	if data, err = fn(); err != nil {
		atomic.AddInt64(&c.Errors, 1)
		return data, err
	}

	atomic.AddInt64(&c.Misses, 1)

	if c.allowed(key, data) {
		c.backend.Add(key, data)

@@ -91,6 +96,17 @@ func (c *LruCache) Invalidate(fn func(key string) bool) {
	}
}

// Stat returns cache statistics
func (c *LruCache) Stat() CacheStat {
	return CacheStat{
		Hits:   c.Hits,
		Misses: c.Misses,
		Size:   c.size(),
		Keys:   c.keys(),
		Errors: c.Errors,
	}
}

func (c *LruCache) size() int64 {
	return atomic.LoadInt64(&c.currentSize)
}
