New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor, add pluggable cache framework #740

Merged
merged 3 commits into from Sep 7, 2017
Jump to file or symbol
Failed to load files and symbols.
+72 −31
Diff settings

Always

Just for now

Viewing a subset of changes. View all

Refactor add threadSafeCache and remove Lock related code from LRU

  • Loading branch information...
yulai.li
yulai.li committed Sep 7, 2017
commit 86b7bb02e2e549f0d9aae0783699dd6bc319afa7
View
@@ -13,6 +13,10 @@
package cache
import (
"sync"
)
// Cache is an interface for cache system
type Cache interface {
// Put puts an item into cache.
@@ -29,26 +33,83 @@ type Cache interface {
Len() int
}
// Type is the cache's type, such as LRUCache.
type Type int

const (
	// LRUCache is the LRU cache type.
	LRUCache Type = 1
)

var (
	// DefaultCacheType sets the default cache type used by NewDefaultCache.
	DefaultCacheType = LRUCache
)
// threadSafeCache wraps a non-thread-safe Cache implementation and
// serializes access to it with a sync.RWMutex.
type threadSafeCache struct {
	cache Cache
	lock  sync.RWMutex
}

// newThreadSafeCache returns a thread-safe wrapper around cache.
func newThreadSafeCache(cache Cache) Cache {
	return &threadSafeCache{
		cache: cache,
	}
}

// Put puts an item into cache.
func (c *threadSafeCache) Put(key uint64, value interface{}) {
	c.lock.Lock()
	defer c.lock.Unlock()
	c.cache.Put(key, value)
}

// Get retrieves an item from cache.
// NOTE: this takes the write lock, not the read lock, because the
// underlying cache may mutate internal state on a read — e.g. the LRU
// moves the accessed entry to the front of its list — which would be a
// data race under concurrent RLock holders.
func (c *threadSafeCache) Get(key uint64) (interface{}, bool) {
	c.lock.Lock()
	defer c.lock.Unlock()
	return c.cache.Get(key)
}

// Peek reads an item from cache. The action is not considered 'Use',
// so the read lock is sufficient.
func (c *threadSafeCache) Peek(key uint64) (interface{}, bool) {
	c.lock.RLock()
	defer c.lock.RUnlock()
	return c.cache.Peek(key)
}

// Remove eliminates an item from cache.
func (c *threadSafeCache) Remove(key uint64) {
	c.lock.Lock()
	defer c.lock.Unlock()
	c.cache.Remove(key)
}

// Elems returns all items in cache.
func (c *threadSafeCache) Elems() []*Item {
	c.lock.RLock()
	defer c.lock.RUnlock()
	return c.cache.Elems()
}

// Len returns the current number of items in cache.
func (c *threadSafeCache) Len() int {
	c.lock.RLock()
	defer c.lock.RUnlock()
	return c.cache.Len()
}
// NewCache creates a thread-safe Cache instance of the given Type.
// It panics on an unknown cache type, which indicates a programmer error.
func NewCache(size int, cacheType Type) Cache {
	switch cacheType {
	case LRUCache:
		// The raw LRU is not thread-safe; wrap it in the locking adapter.
		return newThreadSafeCache(newLRU(size))
	default:
		panic("Unknown cache type")
	}
}
// NewDefaultCache creates a Cache instance of the default cache type
// (see DefaultCacheType).
func NewDefaultCache(size int) Cache {
	return NewCache(size, DefaultCacheType)
}
@@ -79,7 +79,7 @@ func (s *testRegionCacheSuite) TestExpireRegionCache(c *C) {
}
func (s *testRegionCacheSuite) TestLRUCache(c *C) {
cache := NewLRU(3)
cache := newLRU(3)
cache.Put(1, "1")
cache.Put(2, "2")
cache.Put(3, "3")
View
@@ -15,7 +15,6 @@ package cache
import (
"container/list"
"sync"
)
// Item is the cache entry.
@@ -26,8 +25,6 @@ type Item struct {
// LRU is 'Least-Recently-Used' cache.
type LRU struct {
sync.RWMutex
// maxCount is the maximum number of items.
// 0 means no limit.
maxCount int
@@ -36,8 +33,9 @@ type LRU struct {
cache map[uint64]*list.Element
}
// NewLRU returns a new lru cache.
func NewLRU(maxCount int) *LRU {
// newLRU returns a new lru cache. And this LRU cache is not thread-safe
// should not use this function to create LRU cache, use NewCache instead
func newLRU(maxCount int) *LRU {
return &LRU{
maxCount: maxCount,
ll: list.New(),
@@ -47,9 +45,6 @@ func NewLRU(maxCount int) *LRU {
// Put puts an item into cache.
func (c *LRU) Put(key uint64, value interface{}) {
c.Lock()
defer c.Unlock()
if ele, ok := c.cache[key]; ok {
c.ll.MoveToFront(ele)
ele.Value.(*Item).Value = value
@@ -66,9 +61,6 @@ func (c *LRU) Put(key uint64, value interface{}) {
// Get retrives an item from cache.
func (c *LRU) Get(key uint64) (interface{}, bool) {
c.Lock()
defer c.Unlock()
if ele, ok := c.cache[key]; ok {
c.ll.MoveToFront(ele)
return ele.Value.(*Item).Value, true
@@ -79,9 +71,6 @@ func (c *LRU) Get(key uint64) (interface{}, bool) {
// Peek reads an item from cache. The action is not considered 'Use'.
func (c *LRU) Peek(key uint64) (interface{}, bool) {
c.RLock()
defer c.RUnlock()
if ele, ok := c.cache[key]; ok {
return ele.Value.(*Item).Value, true
}
@@ -91,9 +80,6 @@ func (c *LRU) Peek(key uint64) (interface{}, bool) {
// Remove eliminates an item from cache.
func (c *LRU) Remove(key uint64) {
c.Lock()
defer c.Unlock()
if ele, ok := c.cache[key]; ok {
c.removeElement(ele)
}
@@ -114,9 +100,6 @@ func (c *LRU) removeElement(ele *list.Element) {
// Elems return all items in cache.
func (c *LRU) Elems() []*Item {
c.RLock()
defer c.RUnlock()
elems := make([]*Item, 0, c.ll.Len())
for ele := c.ll.Front(); ele != nil; ele = ele.Next() {
clone := *(ele.Value.(*Item))
@@ -128,8 +111,5 @@ func (c *LRU) Elems() []*Item {
// Len returns current cache size.
// Len returns the current number of entries in the cache.
// LRU is not thread-safe; callers must provide their own
// synchronization (see threadSafeCache).
func (c *LRU) Len() int {
	return c.ll.Len()
}
ProTip! Use n and p to navigate between commits in a pull request.