// cache.go
package middlewares
import (
"time"
"github.com/monitoror/monitoror/models"
"github.com/jsdidierlaurent/echo-middleware/cache"
"github.com/labstack/echo/v4"
)
/*CacheMiddleware for monitoror
 *
 * We need two types of cache:
 * - UpstreamCache : serves as a circuit breaker to answer before executing the request (short TTL by default).
 * - DownstreamCache : serves as a backup to return the old result in case of service timeout (long TTL by default).
 *
 * UpstreamCache must be implemented on some routes only (and with variable expiration).
 * It is implemented as a decorator on the handler of each route.
 *
 * DownstreamCache should be used instead of a timeout response,
 * so we look at the cache in the global error handler (see handlers/errors.go).
 *
 * To fill both stores at the same time, a store wrapper performs every action on both stores.
 */
type (
	// CacheMiddleware carries the shared cache store together with the
	// default expirations applied to the upstream and downstream caches.
	CacheMiddleware struct {
		store                       cache.Store   // backing store shared by both caches
		downstreamDefaultExpiration time.Duration // long TTL: backup answers for timeouts
		upstreamDefaultExpiration   time.Duration // short TTL: circuit-breaker answers
	}

	// upstreamStore wraps a cache.Store so that every Set also writes a
	// downstream copy under a second key (used as a timeout backup).
	upstreamStore struct {
		store                       cache.Store
		downstreamDefaultExpiration time.Duration // TTL used for the downstream copy
	}
)
// NewCacheMiddleware builds a CacheMiddleware from the given store and the
// default expirations for the downstream and upstream caches.
func NewCacheMiddleware(store cache.Store, downstreamDefaultExpiration, upstreamDefaultExpiration time.Duration) *CacheMiddleware {
	return &CacheMiddleware{
		store:                       store,
		downstreamDefaultExpiration: downstreamDefaultExpiration,
		upstreamDefaultExpiration:   upstreamDefaultExpiration,
	}
}
//==============================================================================
// UPSTREAM MIDDLEWARE
//==============================================================================
// UpstreamCacheHandler returns the cached response when one is found in the
// store, caching new responses with the middleware's default upstream
// expiration. (Decorator for route handlers.)
func (cm *CacheMiddleware) UpstreamCacheHandler(handle echo.HandlerFunc) echo.HandlerFunc {
	// Delegate to the expiration-aware variant so the cache configuration
	// lives in exactly one place.
	return cm.UpstreamCacheHandlerWithExpiration(cm.upstreamDefaultExpiration, handle)
}
// UpstreamCacheHandlerWithExpiration returns the cached response when one is
// found in the store, caching new responses with the given expiration.
// (Decorator for route handlers.)
func (cm *CacheMiddleware) UpstreamCacheHandlerWithExpiration(expire time.Duration, handle echo.HandlerFunc) echo.HandlerFunc {
	config := cache.CacheMiddlewareConfig{
		Store:     &upstreamStore{cm.store, cm.downstreamDefaultExpiration},
		KeyPrefix: "-", // Hack we need to replace this by real key prefix in Store definition
		Expire:    expire,
	}
	return cache.CacheHandlerWithConfig(config, handle)
}
//==============================================================================
// DOWNSTREAM MIDDLEWARE
//==============================================================================
// DownstreamStoreMiddleware exposes the downstream store to every route so
// the global error handler can serve a stale response when a service times
// out (see handlers/errors.go).
func (cm *CacheMiddleware) DownstreamStoreMiddleware() echo.MiddlewareFunc {
	return cache.StoreMiddlewareWithConfig(cache.StoreMiddlewareConfig{
		Store:      cm.store,
		ContextKey: models.DownstreamStoreContextKey,
	})
}
//==============================================================================
// upstreamStore methods (implementation of cache.Store)
//==============================================================================
// Get looks the key up in the upstream cache, swapping the "-" placeholder
// prefix for the real upstream key prefix.
func (c *upstreamStore) Get(key string, value interface{}) error {
	upstreamKey := models.UpstreamStoreKeyPrefix + key[1:]
	return c.store.Get(upstreamKey, value)
}
// Set writes the value under the upstream key and, for cached HTTP
// responses, also stores a long-lived downstream copy as a timeout backup.
func (c *upstreamStore) Set(key string, val interface{}, expires time.Duration) error {
	suffix := key[1:] // drop the "-" placeholder prefix
	err := c.store.Set(models.UpstreamStoreKeyPrefix+suffix, val, expires)
	// Skip the downstream copy when the response itself came from the
	// timeout-recovery path, to avoid an infinite loop.
	response, isResponse := val.(cache.ResponseCache)
	if isResponse && response.Header.Get(models.DownstreamCacheHeader) == "" {
		_ = c.store.Set(models.DownstreamStoreKeyPrefix+suffix, val, c.downstreamDefaultExpiration)
	}
	return err
}
// The remaining cache.Store methods are never invoked by the upstream cache
// decorator, so they deliberately panic to surface any accidental use.

// Add is not used by the upstream cache decorator.
func (c *upstreamStore) Add(key string, value interface{}, expires time.Duration) error {
	panic("unimplemented")
}

// Replace is not used by the upstream cache decorator.
func (c *upstreamStore) Replace(key string, value interface{}, expires time.Duration) error {
	panic("unimplemented")
}

// Delete is not used by the upstream cache decorator.
func (c *upstreamStore) Delete(key string) error {
	panic("unimplemented")
}

// Increment is not used by the upstream cache decorator.
func (c *upstreamStore) Increment(key string, n uint64) (uint64, error) {
	panic("unimplemented")
}

// Decrement is not used by the upstream cache decorator.
func (c *upstreamStore) Decrement(key string, n uint64) (uint64, error) {
	panic("unimplemented")
}

// Flush is not used by the upstream cache decorator.
func (c *upstreamStore) Flush() error {
	panic("unimplemented")
}