Skip to content

Commit

Permalink
Merge 0f685c5 into 1d352ed
Browse files Browse the repository at this point in the history
  • Loading branch information
nguyenchr committed Oct 15, 2014
2 parents 1d352ed + 0f685c5 commit bdbd302
Show file tree
Hide file tree
Showing 10 changed files with 194 additions and 77 deletions.
28 changes: 16 additions & 12 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ tiered caches, and a consistent interface.
* Tiered caches -- data gets stored in each cache and fetched from the highest
priority cache(s) first.
* Use any cache you want, as long as it has the same API.
* 100% test coverage via [mocha](https://github.com/visionmedia/mocha),
[istanbul](https://github.com/yahoo/istanbul), and [sinon](http://sinonjs.org).


Expand Down Expand Up @@ -57,14 +57,14 @@ function get_cached_user(id, cb) {
function get_cached_user(id, cb) {
memory_cache.wrap(id, function (cache_callback) {
get_user(id, cache_callback);
}, cb);
}, ttl, cb);
}
```

Second, node-cache-manager features a built-in memory cache (using [node-lru-cache](https://github.com/isaacs/node-lru-cache)),
with the standard functions you'd expect in most caches:

set(key, val, cb)
set(key, val, ttl, cb)
get(key, cb)
del(key, cb)

Expand All @@ -88,10 +88,10 @@ Redis cache store with connection pooling.
```javascript
var cache_manager = require('cache-manager');
var memory_cache = cache_manager.caching({store: 'memory', max: 100, ttl: 10/*seconds*/});

var ttl = 5;
// Note: callback is optional in set() and del().

memory_cache.set('foo', 'bar', function(err) {
memory_cache.set('foo', 'bar', ttl, function(err) {
if (err) { throw err; }

memory_cache.get('foo', function(err, result) {
Expand All @@ -109,11 +109,12 @@ function get_user(id, cb) {
}

var user_id = 123;
var key = 'user_' + user_id;
var key = 'user_' + user_id;

// Note: ttl is optional in wrap()
memory_cache.wrap(key, function (cb) {
get_user(user_id, cb);
}, function (err, user) {
}, ttl, function (err, user) {
console.log(user);

// Second time fetches user from memory_cache
Expand All @@ -134,7 +135,7 @@ Here's a very basic example of how you could use this in an Express app:

```javascript
function respond(res, err, data) {
if (err) {
if (err) {
res.json(500, err);
} else {
res.json(200, data);
Expand All @@ -143,9 +144,10 @@ function respond(res, err, data) {

app.get('/foo/bar', function(req, res) {
var cache_key = 'foo-bar:' + JSON.stringify(req.query);
var ttl = 10;
memory_cache.wrap(cache_key, function(cache_cb) {
DB.find(req.query, cache_cb);
}, function(err, result) {
}, ttl, function(err, result) {
respond(res, err, result);
});
});
Expand All @@ -171,10 +173,11 @@ var cache = cache_manager.caching({store: '/path/to/your/store'});
```javascript
var multi_cache = cache_manager.multi_caching([memory_cache, some_other_cache]);
user_id2 = 456;
key2 = 'user_' + user_id;
key2 = 'user_' + user_id;
ttl = 5;

// Sets in all caches.
multi_cache.set('foo2', 'bar2', function(err) {
multi_cache.set('foo2', 'bar2', ttl, function(err) {
if (err) { throw err; }

// Fetches from highest priority cache that has the key.
Expand All @@ -187,9 +190,10 @@ multi_cache.set('foo2', 'bar2', function(err) {
});
});

// Note: ttl is optional in wrap()
multi_cache.wrap(key2, function (cb) {
get_user(user_id2, cb);
}, function (err, user) {
}, ttl, function (err, user) {
console.log(user);

// Second time fetches user from memory_cache, since it's highest priority.
Expand Down
8 changes: 5 additions & 3 deletions examples/example.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,12 @@
var cache_manager = require('../');
var memory_cache = cache_manager.caching({store: 'memory', max: 100, ttl: 10/*seconds*/});
var memory_cache2 = cache_manager.caching({store: 'memory', max: 100, ttl: 100/*seconds*/});
var ttl; //Can't use a different ttl per set() call with memory cache

//
// Basic usage
//
memory_cache.set('foo', 'bar', function (err) {
memory_cache.set('foo', 'bar', ttl, function (err) {
if (err) { throw err; }

memory_cache.get('foo', function (err, result) {
Expand All @@ -31,7 +32,7 @@ var user_id = 123;
var key = 'user_' + user_id;

//
// wrap() example
// wrap() example
//

// Instead of manually managing the cache like this:
Expand Down Expand Up @@ -94,6 +95,7 @@ memory_cache.wrap(key, function (cb) {
var multi_cache = cache_manager.multi_caching([memory_cache, memory_cache2]);
var user_id2 = 456;
var key2 = 'user_' + user_id;
var ttl2; //Can't use a different ttl per set() call with memory cache

multi_cache.wrap(key2, function (cb) {
get_user(user_id2, cb);
Expand All @@ -110,7 +112,7 @@ multi_cache.wrap(key2, function (cb) {
});

// Sets in all caches.
multi_cache.set('foo2', 'bar2', function (err) {
multi_cache.set('foo2', 'bar2', ttl2, function (err) {
if (err) { throw err; }

// Fetches from highest priority cache that has the key.
Expand Down
5 changes: 3 additions & 2 deletions examples/redis_example/example.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,10 @@ var util = require('util');
var cache_manager = require('../../');
var redis_store = require('./redis_store');
var redis_cache = cache_manager.caching({store: redis_store, db: 0, ttl: 100/*seconds*/});
var ttl = 60;

console.log("set/get/del example:");
redis_cache.set('foo', 'bar', function (err) {
redis_cache.set('foo', 'bar', ttl, function (err) {
if (err) { throw err; }

redis_cache.get('foo', function (err, result) {
Expand Down Expand Up @@ -38,7 +39,7 @@ function get_user_from_cache(id, cb) {
var key = create_key(id);
redis_cache.wrap(key, function (cache_cb) {
get_user(user_id, cache_cb);
}, cb);
}, ttl, cb);
}

get_user_from_cache(user_id, function (err, user) {
Expand Down
9 changes: 5 additions & 4 deletions examples/redis_example/redis_store.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ var RedisPool = require('sol-redis-pool');
function redis_store(args) {
args = args || {};
var self = {};
var ttl = args.ttl;
var ttlDefault = args.ttl;
self.name = 'redis';
self.client = require('redis').createClient(args.port, args.host, args);

Expand Down Expand Up @@ -46,12 +46,13 @@ function redis_store(args) {
});
};

self.set = function (key, value, cb) {
self.set = function (key, value, ttl, cb) {
var ttlToUse = ttl || ttlDefault;
connect(function (err, conn) {
if (err) { return cb(err); }

if (ttl) {
conn.setex(key, ttl, JSON.stringify(value), function (err, result) {
if (ttlToUse) {
conn.setex(key, ttlToUse, JSON.stringify(value), function (err, result) {
pool.release(conn);
cb(err, result);
});
Expand Down
10 changes: 8 additions & 2 deletions lib/caching.js
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,13 @@ var caching = function (args) {
* console.log(user);
* });
*/
self.wrap = function (key, work, cb) {
self.wrap = function (key, work, ttl, cb) {

if(typeof(ttl) == 'function') {
cb = ttl;
ttl = undefined;
}

self.store.get(key, function (err, result) {
if (err && (!self.ignoreCacheErrors)) {
cb(err);
Expand All @@ -55,7 +61,7 @@ var caching = function (args) {
return;
}
// Subsequently assume second arg is result.
self.store.set(key, work_args[1], function (err) {
self.store.set(key, work_args[1], ttl, function (err) {
if (err && (!self.ignoreCacheErrors)) {
self.queues[key].forEach(function (done) {
done.call(null, err);
Expand Down
35 changes: 28 additions & 7 deletions lib/multi_caching.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,9 @@ var multi_caching = function (caches) {
}, cb);
}

function set_in_multiple_caches(caches, key, value, cb) {
/**
 * Sets a value in every cache in `caches`, in parallel.
 *
 * Fix: the scraped diff left the pre-change call line
 * (`cache.store.set(key, value, async_cb);`) interleaved with the new
 * 4-arg call; only the updated call belongs in the function body.
 *
 * @param {Array} caches - cache instances; each must expose store.set(key, value, ttl, cb)
 * @param {Object} opts - { key, value, ttl } to pass through to each store
 *                        (ttl may be undefined; stores fall back to their default)
 * @param {Function} cb - called once all sets complete, with the first error if any
 */
function set_in_multiple_caches(caches, opts, cb) {
  async.forEach(caches, function (cache, async_cb) {
    cache.store.set(opts.key, opts.value, opts.ttl, async_cb);
  }, cb);
}

Expand All @@ -41,13 +41,24 @@ var multi_caching = function (caches) {
* If a key doesn't exist in a higher-priority cache but exists in a lower-priority
* cache, it gets set in all higher-priority caches.
*/
self.wrap = function (key, work, cb) {
self.wrap = function (key, work, ttl, cb) {

if(typeof(ttl) == 'function') {
cb = ttl;
ttl = undefined;
}

get_from_highest_priority_cache(key, function (err, result, index) {
if (err) {
return cb(err);
} else if (result) {
var caches_to_update = caches.slice(0, index);
set_in_multiple_caches(caches_to_update, key, result, function (err) {
var opts = {
key: key,
value: result,
ttl: ttl
};
set_in_multiple_caches(caches_to_update, opts, function (err) {
cb(err, result);
});
} else if (self.queues[key]) {
Expand All @@ -63,7 +74,12 @@ var multi_caching = function (caches) {
delete self.queues[key];
return;
}
set_in_multiple_caches(caches, key, work_args[1], function (err) {
var opts = {
key: key,
value: work_args[1],
ttl: ttl
};
set_in_multiple_caches(caches, opts, function (err) {
if (err) {
self.queues[key].forEach(function (done) {
done.call(null, err);
Expand All @@ -81,8 +97,13 @@ var multi_caching = function (caches) {
});
};

/**
 * Sets key/value (with optional ttl) in all configured caches.
 *
 * Fix: the scraped diff left the old 3-arg version's removed lines
 * duplicated above the new body; only the 4-arg version belongs here.
 *
 * @param {string} key
 * @param {*} value
 * @param {number} [ttl] - seconds; undefined lets each store use its default
 * @param {Function} [cb] - called after all caches are set
 */
self.set = function (key, value, ttl, cb) {
  var opts = {
    key: key,
    value: value,
    ttl: ttl
  };
  set_in_multiple_caches(caches, opts, cb);
};

self.get = function (key, cb) {
Expand Down
2 changes: 1 addition & 1 deletion lib/stores/memory.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ var memory_store = function (args) {

var lru_cache = new Lru(lru_opts);

self.set = function (key, value, cb) {
self.set = function (key, value, ttl, cb) {
lru_cache.set(key, value);
if (cb) {
process.nextTick(cb);
Expand Down
Loading

0 comments on commit bdbd302

Please sign in to comment.