
initial commit

Root commit f5b6ec450dce2e08391fb3d32e3c97e8c8c7e978 by @poelzi, committed Sep 9, 2011
Showing with 558 additions and 0 deletions.
  1. +12 −0 .gitignore
  2. +19 −0 LICENCE
  3. +131 −0 README.md
  4. +1 −0 backend/backend.js
  5. +1 −0 backend/redis.js
  6. +2 −0 flexcache.js
  7. +24 −0 package.json
  8. +6 −0 src/backend/base.coffee
  9. +101 −0 src/backend/redis.coffee
  10. +103 −0 src/flexcache.coffee
  11. +125 −0 test/test-redis.coffee
  12. +33 −0 wscript
12 .gitignore
@@ -0,0 +1,12 @@
+config.js
+build/*
+node_modules/*
+*.css
+repositories/*
+.lock-wscript
+.*~
+.*.kate-swp
+views/*.html
+
+*.swp
+*~
19 LICENCE
@@ -0,0 +1,19 @@
+Copyright (c) 2011 Daniel Poelzleithner
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
131 README.md
@@ -0,0 +1,131 @@
+flexcache
+=======
+
+Flexible cache for async function calls. It is designed to prevent dirty caches rather than to maximize speed.
+Different backends let you cover different use cases.
+
+
+# Backends
+
+
+## Redis
+
+
+ Best used to avoid long and slow operations on the filesystem. It can easily be shared across a cluster and is
+ very fast. The TTL support of the Redis database keeps memory usage down.
+
+
+## Memory (soon)
+
+
+ Caches are local only. They should only be used in a very narrow scope and be destroyed after every request. They
+ are very fast, however.
+
+
+# Installation
+
+ npm install flexcache
+
+
+# Cache Identifiers
+
+
+flexcache uses a two-level cache: the first level is called the key, the second level is the hash (subkey).
+By using an easy-to-derive key you can clear all caches that depend on the state of that key's value. You can
+also invalidate a single subkey cache without touching other subkey caches.
+
+Default behaviour:
+
+ key: the stringified first argument
+ subkey: uses Flexcache.safe_hasher_all, which generates a very good but hard-to-determine subkey.
+
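+For example, with the default behaviour a two-argument call is grouped under its first argument. A minimal sketch
+(`loadUser` is a made-up function; `fc` is a Flexcache instance created as in the Usage section below):
+
+```javascript
+loadUser = function(id, fields, callback) { /* slow lookup */ }
+cachedLoad = fc.cache(loadUser)
+
+// key    = JSON.stringify(42)  -> "42"
+// subkey = derived from the full argument list [42, ["name", "email"]]
+cachedLoad(42, ["name", "email"], function(err, user) { /* use the result */ })
+
+// drops every cached entry stored under key "42", whatever the subkey
+cachedLoad.clear("42")
+```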
+
+
+# Usage
+
+
+Each Flexcache instance uses a backend for storage. Many Flexcache instances can share a backend, but may have
+different options.
+
+```javascript
+
+RedisBackend = require('flexcache/backend/redis').RedisBackend
+Flexcache = require('flexcache').Flexcache
+
+backend = new RedisBackend()
+fc = new Flexcache(backend, { ttl:400000 }) // 400 second timeout
+
+slow = function(a, b, callback) { /* do something slow */ }
+
+cached = fc.cache(slow)
+
+```
+
+Whatever arguments are passed to `cached`, they are used to compute the subkey, so a call should therefore never hit
+a wrong cache entry.
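+
+For example, continuing the snippet above (a sketch; the result values are whatever `slow` passes to its callback):
+
+```javascript
+// first call: runs slow() and stores the result
+cached(1, 2, function(err, result) { /* fresh result */ })
+
+// same arguments again (once the first call finished): answered from the cache, slow() is not run
+cached(1, 2, function(err, result) { /* cached result */ })
+
+// different arguments mean a different key/subkey, so slow() runs again
+cached(3, 4, function(err, result) { /* fresh result */ })
+```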
+
+
+Advanced Usage
+--------------
+
+```javascript
+
+backend = new RedisBackend({port:1234})
+fc = new Flexcache(backend, {
+  key: function() { return arguments[1] },
+  hash: function() { return "X" + arguments[0] },
+  ttl: 60*1000
+});
+
+// use a special key function for this function
+rcached = fc.cache(slow, {key: function() { return arguments[2] }});
+
+
+rcached.clear("key1")
+```
+
+
+## Flexcache Options
+
+ - `hash` *function* to generate the hash or one of *'all'*, *'one'*, *'safe_one'*, *'safe_all'*. default: **safe_all**
+ - `key` same as hash. default: **one**
+ - `ttl` cache timeout in milliseconds (see the sketch below)
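+
+A minimal sketch of passing these options (the values shown are only illustrative):
+
+```javascript
+fc = new Flexcache(backend, {
+  key: 'one',          // key from the first argument (the default)
+  hash: 'safe_all',    // hash over all arguments (the default)
+  ttl: 10 * 60 * 1000  // ten minutes, given in milliseconds
+});
+```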
+
+
+## cache(fnc)
+
+Creates a cache wrapper for an async function.
+
+## cache(...).clear([key]|[args])
+
+Clears a key and all subkeys under it. The key can be passed as a direct string, or as the same arguments as the wrapped function (plus a callback).
+
+## cache(...).clear_subkey([key, subkey]|[args])
+
+Clears a specific subkey under a key. If key and subkey are strings, they are used directly.
+You can also pass the same arguments as the wrapped function (plus a callback) and let the key and subkey be calculated by the key/hash functions.
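+
+A sketch of both call styles, reusing the `cached` wrapper from the Usage section (argument values are only illustrative):
+
+```javascript
+// pass the same arguments as the wrapped function, plus a callback: key and subkey are computed for you
+cached.clear(1, 2, function(err) { /* everything under the key derived from (1, 2) is gone */ })
+cached.clear_subkey(1, 2, function(err) { /* only the entry for exactly (1, 2) is gone */ })
+
+// or pass literal strings, which are used as-is; string subkeys are mainly useful with a
+// custom hash function, since the default safe_all subkey is hard to write by hand
+cached.clear("1")
+cached.clear_subkey("1", "X1")
+```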
+
+
+
+
+
+
+# Backends
+
+## RedisBackend
+
+### Notes
+
+ - TTL is rounded to seconds.
+ - TTL only works with Redis 2.1.3+
+
+
+### Options
+
+ - `host` Redis server hostname
+ - `port` Redis server port number
+ - `db` Database index to use
+ - `pass` Password for Redis authentication
+ - ... Remaining options passed to the redis `createClient()` method.
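+
+A minimal construction sketch (all values are placeholders):
+
+```javascript
+backend = new RedisBackend({
+  host: "127.0.0.1",
+  port: 6379,
+  db: 3,
+  pass: "secret"
+})
+fc = new Flexcache(backend, { ttl: 400000 })
+```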
+
+
1 backend/backend.js
@@ -0,0 +1 @@
+module.exports = require('../build/default/backend/backend.js')
1 backend/redis.js
@@ -0,0 +1 @@
+module.exports = require('../build/default/backend/redis.js')
2 flexcache.js
@@ -0,0 +1,2 @@
+
+module.exports = require('./build/default/flexcache')
24 package.json
@@ -0,0 +1,24 @@
+{ "name": "flexcache"
+, "description": "flexible cacher for async functions with switchable backends. redis/memory"
+, "version": "0.0.1"
+, "homepage": "https://github.com/poelzi/node-flexcache"
+, "author": "poelzi (http://poelzi.org)"
+, "repository": {"type": "git", "url": "git://github.com/poelzi/node-flexcache.git"}
+, "main": "flexcache.js"
+, "engines": {"node": "0.4.x"}
+, "keywords": ["cache", "async", "redis"]
+, "scripts": {
+ "preinstall": "#preinstall DO NOTHING",
+ "install": "node-waf configure build",
+ "update": "node-waf build",
+ "test": "node-waf && ./node_modules/nodeunit/bin/nodeunit test"}
+, "dependencies": {
+ "coffee-script": ">= 1.1.1",
+ "buffalo": "0.1.x",
+ "quack-array": "0.0.x",
+ "nodeunit": "0.5.x",
+ "redis": "0.6.x",
+ "async": "0.1.x"
+}
+
+}
6 src/backend/base.coffee
@@ -0,0 +1,6 @@
+
+class Backend
+  constructor: () ->
+
+
+module.exports = { Backend }
101 src/backend/redis.coffee
@@ -0,0 +1,101 @@
+{ Backend } = require './base'
+redis = require 'redis'
+buffalo = require 'buffalo'
+async = require 'async'
+quack = require 'quack-array'
+
+class RedisBackend extends Backend
+  constructor: (options) ->
+    @options = options or {}
+    @client = new redis.createClient(@options.port or @options.socket, @options.host, @options)
+    @ttl_bug = false
+
+    super @options
+
+    if @options.pass
+      @client.auth @options.pass, (err) ->
+        throw err if err
+
+    if @options.db
+      @client.select(@options.db)
+      @client.on "connect", () =>
+        @client.send_anyways = true
+        @client.select options.db
+        @client.send_anyways = false
+
+    @client.on "connect", () =>
+      @client.info (err, res) =>
+        for line in res.split("\r\n")
+          [key, value] = line.split(":")
+          if key == 'redis_version'
+            vers = (Number(x) for x in value.split("."))
+            # flag versions older than 2.1.3, which cannot expire keys reliably
+            if vers[0] < 2 or (vers[0] == 2 and (vers[1] < 1 or (vers[1] == 1 and vers[2] < 3)))
+              @ttl_bug = true
+              console.log("!!!! WARNING !!!!", "redis version needs to be 2.1.3+ for correct behaviour. TTL will not work" )
+            else
+              @ttl_bug = false
+
+
+
+  get: (key, subkey, fn) =>
+    @client.hget key, subkey, (err, data) =>
+      if err or not data
+        return fn(err, null)
+      try
+        decoded = quack(buffalo.parse(new Buffer(data)))
+      catch e
+        console.log("error decoding blob: " + e)
+        decoded = null
+      if not decoded
+        return fn()
+      fn null, decoded
+
+  set: (key, subkey, ttl, data, fn) =>
+    try
+      if ttl == -1
+        rttl = -1
+      else
+        rttl = ttl/1000 or 6*60*60
+      rdata = buffalo.serialize(data)
+      async.waterfall [
+        (next) =>
+          # reuse the key's remaining TTL if one is running, otherwise fall back to the default
+          @client.ttl key, (err, res) =>
+            next(null, res > 0 and res or rttl)
+        ,
+        (oldttl, next) =>
+          @client.hset key, subkey, rdata, (err, res) ->
+            next(err, oldttl, res)
+        ,
+        (oldttl, res, next) =>
+          if @ttl_bug
+            return next(null, null)
+          @client.expire key, oldttl, (err, res) ->
+            next(null, null)
+
+      ], (err) ->
+        if not fn
+          return
+        fn err, data
+    catch err
+      fn and fn(err, null)
+
+  clear: (key, fn) =>
+    @client.del key, (err, res) ->
+      fn and fn(null, null)
+
+  clear_subkey: (key, subkey, fn) =>
+    @client.hdel key, subkey, (err, res) ->
+      fn and fn(null, null)
+
+  dbsize: (fn) =>
+    @client.dbsize(fn)
+
+  clear_all: (fn) =>
+    @client.flushdb(fn)
+
+  close: (fn) =>
+    @client.quit(fn)
+
+module.exports = { RedisBackend }
103 src/flexcache.coffee
@@ -0,0 +1,103 @@
+
+
+redis = require 'redis'
+buffalo = require 'buffalo'
+async = require 'async'
+quack = require 'quack-array'
+
+
+class Flexcache
+  constructor: (@backend, options, callback) ->
+    # set default hasher
+    @options = options or {}
+    dset = (name, target, def) =>
+      switch @options[name]
+        when 'all' then @[target] = @hasher_all
+        when 'one' then @[target] = @hasher_one
+        when 'safe_all' then @[target] = @safe_hasher_all
+        when 'safe_one' then @[target] = @safe_hasher_one
+        else
+          if typeof @options[name] == 'function' or @options[name] == null
+            @[target] = @options[name]
+          else
+            @[target] = def or @hasher_one
+    dset("hash", "hash", @safe_hasher_all)
+    dset("key", "key")
+
+  hasher_one: (x) ->
+    return JSON.stringify(x)
+
+  hasher_all: (args...) =>
+    rv = ""
+    if @options.prefix
+      rv += @options.prefix
+    for arg in args
+      if rv
+        rv += "|"
+      rv += JSON.stringify(arg)
+    return rv
+
+  safe_hasher_one: (x) ->
+    return JSON.stringify(x)
+
+  safe_hasher_all: (args...) =>
+    rv = ""
+    if @options.prefix
+      rv += @options.prefix
+    rv += buffalo.serialize(args)
+    return rv
+
+  clear: (key, cb) =>
+    @backend.clear key, cb
+
+  clear_subkey: (key, subkey, cb) =>
+    @backend.clear_subkey key, subkey, cb
+
+  cache: (fn, loptions) =>
+    loptions ?= {}
+    hasher = loptions.hash or @hash
+    keyer = loptions.key or @key
+    ttl = loptions.ttl or @options.ttl
+
+    rv = (args...) =>
+      callback = args.pop()
+
+      key = keyer.apply(null, args)
+      subkey = hasher.apply(null, args)
+      @backend.get key, subkey, (err, cached) =>
+        # undecodeable means not cached
+        if err or not cached
+          fn.apply(null, args.concat [ (args...) =>
+            if args[0] # error case
+              return callback.apply(null, args)
+            # cache the result
+            @backend.set key, subkey, ttl, args, (err, res) ->
+              # don't care if it succeeded
+              callback.apply(null, args)
+          ])
+        else
+          callback.apply(null, cached)
+
+    rv.clear = (args...) =>
+      if typeof(args[0]) == "string"
+        @clear args[0]
+      else
+        callback = args.pop()
+        keyer = loptions.key or @key
+        @clear keyer.apply(null, args), callback # calculate the key like normal arguments
+
+    rv.clear_subkey = (args...) =>
+      if typeof(args[0]) == "string" and typeof(args[1]) == "string"
+        @clear_subkey args[0], args[1]
+      else
+        callback = args.pop()
+        keyer = loptions.key or @key
+        hasher = loptions.hash or @hash
+        @clear_subkey keyer.apply(null, args), hasher.apply(null, args), callback # calculate key and subkey like normal arguments
+
+    return rv
+
+module.exports = { Flexcache }
125 test/test-redis.coffee
@@ -0,0 +1,125 @@
+{ Flexcache } = require("../flexcache")
+{ RedisBackend } = require("../backend/redis")
+
+async = require 'async'
+
+module.exports.TestRedis = (test) ->
+
+  back = new RedisBackend()
+  fc = new Flexcache back, ttl:400000
+
+  todo = 0 # calculated
+  got_res = (fnc) ->
+    return (args...) ->
+      console.log("GOT RES:", args)
+      todo--
+      fnc.apply(null, args)
+
+
+  run = 0
+  slow = (time, args...) ->
+    callback = args.pop()
+    setTimeout(() ->
+      console.log("RUN SLOW")
+      run++
+      callback(null, "waited " + time, run, args)
+    , 10)
+
+
+  fast = fc.cache slow
+  fast_prefix = fc.cache slow,
+    prefix: "X_"
+  safe = fc.cache slow,
+    hash: fc.safe_hasher_all
+
+  fc.clear 100
+  fc.clear 99
+  series = [
+    (next) ->
+      slow 100, got_res next
+    (next) ->
+      fast 100, got_res (err) ->
+        test.equal(run, 2, "cache was hit 1")
+        next err
+    ,
+    (next) ->
+      # must hit cache
+      fast 100, got_res (err, waited, run) ->
+        test.equal(run, 2, "cache was not hit 2")
+        next err
+    ,
+    (next) ->
+      # clear the cache
+      fc.clear 100, next
+    ,
+    (next) ->
+      fast 100, got_res (err, waited, run) ->
+        test.equal(run, 3, "cache was hit 3")
+        next null
+    ,
+    (next) ->
+      fast 99, got_res (err, waited, run) ->
+        test.equal(run, 4, "cache was hit 4")
+        next null
+    ,
+    # subkey tests
+    (next) ->
+      fast 99, 1, got_res (err, waited, run) ->
+        test.equal(run, 5, "cache was hit 5")
+        next null
+    ,
+    (next) ->
+      fast 99, 2, got_res (err, waited, run) ->
+        test.equal(run, 6, "cache was hit 6")
+        next null
+    ,
+    (next) ->
+      # clear the cache
+      fast.clear_subkey 99, 1, next
+    ,
+    (next) ->
+      fast 99, 2, got_res (err, waited, run) ->
+        test.equal(run, 6, "cache was hit 7")
+        next null
+    ,
+    (next) ->
+      fast 99, 1, got_res (err, waited, run) ->
+        test.equal(run, 7, "cache was hit 8")
+        next null
+    ,
+    (next) ->
+      fast 99, "1", got_res (err, waited, run) ->
+        test.equal(run, 8, "cache was hit 9")
+        next null
+    ,
+    (next) ->
+      safe 99, new Buffer([1,2]), got_res (err, waited, run) ->
+        test.equal(run, 9, "cache was hit 10")
+        next null
+    ,
+    (next) ->
+      safe 99, new Buffer([1,2]), got_res (err, waited, run) ->
+        test.equal(run, 9, "cache was hit 11")
+        next null
+    ,
+    (next) ->
+      # clear the cache
+      fast.clear 99, next
+    ,
+    (next) ->
+      safe 99, new Buffer([1,2]), got_res (err, waited, run) ->
+        test.equal(run, 10, "cache was hit 12")
+        next null
+    ,
+    (next) ->
+      fast_prefix 99, "1", got_res (err, waited, run) ->
+        test.equal(run, 11, "cache was hit 13")
+        next null
+    ,
+  ]
+  todo = series.length - 3
+  async.series series, (err) ->
+    test.equal(err, null, "error thrown")
+    test.equal(todo, 0, "todo is not right")
+    back.close()
+    test.done()
33 wscript
@@ -0,0 +1,33 @@
+from os.path import dirname, join
+import os
+
+top = "."
+out = "build"
+
+class Dummy: pass
+
+
+def configure(ctx):
+    ctx.env.PATH = os.environ['PATH'].split(os.pathsep)
+    ctx.env.PATH.append(join(ctx.cwd, "node_modules", "coffee-script", "bin"))
+    ctx.find_program("coffee", var="COFFEE", path_list=ctx.env.PATH)
+    ctx.env.ARGS = "-co"
+
+
+def build(ctx):
+    env = Dummy()
+    env.variant = lambda: ""
+    for top in ("src", "test"):
+        for file in ctx.path.find_dir(top).ant_glob("**/*.coffee", flat=False):
+            #print file.change_ext(".js").bldpath(env)
+            tgtpath = file.change_ext(".js").bldpath(env)
+            if top == "src":
+                tgtpath = tgtpath[len(top)+2:]
+            else:
+                tgtpath = tgtpath[1:]
+            ctx.path.exclusive_build_node(tgtpath)
+            #print tgtpath
+            ctx(name = "coffee",
+                rule = "${COFFEE} ${ARGS} default/%s ${SRC}" %(dirname(tgtpath)),
+                source = file.srcpath()[3:],
+                target = tgtpath)
