Permalink
Browse files

add debug serializer to find non-cacheable data

  • Loading branch information...
poelzi committed Sep 16, 2011
1 parent 828acd3 commit 5ee10ae21f95a37ad8261656eae9d9b976046425
View
@@ -154,7 +154,8 @@ rcached.clear(fc.get_group("my", "arguments", 2, 4, {1:3}))
- `hash` same as hash. default: **safe\_all**
- `ttl` timeout in seconds.
- `group\_prefix` prefix added before the group hash
- `debug`
- `debug` integer debug level
- `debug\_serializer` try to decode data right after serializing it and print error in case of failure
## get\_group(args...)
View
@@ -64,6 +64,14 @@ class RedisBackend extends Backend
else
rttl = ttl/1000 or 6*60*60
rdata = buffalo.serialize(data)
if options.debug_serializer
try
buffalo.parse(rdata)
catch e
console.error("Flexcache: failed decoding serialized data:", e)
console.log(data)
console.log(rdata)
return fn("invalid encoding", null)
if max_size and rdata.length > max_size
return fn("to large", null)
async.waterfall [
View
@@ -119,7 +119,8 @@ class Flexcache
if emitter
ee = new emitter(wargs...)
@backend.get group, hash, (err, cached) =>
opt = { serializer: loptions.serializer or @options.serializer}
@backend.get group, hash, opt, (err, cached) =>
# undecodeable means non cached
if err or not cached
if @options.debug
@@ -140,7 +141,7 @@ class Flexcache
realee.on 'end', () =>
# save result in cache
#total_buffer.push(data)
opt = ttl:ttl, max_object_size:@options.max_object_size
opt = ttl:ttl, max_object_size:@options.max_object_size, debug_serializer:@options.debug_serializer
@backend.set group, hash, total_buffer, opt, (err, res) =>
if @options.debug
console.log("save cache", group, hash, "err:", err)
File renamed without changes.

0 comments on commit 5ee10ae

Please sign in to comment.