
Merge branch 'master' of github.com:Singly/hallway

2 parents c2e3dca + 11866d6 · commit 89006cac7b1e9177acde88ce7dffa0dd1ad91b61 · @tylerstalder committed May 29, 2012
Showing with 805 additions and 362 deletions.
  1. +0 −1 Connectors/FitBit/README.md
  2. +0 −31 Connectors/FitBit/activity.js
  3. +0 −11 Connectors/FitBit/auth.js
  4. +0 −16 Connectors/FitBit/device.js
  5. BIN Connectors/FitBit/icon.png
  6. +0 −23 Connectors/FitBit/lib.js
  7. +0 −21 Connectors/FitBit/package.json
  8. +0 −16 Connectors/FitBit/profile.js
  9. +0 −10 Connectors/FitBit/synclets.json
  10. +0 −10 Connectors/FitBit/test.js
  11. +23 −8 hallwayd.js
  12. +18 −1 lib/acl.js
  13. +14 −6 lib/authManager.js
  14. +11 −2 lib/dMap.js
  15. +38 −6 lib/dawg.js
  16. +77 −11 lib/ijod.js
  17. +3 −0 lib/lconfig.js
  18. +18 −14 lib/logger.js
  19. +4 −4 lib/optional-memcache.js
  20. +90 −0 lib/qix.js
  21. +4 −2 lib/services/facebook/feed.js
  22. +4 −2 lib/services/facebook/home.js
  23. +3 −1 lib/services/facebook/map.js
  24. +20 −0 lib/services/facebook/proxy.js
  25. +2 −26 lib/services/fitbit/activities.js
  26. +14 −14 lib/services/fitbit/devices.js
  27. +11 −0 lib/services/fitbit/fat.js
  28. +28 −0 lib/services/fitbit/lib.js
  29. +12 −7 lib/services/fitbit/map.js
  30. +10 −0 lib/services/fitbit/sleep.js
  31. +9 −4 lib/services/fitbit/synclets.json
  32. +11 −0 lib/services/fitbit/weight.js
  33. +19 −0 lib/services/foursquare/proxy.js
  34. +19 −0 lib/services/github/proxy.js
  35. +19 −0 lib/services/instagram/proxy.js
  36. +39 −0 lib/services/linkedin/proxy.js
  37. +4 −0 lib/services/links/map.js
  38. +3 −2 lib/services/links/oembed.js
  39. +2 −1 lib/services/links/resolve.js
  40. +9 −0 lib/services/tumblr/proxy.js
  41. +3 −2 lib/services/twitter/map.js
  42. +9 −0 lib/services/twitter/proxy.js
  43. +74 −53 lib/syncManager.js
  44. +69 −19 lib/webservice.js
  45. +67 −0 lib/worker.js
  46. +39 −37 package.json
  47. +5 −0 test/medusa.js
  48. +1 −1 test/services/facebook.test.js
1 Connectors/FitBit/README.md
@@ -1 +0,0 @@
-While not free FitBit looks to have an amazing wealth of health related information. They have a [public API](http://dev.fitbit.com) currently in beta.
31 Connectors/FitBit/activity.js
@@ -1,31 +0,0 @@
-/*
-*
-* Copyright (C) 2011, The Locker Project
-* All rights reserved.
-*
-* Please see the LICENSE file for more information.
-*
-*/
-
-exports.sync = require('./lib').genericSync(function(pi){
- if(!pi.config) pi.config = {};
- if(!pi.config.memberSince || !pi.config.lastSyncTime) return false;
- if(!pi.config.activeNext) pi.config.activeNext = (new Date(pi.config.memberSince).getTime()); // convert to epoch format
- if((pi.config.activeNext > new Date(pi.config.lastSyncTime).getTime())) return false; // don't run ahead of last sync
- return 'activities/date/'+format(pi.config.activeNext)+'.json';
-}, function(pi, data){
- if(!data || !data.summary) return {};
- data.id = format(pi.config.activeNext); // stub in an id based on the date
- var next = pi.config.activeNext + (3600*1000*24); // next run get next day
- if(next < (new Date(pi.config.lastSyncTime).getTime())){
- pi.config.activeNext = next; // don't move forward past last sync time!
- if(pi.config.activeNext < Date.now()) pi.config.nextRun = -1; // force run again
- }
- return {active:[data]};
-});
-
-function format(epoch)
-{
- d = new Date(epoch);
- return ""+d.getFullYear()+'-'+((d.getMonth() < 9 ? '0' : '') + (d.getMonth() + 1))+'-'+((d.getDate() < 10 ? '0' : '') + d.getDate());
-}
11 Connectors/FitBit/auth.js
@@ -1,11 +0,0 @@
-module.exports = {
- handler : function (host, auth, done, req, res) {
- require('fitbit-js')(auth.appKey, auth.appSecret, host + 'auth/fitbit/auth')
- .getAccessToken(req, res, function(err, newToken) {
- if(err) return done(err);
- if(!newToken) return done("token missing");
- auth.token = newToken;
- done(null, auth);
- });
- }
-}
16 Connectors/FitBit/device.js
@@ -1,16 +0,0 @@
-/*
-*
-* Copyright (C) 2011, The Locker Project
-* All rights reserved.
-*
-* Please see the LICENSE file for more information.
-*
-*/
-
-exports.sync = require('./lib').genericSync(function(pi){
- return 'devices.json';
-}, function(pi, data){
- if(!pi.config) pi.config = {};
- pi.config.lastSyncTime = data[0].lastSyncTime;
- return {device:data};
-});
BIN Connectors/FitBit/icon.png
Deleted file not rendered
23 Connectors/FitBit/lib.js
@@ -1,23 +0,0 @@
-/*
-*
-* Copyright (C) 2012, The Locker Project
-* All rights reserved.
-*
-* Please see the LICENSE file for more information.
-*
-*/
-
-
-exports.genericSync = function(pather, arrayer) {
- return function(pi, cb) {
- client = require('fitbit-js')(pi.auth.appKey, pi.auth.appSecret);
- var path = pather(pi);
- if(!path) return cb(null, pi);
- // need foo:bar to make fitbit api work right otehrwie no params appends ? and get BROKEN erro!
- client.apiCall('GET', '/user/-/' + path, {token:pi.auth.token, foo:'bar'}, function(err, respData) {
- if(err || !respData) return cb(err);
- pi.data = arrayer(pi, respData);
- cb(err, pi);
- });
- };
-};
21 Connectors/FitBit/package.json
@@ -1,21 +0,0 @@
-{
- "author": "Singly <nerds@singly.com>",
- "name": "fitbit",
- "description": "Fitbit Connector",
- "version": "0.0.3",
- "repository": {
- "title": "Fitbit",
- "handle": "fitbit",
- "author": "nerds",
- "update": "auto",
- "github": "https://github.com/LockerProject/Locker",
- "type": "connector",
- "static": "false",
- "url": ""
- },
- "engines": {
- "node": ">=0.4.9"
- },
- "dependencies": {},
- "devDependencies": {}
-}
16 Connectors/FitBit/profile.js
@@ -1,16 +0,0 @@
-/*
-*
-* Copyright (C) 2011, The Locker Project
-* All rights reserved.
-*
-* Please see the LICENSE file for more information.
-*
-*/
-
-exports.sync = require('./lib').genericSync(function(pi){
- return 'profile.json';
-}, function(pi, data){
- if(!pi.config) pi.config = {};
- pi.config.memberSince = data.user.memberSince; // used by activity
- return {profile:[data.user]};
-});
10 Connectors/FitBit/synclets.json
@@ -1,10 +0,0 @@
-{
- "provides":["device/fitbit", "profile/fitbit", "activity/fitbit"],
- "mongoId" : {
- "profile":"encodedId",
- "activity":"date"
- },
- "synclets":[{"name":"profile", "frequency":7200},
- {"name":"device", "frequency": 3600, "tolerance": 0},
- {"name":"activity", "frequency": 7200}]
-}
10 Connectors/FitBit/test.js
@@ -1,10 +0,0 @@
-var fs = require('fs');
-var auth = JSON.parse(fs.readFileSync("../../Me/fitbit/me.json")).auth;
-console.error(auth);
-
-var sync = require(process.argv[2]);
-sync.sync({auth:auth,config:{memberSince:'2011-01-01',lastSyncTime:'2011-11-11'}},function(e,js){
- console.error(e);
- console.error("got js:"+JSON.stringify(js));
-});
-
31 hallwayd.js
@@ -16,7 +16,9 @@ var util = require('util');
var argv = require("optimist").argv;
var Roles = {
- worker:{},
+ worker:{
+ startup:startWorkerWS
+ },
apihost:{
startup:startAPIHost
},
@@ -64,7 +66,7 @@ if (process.argv.indexOf("offline") >= 0) syncManager.manager.offlineMode = true
var shuttingDown_ = false;
-function syncComplete(response, task) {
+function syncComplete(response, task, callback) {
logger.info("Got a completion from %s", task.profile);
if(!response) logger.debug("missing response");
if(!response) response = {};
@@ -77,18 +79,18 @@ function syncComplete(response, task) {
async.series([
function(cb) { if(!response.auth) return cb(); profileManager.authSet(task.profile, response.auth, cb) },
function(cb) { if(!response.config) return cb(); profileManager.configSet(task.profile, response.config, cb) },
- function() { syncManager.manager.schedule(task, nextRun) }
- ]);
+ function(cb) { syncManager.manager.schedule(task, nextRun, cb); },
+ ], callback);
})
}
function startSyncmanager(cbDone) {
var isWorker = (role === Roles.worker);
+ if (isWorker) {
+ syncManager.manager.completed = syncComplete;
+ logger.info("Starting a worker.");
+ }
syncManager.manager.init(isWorker, function() {
- if (isWorker) {
- logger.info("Starting a worker.");
- syncManager.manager.on("completed", syncComplete);
- }
cbDone();
});
}
@@ -116,6 +118,19 @@ function startDawg(cbDone) {
});
}
+function startWorkerWS(cbDone) {
+ if (!lconfig.worker || !lconfig.worker.port) {
+ logger.error("You must specify a worker section with at least a port and password to run.");
+ shutdown(1);
+ }
+ var worker = require("worker");
+ if (!lconfig.worker.listenIP) lconfig.worker.listenIP = "0.0.0.0";
+ worker.startService(syncManager.manager, lconfig.worker.port, lconfig.worker.listenIP, function() {
+ logger.info("Starting a Hallway Worker, thou shalt be digitized",lconfig.worker);
+ cbDone();
+ });
+}
+
if (argv._.length > 0) {
if (!Roles.hasOwnProperty(argv._[0])) {
logger.error("The %s role is unknown.", argv._[0]);
19 lib/acl.js
@@ -110,6 +110,14 @@ exports.getApp = function(app, callback) {
});
};
+// return the full list (used by dawg)
+exports.getApps = function(callback) {
+ dal.query("SELECT app FROM Apps", [], function(err, rows) {
+ callback(err, rows);
+ });
+};
+
+
// create a new app and generate it's keys
exports.addApp = function(notes, callback) {
logger.debug("creating new app from ",notes);
@@ -139,7 +147,10 @@ exports.deleteApp = function(appId, callback) {
logger.debug("deleting app "+appId);
var q = dal.query("DELETE FROM Apps WHERE app=?", [appId], function(err) {
if (err) logger.error("query failed: ", q, err);
- callback(err);
+ var q = dal.query("DELETE FROM Accounts WHERE app=?", [appId], function(err) {
+ if (err) logger.error("query failed: ", q, err);
+ callback(err);
+ });
});
};
@@ -159,3 +170,9 @@ exports.delProfiles = function(account, callback) {
dal.query("DELETE FROM Accounts WHERE account = ?", [account], callback);
}
+// whackawhacka
+exports.delProfile = function(account, pid, callback) {
+ logger.debug("deleting account profile ",account,pid);
+ dal.query("DELETE FROM Accounts WHERE account = ? AND profile = ?", [account, pid], callback);
+}
+
20 lib/authManager.js
@@ -3,6 +3,7 @@ var fs = require('fs');
var querystring = require('querystring');
var request = require('request');
var async = require('async');
+var sanitizer = require('sanitizer');
var lconfig = require('lconfig');
var logger = require('logger').logger("authManager");
@@ -33,7 +34,7 @@ function startServiceAuth(service, appID, req, res) {
authModule = require(path.join('services', service, 'auth.js'));
} catch (E) {
logger.warn("can't load auth.js for "+service,E);
- return authfail(service, res, "The service named "+service+" isn't here, sorry!", 404);
+ return authfail(service, res, "Unknown service: "+sanitizer.escape(service), 404);
}
// oauth2 types redirect
@@ -217,11 +218,18 @@ myOAP.on('enforce_login', function(req, res, authorize_url, cbForm) {
logger.debug("got app ",app);
// verify redirect_uri here is superset of the configured one
- if(!app.notes || !app.notes.callbackUrl || !req.query.redirect_uri || req.query.redirect_uri.indexOf(app.notes.callbackUrl) == 0)
- {
- logger.warn("callback mismatch warning!", app.app, app.notes && app.notes.callbackUrl, req.query.redirect_uri);
- // enable this after catching/migrating existing apps!
- // return res.send("oauth redirect uri mismatch, it sucks I know, make sure '"+req.query.redirect_uri+"' matches the app settings in dev.singly.com");
+ if (!app.notes ||
+ !app.notes.callbackUrl ||
+ !req.query.redirect_uri ||
+ req.query.redirect_uri.indexOf(app.notes.callbackUrl) !== 0) {
+ logger.warn("callback mismatch warning!",
+ app.app, app.notes && app.notes.callbackUrl, req.query.redirect_uri);
+ /* return res.send(
+ "Unacceptable redirect_uri. If you are the developer, please check " +
+ "<pre>" + sanitizer.escape(req.query.redirect_uri) + "</pre>" +
+ " against your " +
+ "<a href=\"https://dev.singly.com/apps\">application settings</a>."
+ ); */
}
// if starting auth
13 lib/dMap.js
@@ -153,18 +153,27 @@ exports.load = function(service) {
maps.system = {
defaults: {
anubis: 'logs'
- }
+ },
+ logs: {
+ text: function(data) {
+ var keys = {};
+ // return ip as hex and paths
+ data.forEach(function(item){ keys[item.from.split('.').map(function(y){return parseInt(y).toString(16)}).join('')]=1; keys[item.path]=1; });
+ return Object.keys(keys).join(" ");
+ }
+ }
}
// TODO these should be done somewhere else or in an init function or somesuch
+exports.load('links');
exports.load('twitter');
exports.load('facebook');
exports.load('instagram');
exports.load('foursquare');
exports.load('tumblr');
exports.load('linkedin');
-exports.load('email');
exports.load('fitbit');
exports.load('gcontacts');
exports.load('github');
+
44 lib/dawg.js
@@ -47,9 +47,13 @@ dawg.get('/', function(req, res) {
});
dawg.get('/slag', function(req, res) {
- slag(function(err, count){
+ slag(function(err, ret){
if(err) res.json(err, 500);
- res.json(count);
+ slack(function(err, js){
+ if(err) res.json(err, 500);
+ ret.slack = js;
+ res.json(ret);
+ })
})
});
@@ -71,6 +75,24 @@ dawg.get('/links/:type', function(req, res) {
});
});
+dawg.get('/apps/active', function(req, res) {
+ var options = {limit:1};
+ options.since = parseInt(req.query.since) || (Date.now() - 86400000);
+ acl.getApps(function(err, all){
+ if(err || !all) res.json(err, 500);
+ var count = 0;
+ async.forEach(all, function(row, cb){
+ ijod.getRange('logs:'+row.app+'/anubis', options, null, function(err, cnt) {
+ if(cnt && cnt.length > 0) count++;
+ cb();
+ }, true); // true == SMOKE MONST3R
+ }, function(){
+ res.json(count);
+ })
+ });
+});
+
+
// error handling
dawg.error(function(err, req, res, next) {
if(err.stack) logger.error(err.stack);
@@ -83,16 +105,26 @@ exports.startService = function(port, ip, cb) {
});
setInterval(function() {
- slag(function(err, cnt) {
- if (!err && cnt !== undefined) instruments.gauge({slag:cnt}).send();
+ slag(function(err, res) {
+ if (!err && res !== undefined) instruments.gauge({slag:res.cnt}).send();
+ });
+ slack(function(err, res) {
+ if (!err && res !== undefined) instruments.gauge({slack:res.cnt}).send();
});
}, 60000);
};
function slag(cb)
{
- dal.query("select count(*) as cnt from SyncSchedule where nextRun < UNIX_TIMESTAMP(NOW())", [], function(err, ret){
- if (cb) cb(err, ret && ret[0] && ret[0].cnt);
+ dal.query("select count(*) as cnt, avg(UNIX_TIMESTAMP(NOW()) - (nextRun/1000)) as lag from SyncSchedule where state = 0 and nextRun < UNIX_TIMESTAMP(NOW())*1000", [], function(err, ret){
+ cb(err, ret && ret[0]);
+ })
+}
+
+function slack(cb)
+{
+ dal.query("select count(*) as cnt, avg(UNIX_TIMESTAMP(NOW()) - (nextRun/1000)) as lag from SyncSchedule where state > 0 and nextRun < UNIX_TIMESTAMP(NOW())*1000", [], function(err, ret){
+ cb(err, ret && ret[0]);
})
}
88 lib/ijod.js
@@ -28,18 +28,18 @@ var mmh = require("murmurhash3");
var instruments = require("instruments");
var memcachelib = require("optional-memcache");
var memcache;
+var qix = require('qix');
exports.debug = false;
exports.initDB = function(callback) {
memcache = memcachelib.memcacheClient();
- dal.bQuery(["CREATE TABLE IF NOT EXISTS ijod (idr VARCHAR(32) NOT NULL, base VARCHAR(32), path VARCHAR(255), hash VARCHAR(32), at BIGINT, offset INT, len INT, lat DECIMAL(8,5), lng DECIMAL(8,5), PRIMARY KEY(idr))"], function(err) {
- memcache.once("connect", function() {
+ dal.bQuery(["CREATE TABLE IF NOT EXISTS ijod (idr VARCHAR(32) NOT NULL, base VARCHAR(32), path VARCHAR(255), hash VARCHAR(32), at BIGINT, offset INT, len INT, lat DECIMAL(8,5), lng DECIMAL(8,5), `q0` bigint(20) unsigned DEFAULT NULL, `q1` bigint(20) unsigned DEFAULT NULL, `q2` bigint(20) unsigned DEFAULT NULL, `q3` bigint(20) unsigned DEFAULT NULL, PRIMARY KEY(idr))"], function(err) {
+ memcache.connect(function() {
logger.info("Connected to memcache");
callback(err);
});
- memcache.connect();
});
}
@@ -138,6 +138,18 @@ IJOD.prototype.addData = function(arg, callback) {
arg.id = idr.id(arg.idr);
arg.idr = idr.toString(arg.idr);
var ll = dMap.get('ll',arg.data,arg.idr) || [null,null];
+ // build our query matching
+ var q = qget(arg)
+ var qx = ['00','00','00','00'];
+ var buf = qix.buf(q);
+ if(exports.debug) logger.debug("Q",arg.idr,q,buf&&buf.toString('hex'));
+ if(buf)
+ {
+ qx[0] = buf.slice(0,8).toString('hex');
+ qx[1] = buf.slice(8,16).toString('hex');
+ qx[2] = buf.slice(16,24).toString('hex');
+ qx[3] = buf.slice(24).toString('hex');
+ }
var self = this;
this.startAddTransaction(function() {
var tmpJson = JSON.stringify(arg);
@@ -146,17 +158,18 @@ IJOD.prototype.addData = function(arg, callback) {
var offset = self.len;
self.len += gzdata.length;
memcache.replace(idr.hash(arg.idr), tmpJson, function(error, result) {
- // TODO
+ // TODO, also replace idr2 in types?
});
- self.transactionQueries.push({sql:"INSERT INTO ijod (idr, base, path, hash, at, offset, len, lat, lng) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE path=VALUES(path), hash=VALUES(hash), at=VALUES(at), offset=VALUES(offset), len=VALUES(len)", binds:[idr.hash(arg.idr), idr.baseHash(arg.idr), self.path, hash, arg.at, offset, (self.len - offset), ll[0], ll[1]]});
+ self.transactionQueries.push({sql:"INSERT INTO ijod (idr, base, path, hash, at, offset, len, lat, lng, q0, q1, q2, q3) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, x?, x?, x?, x?) ON DUPLICATE KEY UPDATE path=VALUES(path), hash=VALUES(hash), at=VALUES(at), offset=VALUES(offset), len=VALUES(len), lat=VALUES(lat), lng=VALUES(lng), q0=VALUES(q0), q1=VALUES(q1), q2=VALUES(q2), q3=VALUES(q3)", binds:[idr.hash(arg.idr), idr.baseHash(arg.idr), self.path, hash, arg.at, offset, (self.len - offset), ll[0], ll[1], qx[0], qx[1], qx[2], qx[3]]});
// if there's types, insert each of them too for filtering
if(!arg.data || !arg.types) return callback();
async.forEachSeries(Object.keys(arg.types), function(type, cb){
var i2 = idr.clone(arg.idr);
i2.protocol = type;
instruments.increment("data.types." + type).send();
if(typeof arg.types[type] == 'string') i2.hash = arg.types[type]; // also index this under an alternate data id!
- self.transactionQueries.push({sql:"INSERT INTO ijod (idr, base, path, hash, at, offset, len, lat, lng) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE path=VALUES(path), hash=VALUES(hash), at=VALUES(at), offset=VALUES(offset), len=VALUES(len)", binds:[idr.hash(i2), idr.baseHash(i2), self.path, hash, arg.at, offset, (self.len - offset), ll[0], ll[1]]});
+ if(typeof arg.types[type] == 'object' && arg.types[type].auth) i2.auth = arg.types[type].auth; // also index this with a different auth!
+ self.transactionQueries.push({sql:"INSERT INTO ijod (idr, base, path, hash, at, offset, len, lat, lng, q0, q1, q2, q3) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, x?, x?, x?, x?) ON DUPLICATE KEY UPDATE path=VALUES(path), hash=VALUES(hash), at=VALUES(at), offset=VALUES(offset), len=VALUES(len), lat=VALUES(lat), lng=VALUES(lng), q0=VALUES(q0), q1=VALUES(q1), q2=VALUES(q2), q3=VALUES(q3)", binds:[idr.hash(i2), idr.baseHash(i2), self.path, hash, arg.at, offset, (self.len - offset), ll[0], ll[1], qx[0], qx[1], qx[2], qx[3]]});
cb();
}, callback);
});
@@ -188,7 +201,9 @@ exports.getOne = function(targetIdr, callback) {
// take the raw id if given too
var hash = (typeof targetIdr == 'string' && targetIdr.indexOf(':') == -1) ? targetIdr : idr.hash(targetIdr);
memcache.get(hash, function(error, result) {
- if (error || result == "NOT_STORED" || result == null) {
+ var js;
+ try { js = JSON.parse(result[hash]) }catch(E){}
+ if (error || result == "NOT_STORED" || result == null || !js) {
var s3client = knox.createClient({
key:lconfig.s3.key,
secret:lconfig.s3.secret,
@@ -226,7 +241,7 @@ exports.getOne = function(targetIdr, callback) {
});
} else {
if (exports.debug) logger.debug("Get one in %d", (Date.now() - startTime));
- callback(null, JSON.parse(result));
+ callback(null, js);
}
});
};
@@ -238,7 +253,7 @@ exports.getOne = function(targetIdr, callback) {
*
* Results are returned in reverse chronological order.
*/
-exports.getRange = function(basePath, range, cbEach, cbDone) {
+exports.getRange = function(basePath, range, cbEach, cbDone, smoke) {
var startRangeTime = Date.now();
var s3client = knox.createClient({
key:lconfig.s3.key,
@@ -257,6 +272,13 @@ exports.getRange = function(basePath, range, cbEach, cbDone) {
sql += " AND at < ?";
binds.push(range.until);
}
+ if (range && range.box) {
+ sql += " AND lat > ? AND lat < ? AND lng > ? and lng < ?";
+ range.box.lat.sort(function(a,b){return a-b});
+ range.box.lng.sort(function(a,b){return a-b});
+ binds.push(range.box.lat[0], range.box.lat[1], range.box.lng[0], range.box.lng[1]);
+ }
+ sql += qq(range.q);
sql += " ORDER BY at DESC";
if (range.limit) {
sql += " LIMIT " + parseInt(range.limit);
@@ -271,6 +293,7 @@ exports.getRange = function(basePath, range, cbEach, cbDone) {
dal.query(sql, binds, function(error, rows) {
if (error) return cbDone(error);
if (rows.length == 0) return cbDone(null);
+ if (smoke) return cbDone(null, rows); // ugly hack added by jer, beware the smoke monster!
var curPath = "";
var ranges = [];
@@ -351,6 +374,19 @@ exports.getRange = function(basePath, range, cbEach, cbDone) {
for (var i = 0; i < results.length; ++i) {
if (!results[i]) continue;
for (var j = 0; j < results[i].length; ++j) {
+ // make sure q matches, must be same as above (TODO cleanup as part of refactor)
+ if(range.q)
+ {
+ var q = qix.chunk(qget(results[i][j]));
+ var parts = qix.chunk(range.q);
+ var matches = 0;
+ parts.forEach(function(part){if(q.indexOf(part) >= 0) matches++ });
+ if(matches != parts.length)
+ {
+ logger.warn("couldn't find ",parts.join(','),"in",q.join(','));
+ continue;
+ }
+ }
cbEach(results[i][j]);
}
}
@@ -397,7 +433,7 @@ exports.batchSmartAdd = function(entries, callback) {
entry.hash = hash;
}
}
- ij.addData(entry, cb);
+ ij.addData(entry, function() { async.nextTick(cb); });
}, function(error) {
if (error) {
ij.abortAddTransaction(function() {
@@ -426,6 +462,13 @@ exports.getBounds = function(basePath, range, cbDone) {
sql += " AND at < ?";
binds.push(range.until);
}
+ if (range && range.box) {
+ sql += " AND lat > ? AND lat < ? AND lng > ? and lng < ?";
+ range.box.lat.sort(function(a,b){return a-b});
+ range.box.lng.sort(function(a,b){return a-b});
+ binds.push(range.box.lat[0], range.box.lat[1], range.box.lng[0], range.box.lng[1]);
+ }
+ sql += qq(range.q);
sql += " ORDER BY at DESC";
if (range.limit) {
sql += " LIMIT " + parseInt(range.limit);
@@ -441,6 +484,26 @@ exports.getBounds = function(basePath, range, cbDone) {
});
}
+function qq(q)
+{
+ if(!q) return "";
+ var buf = qix.buf(q);
+ if(!buf) return "";
+ var ret = "";
+ for(var i = 0; i < 4; i++)
+ {
+ var hex = (i < 3) ? buf.slice(i*8,(i*8)+8).toString('hex') : buf.slice(24).toString('hex');
+ ret += " AND q"+i+" & x'"+hex+"' = x'"+hex+"'";
+ }
+ return ret;
+}
+
+function qget(entry)
+{
+ var oe = dMap.get('oembed', entry.data, entry.idr) || {};
+ return [entry.q, oe.url, oe.title, oe.author_name, dMap.get('text', entry.data, entry.idr)].join(" "); // get all queryable strings
+}
+
// Takes a complete changeset breaks it down by base and saves it to S3
exports.pump = function(arg, cbDone) {
if(!arg || !Array.isArray(arg)) return cbDone(new Error("arg is missing or invalid: "+JSON.stringify(arg)));
@@ -457,7 +520,10 @@ exports.pump = function(arg, cbDone) {
// do each clustering
async.forEach(Object.keys(bases), function(base, cb){
- exports.batchSmartAdd(bases[base], cb);
+ exports.batchSmartAdd(bases[base], function(error) {
+ if (error) return cb(error);
+ async.nextTick(cb);
+ });
}, function(error){
if (exports.debug) logger.debug("pump done",Object.keys(bases),error);
cbDone(error ? error : null, error ? null : arg);
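
The expanded getRange interface is easier to see in one place than spread across the hunks above. A minimal usage sketch follows; the base path and values are placeholders, while the option names and the trailing "smoke" flag come from this diff:

    var ijod = require('ijod');

    // q adds the q0..q3 bitmask prefilter built by qix, box adds a lat/lng
    // bounding box, and the trailing flag is the "smoke" mode that hands the
    // raw index rows to cbDone instead of streaming decoded entries to cbEach.
    ijod.getRange('logs:someAppId/anubis', {
      since: Date.now() - 86400000,  // last 24 hours, as the dawg /apps/active endpoint does
      limit: 20,
      q: 'facebook graph',
      box: {lat: [37.7, 37.8], lng: [-122.5, -122.3]}
    }, function(entry) {
      console.log(entry.idr);        // called once per matching entry
    }, function(err) {
      if (err) console.error(err);   // done callback (receives the raw rows when smoke is true)
    }, false);
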
3 lib/lconfig.js
@@ -49,11 +49,14 @@ exports.load = function(filepath) {
exports.memcache = config.memcache;
exports.statsd = config.statsd;
exports.dawg = config.dawg;
+ exports.worker = config.worker;
+ if(!exports.worker) exports.worker = {port:8041};
exports.stats = config.stats || {};
exports.database = config.database || {};
if(!exports.database.maxConnections) exports.database.maxConnections = 10;
exports.alerting = config.alerting;
exports.s3 = config.s3 || {key:"x", secret:"X", bucket:"X"};
+ exports.syncManager = config.syncManager || {};
exports.authSecrets = config.authSecrets || {crypt:'foo', sign:'bar'}; // these need to be required to be set in prod, trusted cookies use them during auth
exports.cookieExpire = config.cookieExpire || (60 * 60 * 24 * 30); // default 30 days
if (exports.stats.prefix) {
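
Both new lconfig sections are plain pass-throughs from config.json. A hypothetical fragment is sketched below; the key names are the ones read by lconfig.js, hallwayd.js and syncManager.js in this commit, the values are placeholders, and worker.password is only implied by the startup error message in hallwayd.js:

    {
      "worker": {
        "port": 8041,
        "listenIP": "0.0.0.0",
        "password": "change-me"
      },
      "syncManager": {
        "numWorkers": 4,
        "pagingTiming": 2000,
        "defaultScanTime": 5000
      }
    }
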
32 lib/logger.js
@@ -164,7 +164,8 @@ Logger.prototype.anubis = function(req, js, type)
if(!req._authsome) return self.warn("anubis isn't authsome");
// fill in log entry
if(!js) js = {};
- js.pid = req._authsome.account+'@'+req._authsome.app;
+ js.act = req._authsome.account;
+ js.app = req._authsome.app;
js.at = Date.now();
js.type = type||'log'; // sanity
js.path = req.url;
@@ -178,27 +179,30 @@ Logger.prototype.anubis = function(req, js, type)
var reap = setInterval(reaper, 10000);
function reaper()
{
+ var ijod = require('ijod');
if(tomb.length == 0) return;
var doom = tomb;
tomb = [];
module.exports.debug("reaping",doom.length);
var bundle = {};
var types = {};
+ // munge the raw list into batch groupings per account@app
doom.forEach(function(js){
- if(!bundle[js.pid]) bundle[js.pid] = [];
- bundle[js.pid].push(js);
- // things that are non the default, make them more findable
- if(js.type != 'log') {
- if(!types[js.pid]) types[js.pid] = {};
- types[js.pid][js.type] = true;
- }
- delete js.pid; // surp
+ var key = [js.act, js.app].join('@');
+ if(!bundle[key]) bundle[key] = [];
+ bundle[key].push(js);
+ delete js.app; // don't need to store this, present in idr
});
- var ijod = require('ijod');
- Object.keys(bundle).forEach(function(pid){
- var entry = {data:bundle[pid], at:Date.now()};
- entry.idr = 'logs:'+pid+'/anubis#'+entry.at;
- if(types[pid]) entry.types = types[pid];
+ Object.keys(bundle).forEach(function(key){
+ var parts = key.split('@');
+ var act = parts[0];
+ var app = parts[1];
+ var entry = {data:bundle[key], at:Date.now()};
+ // TODO also add other types if the set contains more than log? entry.types needs refactoring first
+ // idr is global to app by default
+ entry.idr = 'logs:'+app+'/anubis#'+act+'.'+entry.at;
+ // also change the base to include the account so it's getRange'able that way (bit of a hack :/)
+ entry.types = {logs:{auth:act}};
ijod.batchSmartAdd([entry], function(err){
if(err) module.exports.error("anubis bsa",err);
})
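
The reaper now batches anubis entries per account@app instead of per pid. A sketch of the resulting IJOD entry shape (ids and timestamps are placeholders):

    var entry = {
      // base is global to the app, with the account and batch time in the hash
      idr: 'logs:exampleApp/anubis#exampleAccount.1338332000000',
      at: 1338332000000,
      data: [
        {act: 'exampleAccount', at: 1338331999000, type: 'log', path: '/services/facebook/feed'}
      ],
      // alternate index keyed by the account so the same data is getRange'able per account
      types: {logs: {auth: 'exampleAccount'}}
    };
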
8 lib/optional-memcache.js
@@ -5,8 +5,8 @@ var util = require("util");
function DisabledMemcache() {
}
util.inherits(DisabledMemcache, events.EventEmitter);
-DisabledMemcache.prototype.connect = function() {
- this.emit("connect");
+DisabledMemcache.prototype.connect = function(cb) {
+ cb(null);
};
DisabledMemcache.prototype.get = function(key, cb) {
cb(null, "NOT_STORED");
@@ -20,8 +20,8 @@ DisabledMemcache.prototype.set = function(key, value, cb) {
exports.memcacheClient = function() {
if (lconfig.memcache && lconfig.memcache.host && lconfig.memcache.port) {
- var memcacheLib = require("memcache");
- return new memcacheLib.Client(lconfig.memcache.port, lconfig.memcache.host);
+ var mc = require("mc");
+ return new mc.Client(lconfig.memcache.host + ":" + lconfig.memcache.port);
} else {
return new DisabledMemcache();
}
90 lib/qix.js
@@ -0,0 +1,90 @@
+var mmh = require('murmurhash3');
+var natural = require('natural');
+var unfreq = "lcumwfgypbvkjxqz"; // least popular letters
+// stopwords from mysql, stemmed
+var stop = {"abl":1,"about":1,"abov":1,"according":1,"accordingli":1,"across":1,"actual":1,"after":1,"afterward":1,"again":1,"against":1,"ain":1,"all":1,"allow":1,"almost":1,"alon":1,"along":1,"alreadi":1,"also":1,"although":1,"alwai":1,"among":1,"amongst":1,"and":1,"anoth":1,"ani":1,"anybodi":1,"anyhow":1,"anyon":1,"anything":1,"anywai":1,"anywher":1,"apart":1,"appear":1,"appreci":1,"appropri":1,"aren":1,"around":1,"asid":1,"ask":1,"asking":1,"associated":1,"avail":1,"awai":1,"awfulli":1,"becam":1,"becaus":1,"becom":1,"been":1,"befor":1,"beforehand":1,"behind":1,"believ":1,"below":1,"besid":1,"best":1,"better":1,"between":1,"beyond":1,"both":1,"brief":1,"but":1,"mon":1,"came":1,"can":1,"cannot":1,"cant":1,"caus":1,"certain":1,"certainli":1,"chang":1,"clearli":1,"com":1,"come":1,"concern":1,"consequ":1,"consid":1,"contain":1,"correspond":1,"could":1,"couldn":1,"cours":1,"current":1,"definit":1,"describ":1,"despit":1,"did":1,"didn":1,"differ":1,"doe":1,"doesn":1,"don":1,"done":1,"down":1,"downward":1,"dure":1,"each":1,"edu":1,"eight":1,"either":1,"els":1,"elsewher":1,"enough":1,"entir":1,"especi":1,"etc":1,"even":1,"ever":1,"everi":1,"everybodi":1,"everyon":1,"everything":1,"everywher":1,"exactli":1,"exampl":1,"except":1,"far":1,"few":1,"fifth":1,"first":1,"five":1,"follow":1,"for":1,"former":1,"formerli":1,"forth":1,"four":1,"from":1,"further":1,"furthermor":1,"get":1,"given":1,"give":1,"goe":1,"gone":1,"got":1,"gotten":1,"greet":1,"had":1,"hadn":1,"happen":1,"hardli":1,"hasn":1,"have":1,"haven":1,"hello":1,"help":1,"henc":1,"her":1,"here":1,"hereaft":1,"herebi":1,"herein":1,"hereupon":1,"herself":1,"him":1,"himself":1,"hither":1,"hopefulli":1,"how":1,"howbeit":1,"howev":1,"ignored":1,"immedi":1,"inasmuch":1,"inc":1,"inde":1,"indic":1,"indicated":1,"inner":1,"insofar":1,"instead":1,"into":1,"inward":1,"isn":1,"itself":1,"just":1,"keep":1,"kept":1,"know":1,"known":1,"last":1,"late":1,"later":1,"latter":1,"latterli":1,"least":1,"less":1,"lest":1,"let":1,"like":1,"littl":1,"look":1,"ltd":1,"mainli":1,"mani":1,"mai":1,"mayb":1,"mean":1,"meanwhil":1,"mere":1,"might":1,"more":1,"moreov":1,"most":1,"mostli":1,"much":1,"must":1,"myself":1,"name":1,"near":1,"nearli":1,"necessari":1,"need":1,"neither":1,"never":1,"nevertheless":1,"new":1,"next":1,"nine":1,"nobodi":1,"non":1,"none":1,"noon":1,"nor":1,"normal":1,"not":1,"noth":1,"novel":1,"now":1,"nowher":1,"obvious":1,"off":1,"often":1,"okai":1,"old":1,"onc":1,"onli":1,"onto":1,"other":1,"otherwis":1,"ought":1,"our":1,"ourselv":1,"out":1,"outsid":1,"over":1,"overal":1,"own":1,"particular":1,"particularli":1,"per":1,"perhap":1,"place":1,"pleas":1,"plu":1,"possibl":1,"presum":1,"probabl":1,"provid":1,"que":1,"quit":1,"rather":1,"realli":1,"reason":1,"regard":1,"regardless":1,"rel":1,"respect":1,"right":1,"said":1,"same":1,"saw":1,"sai":1,"second":1,"secondli":1,"see":1,"seem":1,"seen":1,"self":1,"selv":1,"sensibl":1,"sent":1,"seriou":1,"serious":1,"seven":1,"sever":1,"shall":1,"she":1,"should":1,"shouldn":1,"sinc":1,"six":1,"some":1,"somebodi":1,"somehow":1,"someon":1,"someth":1,"sometim":1,"somewhat":1,"somewher":1,"soon":1,"sorri":1,"specifi":1,"still":1,"sub":1,"such":1,"sup":1,"sure":1,"take":1,"taken":1,"tell":1,"tend":1,"than":1,"thank":1,"thanx":1,"that":1,"the":1,"their":1,"them":1,"themselv":1,"then":1,"thenc":1,"there":1,"thereaft":1,"therebi":1,"therefor":1,"therein":1,"thereupon":1,"these":1,"thei":1,"think":1,"third":1,"thi":1,"thorough":1,"thoroughli":1,"those":1,"though":1,"three":1,"through":1,"throughout":1,"thru":1,"thu":1,"togeth":1,
"too":1,"took":1,"toward":1,"tri":1,"truli":1,"try":1,"trying":1,"twice":1,"two":1,"under":1,"unfortun":1,"unless":1,"unlik":1,"until":1,"unto":1,"upon":1,"used":1,"using":1,"usual":1,"valu":1,"variou":1,"veri":1,"via":1,"viz":1,"want":1,"wasn":1,"wai":1,"welcom":1,"well":1,"went":1,"were":1,"weren":1,"what":1,"whatev":1,"when":1,"whenc":1,"whenev":1,"where":1,"whereaft":1,"wherea":1,"wherebi":1,"wherein":1,"whereupon":1,"wherev":1,"whether":1,"which":1,"while":1,"whither":1,"who":1,"whoever":1,"whole":1,"whom":1,"whose":1,"why":1,"will":1,"wish":1,"with":1,"within":1,"without":1,"won":1,"wonder":1,"would":1,"wouldn":1,"yet":1,"you":1,"your":1,"yourself":1,"yourselv":1,"zero":1,"http":1}; // plus some!
+
+// this qix system builds a micro index of fixed 28 bytes out of any string
+// works best with smaller strings
+exports.buf = function(str)
+{
+ var buf = new Buffer(Array(32));
+ if(!str) return buf;
+ var arr = defl8(tok(str));
+ if(!arr || arr.length == 0) return buf;
+ arr.forEach(function(str){
+ shifty(str, buf);
+ });
+ return buf;
+}
+
+exports.chunk = function(str)
+{
+ return defl8(tok(str));
+}
+
+// micro index a string into a fixed buffer by shifting bits
+function shifty(str, buf)
+{
+ var mask = 0x1;
+ var sum = 0;
+ for(var i=0; i < str.length; i++)
+ {
+ var code = str.charCodeAt(i);
+ code = (code > 96 && code < 123) ? code - 97 : 26; // alpha preserved, rest punctuated, to get our micro index
+ buf[code] |= mask; // flip the bit at this offset for this character
+ mask <<= 1; // move to the next bit
+ sum += code;
+ }
+ // if a smaller word, throw in terminator
+ if(str.length != 8) buf[27+(code % 4)] |= mask;
+ return buf;
+}
+
+// make sure everything in the array is 3+ <8 chars, anything longer compress it
+function defl8(arr)
+{
+ var ret = [];
+ for(var i=0;i < arr.length; i++)
+ {
+ var str = arr[i].toLowerCase();
+ str = natural.PorterStemmer.stem(str);
+ if(str.length <= 2) continue;
+ if(stop[str]) continue;
+ if(str.length <= 8 && str.split(/\D+/).length > 1) { ret.push(str); continue } // smaller w/ some alpha in them
+ // rest hash'd
+ var x = mmh.murmur32HexSync(str);
+ var ss = ""
+ // use only the first 8 nibbles of the hash and make as unique a key as possible translating into the unpopular character ranges
+ for(var j=0; j < 8; j++)
+ {
+ ss += unfreq.substr(parseInt(x.substr(j,1),16),1);
+ }
+ ret.push(ss);
+ }
+ return ret;
+}
+
+// simple tokenizer around all normal punctuation stuff
+function tok(str)
+{
+ var ret = [];
+ var chunk = ""
+ for(var i=0;i<str.length;i++)
+ {
+ var code = str.charCodeAt(i);
+ if( // we're allowing alphanumerics and all unicode/higher chars as string sequences
+ (code > 47 && code < 58) // 0-9
+ || (code > 64 && code < 91) // A-Z
+ || (code > 96 && code < 123) // a-z
+ || (code > 127 && (code < 8192 || code > 8303)) // utf8 non-punctuation
+ ) {
+ chunk += str.substr(i,1);
+ }else{
+ if(chunk.length > 0) ret.push(chunk);
+ chunk = "";
+ }
+ }
+ if(chunk.length > 0) ret.push(chunk);
+ return ret
+}
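
A quick sketch of how the two qix helpers behave, assuming the murmurhash3 and natural modules it requires are installed (the outputs described in comments are illustrative, not literal):

    var qix = require('qix');

    // buf() tokenizes, stems, drops stopwords, then folds each remaining term
    // into a small fixed-size bitfield; ijod splits it across the q0..q3 columns.
    var bits = qix.buf('Proxying the Facebook graph API');
    console.log(bits.toString('hex'));

    // chunk() runs the same tokenize/stem/deflate pipeline and is used at read
    // time to confirm every query term really occurs in a candidate entry.
    console.log(qix.chunk('facebook graph'));
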
6 lib/services/facebook/feed.js
@@ -10,7 +10,7 @@
var fb = require('./lib.js');
exports.sync = function(pi, cb) {
- var arg = {id:"me",type:"feed",limit:100,accessToken:pi.auth.accessToken};
+ var arg = {id:"me",type:"feed",limit:200,accessToken:pi.auth.accessToken};
var since=0;
if (!pi.config) pi.config = {};
if (pi.config.feedSince) since = arg.since = pi.config.feedSince;
@@ -25,9 +25,11 @@ exports.sync = function(pi, cb) {
resp.data[base] = js.data;
resp.config.feedSince = since;
// if we got full limit and we're paging through, always use that
- if(js.data.length == arg.limit && js.paging && js.paging.next) {
+ if(js.data.length != 0 && js.paging && js.paging.next) {
resp.config.feedNext = js.paging.next;
resp.config.nextRun = -1;
+ }else{
+ resp.config.feedNext = false;
}
cb(err, resp);
});
6 lib/services/facebook/home.js
@@ -11,7 +11,7 @@ var fb = require('./lib.js');
var async = require('async');
exports.sync = function(pi, cb) {
- var arg = {id:"me",type:"home",limit:100,accessToken:pi.auth.accessToken};
+ var arg = {id:"me",type:"home",limit:200,accessToken:pi.auth.accessToken};
var since=0;
if (pi.config && pi.config.homeSince) since = arg.since = pi.config.homeSince;
if (pi.config && pi.config.homeNext) arg.page = pi.config.homeNext; // if we're paging the first time
@@ -34,9 +34,11 @@ exports.sync = function(pi, cb) {
}, function(){
resp.config.homeSince = since;
// if we got full limit and we're paging through, always use that
- if (js.data.length == arg.limit && js.paging && js.paging.next) {
+ if (js.data.length != 0 && js.paging && js.paging.next) {
resp.config.homeNext = js.paging.next;
resp.config.nextRun = -1;
+ }else{
+ resp.config.homeNext = false;
}
cb(err, resp);
});
4 lib/services/facebook/map.js
@@ -9,10 +9,12 @@ exports.contact = {
var ret = {type:'contact'};
ret.url = data.link;
ret.title = data.name;
+ if(data.bio) ret.description = data.bio;
ret.thumbnail_url = 'https://graph.facebook.com/' + data.id + '/picture?type=large';
ret.provider_name = 'facebook';
return ret;
- }
+ },
+ text: 'bio'
};
exports.post = {
20 lib/services/facebook/proxy.js
@@ -0,0 +1,20 @@
+var url = require('url');
+var request = require('request');
+
+exports.proxy = function(auth, req, res)
+{
+ var uri = url.parse('https://graph.facebook.com'+req.url);
+ uri.query = req.query;
+ uri.query.access_token = auth.accessToken;
+ // trying to mirror everything needed from orig req
+ var arg = {method:req.method};
+ arg.uri = url.format(uri);
+ if(req.headers['content-type'])
+ { // post or put only?
+ req.headers = {'content-type':req.headers['content-type']};
+ arg.body = req.body;
+ }
+ arg.json = true;
+// console.error(arg);
+ request(arg).pipe(res);
+}
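
Each service gains the same small proxy module, but the route that mounts them is not part of this diff. The wiring below is only a hypothetical sketch of how webservice.js might hand an authenticated request to exports.proxy; app and profileAuthFor() are stand-ins, not code from this commit:

    var path = require('path');
    // forward any authed /proxy/<service>/... request through that service's proxy.js
    app.all('/proxy/:service/*', function(req, res) {
      var proxy = require(path.join('services', req.params.service, 'proxy.js'));
      req.url = req.url.replace('/proxy/' + req.params.service, '');  // strip the prefix before forwarding
      proxy.proxy(profileAuthFor(req, req.params.service), req, res); // profileAuthFor() stands in for the auth lookup
    });
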
28 lib/services/fitbit/activities.js
@@ -7,29 +7,5 @@
*
*/
-exports.sync = require('./lib').genericSync(function(pi){
- if(!pi.config) pi.config = {};
- if(!pi.config.memberSince || !pi.config.lastSyncTime) return false;
- if(!pi.config.activeNext) pi.config.activeNext = (new Date(pi.config.memberSince).getTime()); // convert to epoch format
- if((pi.config.activeNext > new Date(pi.config.lastSyncTime).getTime())) return false; // don't run ahead of last sync
- return 'activities/date/'+format(pi.config.activeNext)+'.json';
-}, function(pi, data, cb){
- if(!data || !data.summary) return cb();
- data.id = format(pi.config.activeNext); // stub in an id based on the date
- data.at = pi.config.activeNext; // also fill in
- var next = pi.config.activeNext + (3600*1000*24); // next run get next day
- if(next < (new Date(pi.config.lastSyncTime).getTime())){
- pi.config.activeNext = next; // don't move forward past last sync time!
- if(pi.config.activeNext < Date.now()) pi.config.nextRun = -1; // force run again
- }
- var base = 'activity:'+pi.auth.pid+'/activities';
- var ret = {};
- ret[base] = [data];
- cb(null, {config:pi.config, data:ret})
-});
-
-function format(epoch)
-{
- d = new Date(epoch);
- return ""+d.getFullYear()+'-'+((d.getMonth() < 9 ? '0' : '') + (d.getMonth() + 1))+'-'+((d.getDate() < 10 ? '0' : '') + d.getDate());
-}
+exports.sync =
+ require('./lib').dailySync('activities', 'summary', 'activity', 'activities');
28 lib/services/fitbit/devices.js
@@ -1,19 +1,19 @@
/*
-*
-* Copyright (C) 2011, The Locker Project
-* All rights reserved.
-*
-* Please see the LICENSE file for more information.
-*
-*/
+ *
+ * Copyright (C) 2011, The Locker Project
+ * All rights reserved.
+ *
+ * Please see the LICENSE file for more information.
+ *
+ */
exports.sync = require('./lib').genericSync(function(pi){
- return 'devices.json';
+ return 'devices.json';
}, function(pi, data, cb){
- if(!Array.isArray(data) || data.length == 0) return cb();
- pi.config.lastSyncTime = data[0].lastSyncTime;
- var base = 'device:'+pi.auth.pid+'/devices';
- var ret = {};
- ret[base] = data;
- cb(null, {config:pi.config, data:ret})
+ if(!Array.isArray(data) || data.length === 0) return cb();
+ pi.config.lastSyncTime = data[0].lastSyncTime;
+ var base = 'device:'+pi.auth.pid+'/devices';
+ var ret = {};
+ ret[base] = data;
+ cb(null, {config:pi.config, data:ret});
});
11 lib/services/fitbit/fat.js
@@ -0,0 +1,11 @@
+/*
+*
+* Copyright (C) 2011, The Locker Project
+* All rights reserved.
+*
+* Please see the LICENSE file for more information.
+*
+*/
+
+exports.sync =
+ require('./lib').dailySync('body/log/fat', 'fat', 'fat', 'fat');
28 lib/services/fitbit/lib.js
@@ -24,3 +24,31 @@ exports.genericSync = function(pather, cbDone) {
});
};
};
+
+exports.dailySync = function(apiPath, item, idrType, idrPath) {
+ return this.genericSync(function(pi) {
+ if(!pi.config) pi.config = {};
+ if(!pi.config.memberSince || !pi.config.lastSyncTime) return false;
+ if(!pi.config.activeNext) pi.config.activeNext = (new Date(pi.config.memberSince).getTime()); // convert to epoch format
+ if((pi.config.activeNext > new Date(pi.config.lastSyncTime).getTime())) return false; // don't run ahead of last sync
+ return apiPath + '/date/'+format(pi.config.activeNext)+'.json';
+ }, function(pi, data, cb) {
+ if(!data || !data[item]) return cb();
+ data.id = format(pi.config.activeNext); // stub in an id based on the date
+ data.at = pi.config.activeNext; // also fill in
+ var next = pi.config.activeNext + (3600*1000*24); // next run get next day
+ if(next < (new Date(pi.config.lastSyncTime).getTime())){
+ pi.config.activeNext = next; // don't move forward past last sync time!
+ if(pi.config.activeNext < Date.now()) pi.config.nextRun = -1; // force run again
+ }
+ var base = idrType + ':' + pi.auth.pid + '/' + idrPath;
+ var ret = {};
+ ret[base] = [data];
+ cb(null, {config:pi.config, data:ret});
+ });
+};
+
+function format(epoch) {
+ d = new Date(epoch);
+ return ""+d.getFullYear()+'-'+((d.getMonth() < 9 ? '0' : '') + (d.getMonth() + 1))+'-'+((d.getDate() < 10 ? '0' : '') + d.getDate());
+}
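
dailySync() replaces the per-synclet date-walking code with one factory. An annotated call, taken from the new weight.js, showing what each argument controls:

    // apiPath: Fitbit REST prefix, requested as <apiPath>/date/<YYYY-MM-DD>.json
    // item:    key that must exist in the response for the day to count
    // idrType: idr protocol the entries are stored under
    // idrPath: idr path segment, giving a base like weight:<pid>/weight
    exports.sync = require('./lib').dailySync('body/log/weight', 'weight', 'weight', 'weight');
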
19 lib/services/fitbit/map.js
@@ -1,15 +1,20 @@
exports.device = {
- at: function(data) { return (new Date(data.lastSyncTime)).getTime() }
-}
+ at: function(data) {
+ return (new Date(data.lastSyncTime)).getTime();
+ }
+};
exports.profile = {
id: 'encodedId',
photo: 'avatar'
-}
+};
exports.defaults = {
- devices: 'device',
- activities: 'activity',
- self: 'profile'
-}
+ self : 'profile',
+ activities : 'activity',
+ devices : 'device',
+ fat : 'fat',
+ sleep : 'sleep',
+ weight : 'weight'
+};
10 lib/services/fitbit/sleep.js
@@ -0,0 +1,10 @@
+/*
+*
+* Copyright (C) 2011, The Locker Project
+* All rights reserved.
+*
+* Please see the LICENSE file for more information.
+*
+*/
+
+exports.sync = require('./lib').dailySync('sleep', 'summary', 'sleep', 'sleep');
13 lib/services/fitbit/synclets.json
@@ -1,7 +1,12 @@
{
"name":"Fitbit",
- "desc":"Syncs devices and all activities.",
- "synclets":[{"name":"self", "frequency":7200},
- {"name":"devices", "frequency": 3600},
- {"name":"activities", "frequency": 7200}]
+ "desc":"Syncs devices, activity, sleep and weight/bodyfat",
+ "synclets":[
+ {"name":"self", "frequency": 7200},
+ {"name":"activities", "frequency": 7200},
+ {"name":"devices", "frequency": 3600},
+ {"name":"fat", "frequency": 7200},
+ {"name":"sleep", "frequency": 7200},
+ {"name":"weight", "frequency": 7200}
+ ]
}
11 lib/services/fitbit/weight.js
@@ -0,0 +1,11 @@
+/*
+*
+* Copyright (C) 2011, The Locker Project
+* All rights reserved.
+*
+* Please see the LICENSE file for more information.
+*
+*/
+
+exports.sync =
+ require('./lib').dailySync('body/log/weight', 'weight', 'weight', 'weight');
19 lib/services/foursquare/proxy.js
@@ -0,0 +1,19 @@
+var url = require('url');
+var request = require('request');
+
+exports.proxy = function(auth, req, res)
+{
+ var uri = url.parse('https://api.foursquare.com/v2'+req.url);
+ uri.query = req.query;
+ uri.query.oauth_token = auth.accessToken;
+ // trying to mirror everything needed from orig req
+ var arg = {method:req.method};
+ arg.uri = url.format(uri);
+ if(req.headers['content-type'])
+ { // post or put only?
+ req.headers = {'content-type':req.headers['content-type']};
+ arg.body = req.body;
+ }
+ arg.json = true;
+ request(arg).pipe(res);
+}
19 lib/services/github/proxy.js
@@ -0,0 +1,19 @@
+var url = require('url');
+var request = require('request');
+
+exports.proxy = function(auth, req, res)
+{
+ var uri = url.parse('https://api.github.com'+req.url);
+ uri.query = req.query;
+ uri.query.access_token = auth.accessToken;
+ // trying to mirror everything needed from orig req
+ var arg = {method:req.method};
+ arg.uri = url.format(uri);
+ if(req.headers['content-type'])
+ { // post or put only?
+ req.headers = {'content-type':req.headers['content-type']};
+ arg.body = req.body;
+ }
+ arg.json = true;
+ request(arg).pipe(res);
+}
19 lib/services/instagram/proxy.js
@@ -0,0 +1,19 @@
+var url = require('url');
+var request = require('request');
+
+exports.proxy = function(auth, req, res)
+{
+ var uri = url.parse('https://api.instagram.com/v1'+req.url);
+ uri.query = req.query;
+ uri.query.access_token = auth.accessToken;
+ // trying to mirror everything needed from orig req
+ var arg = {method:req.method};
+ arg.uri = url.format(uri);
+ if(req.headers['content-type'])
+ { // post or put only?
+ req.headers = {'content-type':req.headers['content-type']};
+ arg.body = req.body;
+ }
+ arg.json = true;
+ request(arg).pipe(res);
+}
39 lib/services/linkedin/proxy.js
@@ -0,0 +1,39 @@
+var querystring = require('querystring');
+
+exports.proxy = function(auth, req, res)
+{
+ var OAlib = require('oauth').OAuth;
+ var OA = new OAlib(null, null, auth.consumerKey, auth.consumerSecret, '1.0', null, 'HMAC-SHA1', null, {'Accept': '*/*', 'Connection': 'close'});
+ var url = 'http://api.linkedin.com/v1'+req.url;
+ if (req.method.toUpperCase() === 'GET') {
+ return OA.get(
+ url + '?' + querystring.stringify(req.query)
+ , auth.token
+ , auth.tokenSecret
+ , requestCallback(res)
+ );
+ } else if (req.method.toUpperCase() === 'POST') {
+ return CLIENT.oauth.post(
+ url
+ , auth.token
+ , auth.tokenSecret
+ , req.query
+ , 'application/json; charset=UTF-8'
+ , requestCallback(res)
+ );
+ }
+ res.send("unsupported",500);
+}
+
+function requestCallback(res) {
+ return function (error, data, response) {
+ if (error) return res.send(error, 500);
+ var js;
+ try {
+ js = JSON.parse(data);
+ } catch (exc) {
+ return res.send(exc, 500);
+ }
+ res.send(js);
+ };
+}
4 lib/services/links/map.js
@@ -0,0 +1,4 @@
+exports.oembed = {
+ oembed: function(data) { return data },
+ text: 'description'
+}
5 lib/services/links/oembed.js
@@ -29,7 +29,7 @@ exports.pump = function(cset, callback) {
{
// first time this is created, insert it!
if(data && !did[task.url]) {
- logger.debug("saving",task.url,data.type);
+ //logger.debug("saving",task.url,data.type);
did[task.url] = data;
var entry = {idr:task.idr, at:Date.now(), data:data, types:{}};
entry.types[data.type] = true; // need alias saved that is used by original entry
@@ -44,6 +44,7 @@ exports.pump = function(cset, callback) {
task.entries.forEach(function(entry){
if(!entry.refs) entry.refs = {};
entry.refs[typed] = task.url;
+ entry.q = [entry.q,data.title].join(" "); // neato
});
}
// worker queue to lookup/save urls
@@ -71,7 +72,7 @@ exports.pump = function(cset, callback) {
if(typeof data.type != 'string') data.type = 'link';
if(typeof data.url != 'string') data.url = task.url;
saver(task, data);
- cb()
+ async.nextTick(cb);
});
});
}, 10);
3 lib/services/links/resolve.js
@@ -49,7 +49,7 @@ exports.pump = function(cset, callback) {
if(arg.err) logger.warn("link resolving warning",arg.url,arg.err);
if(typeof arg.url != 'string') arg.url = task.url; // use original if expansion failed
saver(task, arg.url);
- cb()
+ async.nextTick(cb);
});
}, 10);
q.drain = function(){ callback(null, cset) };
@@ -180,6 +180,7 @@ var APIs = {
if (res.statusCode === 301 || res.statusCode === 302 || res.statusCode === 307)
{
// re-basing like a browser would, yes sam, this happens
+ if(!res.headers.location) { args.err = 'missing location header on a 3xx'; return callback(args); }
var newup = urllib.parse(urllib.resolve(args.urlp,urllib.parse(res.headers.location)));
// if the url is unparseable, bail out
if (!newup || !newup.pathname) return callback(args);
9 lib/services/tumblr/proxy.js
@@ -0,0 +1,9 @@
+exports.proxy = function(auth, req, res)
+{
+ var tc = require(__dirname+'/tumblr_client.js')(auth.consumerKey, auth.consumerSecret);
+ req.query.token = auth.token;
+ var p = tc.apiCall(req.method, req.url, req.query, function(err, js){
+ if(err) return res.json(err, 500);
+ res.json(js);
+ });
+}
5 lib/services/twitter/map.js
@@ -16,7 +16,8 @@ exports.contact = {
if(data.profile_image_url_https) ret.thumbnail_url = data.profile_image_url_https.replace('_normal','');
ret.provider_name = 'twitter';
return ret;
- }
+ },
+ text: 'description'
};
exports.tweet = {
@@ -84,7 +85,7 @@ exports.tweet = {
urls = Object.keys(urls);
return urls.length > 0 ? urls : undefined;
},
- text: 'data.text'
+ text: 'text'
};
exports.related = {
9 lib/services/twitter/proxy.js
@@ -0,0 +1,9 @@
+exports.proxy = function(auth, req, res)
+{
+ var tc = require(__dirname+'/twitter_client.js')(auth.consumerKey, auth.consumerSecret);
+ req.query.token = auth.token;
+ var p = tc.apiCall(req.method, req.url, req.query, function(err, js){
+ if(err) return res.json(err, 500);
+ res.json(js);
+ });
+}
127 lib/syncManager.js
@@ -1,26 +1,25 @@
var fs = require('fs');
var path = require('path');
var async = require('async');
-var EventEmitter = require('events').EventEmitter;
+var lconfig = require('lconfig');
var util = require('util');
var logger = require("logger.js").logger("syncManager");
var profileManager = require('profileManager');
var dal = require("dal");
var instruments = require("instruments");
-// Load these from a config
-var PAGING_TIMING = 2000; // 2s gap in paging
-var NUM_WORKERS = 4;
-var DEFAULT_SCAN_TIME = 5000;
+var NUM_WORKERS = lconfig.syncManager.numWorkers || 4;
+var PAGING_TIMING = lconfig.syncManager.pagingTiming || 2000;
+var DEFAULT_SCAN_TIME = lconfig.syncManager.defaultScanTime || 5000;
// TODO TECH-DEBT: the task object might be pointless now, need to rethink it some
var syncletManager;
exports.debug = false;
-/**
-* The database has a state field for the SyncSchedule table. This field represents
+/**
+* The database has a state field for the SyncSchedule table. This field represents
* where the task is in the process of being ran. The states are:
*
* 0 - Pending a run, normal waiting
@@ -30,12 +29,10 @@ exports.debug = false;
*/
function SyncletManager()
{
- EventEmitter.call(this);
-
this.scheduled = {};
this.offlineMode = false;
+ this.completed = undefined;
}
-util.inherits(SyncletManager, EventEmitter);
// Async init
SyncletManager.prototype.init = function(liveWorker, callback) {
var self = this;
@@ -51,10 +48,18 @@ SyncletManager.prototype.init = function(liveWorker, callback) {
// If we're not a worker and just talking to workers, we can bail early
if (!self.liveWorker) return callback();
- self.workerName = process.env['WORKER'] || require("os").hostname();;
+ self.workerName = process.env.WORKER || require("os").hostname();
this.scanTimeout = undefined;
- self.workQueue = async.queue(function(task, callback) { self.runTask(task, callback); }, NUM_WORKERS);
+ this.active = {};
+ self.workQueue = async.queue(function(task, callback) {
+ var key = Math.random().toString(16).substr(2);
+ self.active[key] = task;
+ self.runTask(task, function(){
+ delete self.active[key];
+ callback();
+ });
+ }, NUM_WORKERS);
self.workQueue.drain = function() { self.scanAndRun(); };
// any jobs that were running when shutdown, clean up on start
@@ -65,6 +70,18 @@ SyncletManager.prototype.init = function(liveWorker, callback) {
});
});
};
+
+// admin things used by worker web service
+SyncletManager.prototype.backlog = function() {
+ return this.workQueue.length();
+}
+SyncletManager.prototype.active = function() {
+ var ret = [];
+ var self = this;
+ Object.keys(self.active).forEach(function(key){ret.push(self.active[key])});
+ return ret;
+}
+
SyncletManager.prototype.loadSynclets = function() {
// not defensively coded! load synclets
var self = this;
@@ -93,7 +110,6 @@ SyncletManager.prototype.loadSynclets = function() {
synclets('foursquare');
synclets('tumblr');
synclets('linkedin');
- synclets('email');
synclets('fitbit');
synclets('gcontacts');
synclets('github');
@@ -102,7 +118,7 @@ SyncletManager.prototype.loadSynclets = function() {
// just return the list of services as loaded from disk
SyncletManager.prototype.getServices = function(callback) {
callback(null, this.services);
-}
+};
/// Schedule a synclet to run
/**
@@ -117,35 +133,40 @@ SyncletManager.prototype.getServices = function(callback) {
*
* timeToRun: milliseconds from epoch to run the task
*/
-SyncletManager.prototype.schedule = function(task, timeToRun) {
+SyncletManager.prototype.schedule = function(task, timeToRun, cbDone) {
+ if (!cbDone) cbDone = function() {};
if (exports.debug) logger.debug("scheduling "+JSON.stringify(task)+" at "+timeToRun);
if (!this.synclets[task.synclet.connector] || !this.synclets[task.synclet.connector][task.synclet.name]) {
- logger.error("Attempted to schedule an unregistered synclet: " + task.synclet.connector + "-" + task.synclet.name);
- return;
+ var E = new Error("Attempted to schedule an unregistered synclet: " + task.synclet.connector + "-" + task.synclet.name);
+ logger.error(E);
+ return cbDone(E);
}
var syncletInfo = this.synclets[task.synclet.connector][task.synclet.name];
if (!syncletInfo.frequency) {
- logger.error("Attempted to schedule a run only synclet");
- return;
+ var E = new Error("Attempted to schedule a run only synclet");
+ logger.error(E);
+ return cbDone(E);
}
// In offline mode things may only be ran directly with runTask
- if (this.offlineMode) return;
+ if (this.offlineMode) return cbDone();
var self = this;
var key = this.getKey(task);
var sql = "SELECT * FROM SyncSchedule WHERE `key`=? AND state > 0 AND worker != NULL";
dal.query(sql, [self.getKey(task)], function(error, rows) {
if (error) {
- logger.error("Error trying to find a key in the schdule: " + error);
- return;
+ var E = new Error("Error trying to find a key in the schdule: " + error);
+ logger.error(E);
+ return cbDone(E);
}
if (rows && rows.length == 1 && rows[0].state !== 0 && rows[0].state !== 3) {
- logger.error("Attempted to reschedule a synclet while it is running");
- return;
+ var E = new Error("Attempted to reschedule a synclet while it is running");
+ logger.error(E);
+ return cbDone(E);
}
if (timeToRun === undefined || timeToRun === null || timeToRun <= 0) {
@@ -161,8 +182,10 @@ SyncletManager.prototype.schedule = function(task, timeToRun) {
if (error) {
logger.error("Failed to schedule " + key);
// TODO
+ return cbDone("Failed to schedule " + key);
}
if (self.liveWorker) self.scanAndRun();
+ cbDone();
});
});
};
@@ -187,12 +210,12 @@ SyncletManager.prototype.scanAndRun = function() {
self.scanTimeout = setTimeout(function() { self.scanAndRun(); }, timeout || DEFAULT_SCAN_TIME);
}
var ranRows = 0;
- var sql = "UPDATE SyncSchedule SET worker=? WHERE nextRun <= UNIX_TIMESTAMP()*1000 AND worker IS NULL ORDER BY nextRun LIMIT 4";
+ var limit = (NUM_WORKERS * 2) - self.workQueue.length();
+ if (limit <= 0) return;
+ var sql = "UPDATE SyncSchedule SET worker=? WHERE nextRun <= UNIX_TIMESTAMP()*1000 AND worker IS NULL ORDER BY nextRun LIMIT " + limit;
dal.query(sql, [self.workerName], function(error, rows) {
- if (error) {
- logger.error(error);
- };
- sql = "SELECT * FROM SyncSchedule WHERE worker=? AND state=0";
+ if (error) logger.error(error);
+ sql = "SELECT * FROM SyncSchedule WHERE worker=? AND state=0";
dal.query(sql, [self.workerName], function(error, rows) {
if (error) {
logger.error("There was an error trying to scanAndRun",error);
@@ -215,8 +238,8 @@ SyncletManager.prototype.scanAndRun = function() {
logger.error("Error getting a nextRun time: " + error);
return setNextScan();
}
- var scanTimeout = undefined;
- if (rows.length == 1 && rows[0].nextRun) scanTimeout = rows[0].nextRun - Date.now();
+ var scanTimeout;
+ if (rows.length === 1 && rows[0].nextRun) scanTimeout = rows[0].nextRun - Date.now();
if (exports.debug) logger.debug("Setting next run timeout to %d - %j", scanTimeout, rows[0]);
setNextScan(scanTimeout);
});
@@ -252,49 +275,47 @@ SyncletManager.prototype.runTask = function(task, callback) {
if (!err) {
instruments.increment("synclet.successful").send();
dal.query("UPDATE SyncSchedule SET errorCount=0,lastError=NULL WHERE `key`=?", [self.getKey(task)], function(error) {
- self.emit("completed", response, task);
+ if (self.completed) self.completed(response, task, callback);
});
} else {
instruments.increment("synclet.error."+task.synclet.connector+"."+task.synclet.name).send();
var estr = (typeof err == 'string') ? err : util.inspect(err).replace(/\s+/g," ");
dal.query("UPDATE SyncSchedule SET worker=NULL,errorCount=errorCount+1, lastError=? WHERE `key`=?", [estr.substr(0, 255), self.getKey(task)], function(error) {
self.schedule(task);
+ callback(err);
});
}
- callback(err);
- });
+ });
}
// Don't reschdule, it's never going to work, drop it and assume they will reschedule
if (!this.synclets[task.synclet.connector] || !this.synclets[task.synclet.connector][task.synclet.name]) {
- logger.error("Attempted to run an unregistered synclet: " + task.synclet.connector + "-" + task.synclet.name);
- return;
+ var E = new Error("Attempted to run an unregistered synclet: " + task.synclet.connector + "-" + task.synclet.name);
+ logger.error(E);
+ return callback(E);
}
logger.verbose("Synclet starting " + this.getKey(task));
var tstart = Date.now();
-
var syncletInfo = this.synclets[task.synclet.connector][task.synclet.name];
var runInfo = {};
- var self = this;
// load up the current auth/config data and prep to run a task
async.series([
function(cb) { profileManager.allGet(task.profile, function(err, ret){ runInfo = ret; cb(); }); },
- function(cb) { self.updateState(self.getKey(task), 2, cb); },
- function() {
- if(!runInfo.auth) {
- logger.error("no auth found, skipping "+JSON.stringify(task));
- return callback(new Error("no auth found, skipping"));
- }
- if(!runInfo.config) runInfo.config = {};
- // in case something in the synclet barfs...
- instruments.increment("synclet.run").send();
- try {
- syncletInfo.sync(runInfo, cbDone);
- } catch(E) {
- cbDone(E); // this should never be a double-callback!
- }
+ function(cb) { self.updateState(self.getKey(task), 2, cb); }
+ ], function() {
+ if(!runInfo.auth) {
+ logger.error("no auth found, skipping "+JSON.stringify(task));
+ return callback(new Error("no auth found, skipping"));
}
- ]);
+ if(!runInfo.config) runInfo.config = {};
+ // in case something in the synclet barfs...
+ instruments.increment("synclet.run").send();
+ try {
+ syncletInfo.sync(runInfo, cbDone);
+ } catch(E) {
+ cbDone(E); // this should never be a double-callback!
+ }
+ });
};
// This trivial helper function just makes sure we're consistent and we can change it easily
SyncletManager.prototype.getKey = function(task) {
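runTask now hands results to an optional completed hook (the emit("completed", ...) replacement above) together with the task's callback. A hedged sketch of wiring such a hook; processSyncletResponse is a hypothetical placeholder, and only the (response, task, callback) shape comes from the diff:

// hypothetical wiring, not from this commit
syncManager.completed = function(response, task, callback) {
  // persist or post-process whatever the synclet returned, then release the task
  processSyncletResponse(response, task, function(err) {
    callback(err);
  });
};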
View
88 lib/webservice.js
@@ -11,6 +11,7 @@ var express = require('express');
var connect = require('connect');
var logger = require('logger').logger("webservice");
var async = require('async');
+var path = require('path');
var crypto = require('crypto');
var urllib = require('url');
var authManager = require('authManager');
@@ -71,7 +72,7 @@ locker.get('/auth/:id/auth', function(req, res) {
logger.warn('missing cookie for fallback auth',req.params.id);
return res.send("handshake failed, missing cookie, spilled milk!",500);
}
- console.error("here",req.cookies['auth'+req.params.id]);
+ logger.debug("authauth here",req.cookies['auth'+req.params.id]);
authManager.authIsAuth(req.params.id, req.cookies['auth'+req.params.id], req, res);
});
@@ -154,20 +155,24 @@ locker.get('/profiles', function(req, res) {
});
});
-// return convenient list of all profiles auth'd for this account
-locker.post('/profiles/delete', function(req, res) {
+// a way to make changes to profiles, just delete for now
+locker.post('/profiles', function(req, res) {
var account = req._authsome.account;
if(!account) return res.json('no account', 404);
- if(typeof req.body == 'string') try {
- req.body = JSON.parse(req.body);
- } catch (E) {
- logger.error("couldn't parse /profiles/delete body", req.body);
- return res.json(false);
+ if(!req.query.delete) return res.json('no delete= in the query string', 404);
+
+ logger.info("deleting account profiles for "+account,req.query.delete,req._authsome.profiles);
+ // delete all if the id is the account
+ if(req.query.delete === account)
+ {
+ acl.delProfiles(account, function(err, rows){
+ if(err) logger.error(err);
+ logger.anubis(req);
+ res.json(true);
+ });
+ return;
}
- if(req.body !== true) return res.json(false);
-
- logger.info("deleting account profiles for "+account,req._authsome.profiles);
- acl.delProfiles(account, function(err){
+ acl.delProfile(account, req.query.delete, function(err, rows){
if(err) logger.error(err);
logger.anubis(req);
res.json(true);
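With the delete action moved onto the query string, the reworked endpoint can be exercised with a plain POST. A sketch using Node's core http module; the host, port, token, and profile id are placeholders, and passing access_token as a query parameter is an assumption about the auth middleware:

var http = require('http');

var token = process.env.ACCESS_TOKEN;     // placeholder
var profileId = '12345@twitter';          // or the account id itself to remove every profile
var options = {
  host: 'localhost',
  port: 8042,
  method: 'POST',
  path: '/profiles?delete=' + encodeURIComponent(profileId) +
        '&access_token=' + encodeURIComponent(token)
};

http.request(options, function(res) {
  // the handler above answers with a bare `true` on success
  console.log('delete returned', res.statusCode);
  res.resume();
}).end();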
@@ -241,6 +246,7 @@ locker.get('/types/:type', function(req, res) {
if(maxd < options.limit) options.limit = maxd; // override to enforce an upper bound
}
if(options.limit < 5) options.limit = 5; // rough minimum to avoid odd edge cases
+ doNear(req, options);
logger.debug("TYPE",type,options,bases);
// get the oldest at
var oldest;
@@ -280,7 +286,7 @@ locker.get('/types/:type', function(req, res) {
if(type == 'statuses' || type == 'statuses_feed' || (item.types && item.types.status))
{
var text = (item.map && item.map.text) ? item.map.text : dMap.get('text', item.data, item.idr);
- if(!text) return; // bail if none!
+ if(!text) return logger.warn("missing text for ",item.idr); // bail if none!
item.oembed = {type:'text', text:text};
}
// if no oembed yet or the one we have isn't the right type, find any ref based oembed and expand them
@@ -502,8 +508,9 @@ locker.get('/services/:serviceName/:serviceEndpoint', function(req, res) {
var profiles = req._authsome.profiles;
var pid;
profiles.forEach(function(item) {
- if(item.profile.indexOf(service) > 0) pid = item.profile;
+ if(item.profile.indexOf('@'+service) > 0) pid = item.profile;
});
+ if(service == req._authsome.app) pid = req._authsome.account+'@'+req._authsome.app;
if(!pid) return res.json('missing profile for '+service, 404);
// construct the base, get the default type for this endpoint
var type = req.query['type'] || dMap.defaults(service, req.params.serviceEndpoint);
@@ -512,9 +519,11 @@ locker.get('/services/:serviceName/:serviceEndpoint', function(req, res) {
var options = {};
if(req.query['offset']) options.offset = parseInt(req.query['offset']) || 0;
options.limit = parseInt(req.query['limit'] || 20);
+ doNear(req, options);
+ options.q = req.query.q;
var written = 0;
// write out the return array progressively, pseudo-streaming
- console.error('getRange '+base+' '+JSON.stringify(options));
+ logger.debug('getRange '+base+' '+JSON.stringify(options));
var skips = {};
res.writeHead(200, {'Content-Type': 'application/json; charset=utf-8'});
res.write('[');
@@ -542,8 +551,9 @@ locker.get('/services/:serviceName/:serviceEndpoint/:id', function(req, res) {
var profiles = req._authsome.profiles;
var pid;
profiles.forEach(function(item) {
- if(item.profile.indexOf(service) > 0) pid = item.profile;
+ if(item.profile.indexOf('@'+service) > 0) pid = item.profile;
});
+ if(service == req._authsome.app) pid = req._authsome.account+'@'+req._authsome.app;
var type = dMap.defaults(service, req.params.serviceEndpoint);
if(!pid || !type) return res.json('missing profile for '+service, 404);
// construct the base, get the default type for this endpoint
@@ -579,8 +589,9 @@ locker.get('/services/:serviceName', function(req, res) {
var profiles = req._authsome.profiles;
var pid;
profiles.forEach(function(item) {
- if(item.profile.indexOf(service) > 0) pid = item.profile;
+ if(item.profile.indexOf('@'+service) > 0) pid = item.profile;
});
+ if(service == req._authsome.app) pid = req._authsome.account+'@'+req._authsome.app;
if(!pid) return res.json('missing profile for '+service, 404);
var ret = {};
async.forEach(dMap.bases([pid]),function(base, cb){
@@ -596,7 +607,7 @@ locker.get('/services/:serviceName', function(req, res) {
// Get a system-wide id uniquely
locker.get('/id/:id', function(req, res) {
- id = req.params.id || req.url.substr(1);
+ var id = req.params.id || req.url.substr(1);
logger.debug("fetching "+id);
if(id && id.indexOf('_') > 0) id = id.substr(0,id.indexOf('_')); // for future use, the second part used for sharding hints, possible validation, etc
ijod.getOne(id, function(err, entry) {
@@ -615,6 +626,31 @@ locker.get('/id/:id', function(req, res) {
});
});
+// generic proxy-authed-to-service util
+locker.all('/proxy/:service/*', function(req, res) {
+ var service = req.params.service;
+ var pid;
+ req._authsome.profiles.forEach(function(item) {
+ if(item.profile.indexOf(service) > 0) pid = item.profile;
+ });
+ if(!pid) return res.json('missing profile for '+service, 404);
+ req.url = '/'+req.params[0];
+ delete req.query['access_token'];
+ logger.debug("proxy fetching "+service,req.url,req.query);
+ profileManager.authGet(pid, function(err, auth){
+ if(err || !auth) return res.json("missing stored auth info", 404);
+ var proxy;
+ try {
+ proxy = require(path.join('services', service, 'proxy.js'));
+ } catch (E) {
+ logger.error(E);
+ return res.json('no proxy for this service',404);
+ }
+ logger.anubis(req);
+ proxy.proxy(auth, req, res);
+ });
+});
+
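The route above only expects each service to ship a proxy.js exposing proxy(auth, req, res); everything else is left to the service module. A minimal sketch of that contract; the upstream host, the `request` module, and the auth.accessToken field are assumptions, not taken from this commit:

// lib/services/example/proxy.js -- illustrative only
var request = require('request');

exports.proxy = function(auth, req, res) {
  // req.url was already rewritten to the wildcard portion by the route above
  req.pipe(request({
    url: 'https://api.example.com' + req.url,
    method: req.method,
    qs: { access_token: auth.accessToken }
  })).pipe(res);
};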
// force a synclet to run, mostly internal dev util
locker.get('/services/:serviceName/:serviceEndpoint/run', function(req, res) {
var service = req.params.serviceName;
@@ -626,7 +662,7 @@ locker.get('/services/:serviceName/:serviceEndpoint/run', function(req, res) {