
Commit

Resolved changes
benrudolph committed Mar 30, 2013
2 parents 8a13a7a + 22b5ac1 commit 163e954
Showing 23 changed files with 665 additions and 467 deletions.
3 changes: 2 additions & 1 deletion CONTRIBUTING.md
@@ -10,6 +10,7 @@ Guide to Contributions
* Looking for something to work on? Look for bugs marked [goodfirstbug](https://github.com/daleharvey/pouchdb/issues?labels=goodfirstbug&page=1&state=open)
* We follow [Felix's Node.js Style Guide](http://nodeguide.com/style.html)
* Almost all Pull Requests for features or bug fixes will need tests (seriously, it's really important)
* Before opening a pull request, run `$ grunt test` to lint the changes and run the node tests. Preferably run the browser tests as well.
* Commit messages should follow the following style:

```
@@ -57,7 +58,7 @@ Run single test file `test.basics.js` with:

### Browser Tests

$ grunt browser
$ grunt browser
# Now visit http://127.0.0.1:8000/tests/test.html in your browser
# add ?testFiles=test.basics.js to run single test file

17 changes: 11 additions & 6 deletions Gruntfile.js
@@ -29,8 +29,8 @@ var browserConfig = [{
}, {
browserName: 'firefox',
version: '19',
platform: 'Windows 2003',
name: 'win2003/firefox'
platform: 'Linux',
name: 'linux/firefox'
// }, {
// browserName: 'opera',
// version: '12',
@@ -215,7 +215,7 @@ module.exports = function(grunt) {
}
},
'publish-results': {
server: 'http://pouchdb.iriscouch.com',
server: 'http://couchdb.pouchdb.com',
db: 'test_results'
}
});
@@ -278,11 +278,16 @@ module.exports = function(grunt) {
grunt.loadNpmTasks('grunt-contrib-watch');

grunt.registerTask("build", ["concat:amd", "concat:all" , "uglify:dist"]);
grunt.registerTask("browser", ["connect", "cors-server", "forever"])
grunt.registerTask("testSetup", ["jshint", "build", "connect", "cors-server"]);
grunt.registerTask("test", ["testSetup", "node-qunit" ,"saucelabs-qunit", "publish-results"]);
grunt.registerTask("browser", ["connect", "cors-server", "forever"]);
grunt.registerTask("full", ["concat", "uglify"]);

grunt.registerTask("spatial", ["concat:spatial", "uglify:spatial"]);
grunt.registerTask("gql", ["concat:gql", "uglify:gql"]);

grunt.registerTask("test", ["jshint", "node-qunit"]);
grunt.registerTask("test-travis", ["jshint", "build", "connect", "cors-server",
"node-qunit", "saucelabs-qunit",
"publish-results"]);

grunt.registerTask('default', 'build');
};
9 changes: 9 additions & 0 deletions docs/api.md
@@ -25,6 +25,7 @@ Most of the Pouch API is exposed as `fun(arg, [options], [callback])` Where both
* [Get database information](#get_database_information)
* [Listen to database changes](#listen_to_database_changes)
* [Replicate a database](#replicate_a_database)
* [Compact the database](#compact_the_database)
* [Get document revision diffs](#document_revisions_diff)

## List all databases
@@ -499,6 +500,14 @@ Replicate one database to another.
//
})

## Compact the database

db.compact([opts], [callback])

Runs compaction on the database and fires the callback when compaction is done. If you use the http adapter and have specified a callback, Pouch will ping the remote database at regular intervals until compaction has finished.

* `options.interval`: Number of milliseconds Pouch waits before asking again whether compaction has finished. Only applies to the http adapter (see the sketch below).
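
A minimal usage sketch (assuming `db` is an already-open Pouch database handle; the 1000 ms interval is only an illustrative value):

    db.compact({interval: 1000}, function(err) {
      // fires once the server reports that compaction is no longer running
    });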

## Document Revisions Diff

db.revsDiff(diff, [callback])
2 changes: 1 addition & 1 deletion package.json
@@ -69,6 +69,6 @@
}
],
"scripts": {
"test": "grunt test"
"test": "grunt test-travis"
}
}
22 changes: 20 additions & 2 deletions src/adapters/pouch.http.js
@@ -209,16 +209,34 @@ var HttpPouch = function(opts, callback) {

// Sends a POST request to the host calling the couchdb _compact function
// version: The version of CouchDB it is running
api.compact = function(callback) {
api.compact = function(opts, callback) {
if (!api.taskqueue.ready()) {
api.taskqueue.addTask('compact', arguments);
return;
}
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
ajax({
auth: host.auth,
url: genDBUrl(host, '_compact'),
method: 'POST'
}, callback);
}, function() {
function ping() {
api.info(function(err, res) {
if (!res.compact_running) {
call(callback, null);
} else {
setTimeout(ping, opts.interval || 200);
}
});
}
// Poll the remote database until compaction has finished
if (typeof callback === "function") {
ping();
}
});
};

// Calls GET on the host, which gets back a JSON string containing
108 changes: 54 additions & 54 deletions src/adapters/pouch.idb.js
@@ -1,6 +1,5 @@
/*globals call: false, extend: false, parseDoc: false, Crypto: false */
/*globals isLocalId: false, isDeleted: false, collectConflicts: false */
/*globals collectLeaves: false, Changes: false, filterChange: false */
/*globals isLocalId: false, isDeleted: false, Changes: false, filterChange: false */

'use strict';

@@ -78,18 +77,26 @@ var IdbPouch = function(opts, callback) {
console.log(name + ': Open Database');
}

// TODO: before we release, make sure we write upgrade needed
// in a way that supports a future upgrade path
req.onupgradeneeded = function(e) {
var db = e.target.result;
var currentVersion = e.oldVersion;
while (currentVersion !== e.newVersion) {
if (currentVersion === 0) {
createSchema(db);
}
currentVersion++;
}
};

function createSchema(db) {
db.createObjectStore(DOC_STORE, {keyPath : 'id'})
.createIndex('seq', 'seq', {unique: true});
db.createObjectStore(BY_SEQ_STORE, {autoIncrement : true})
.createIndex('_rev', '_rev', {unique: true});
.createIndex('_doc_id_rev', '_doc_id_rev', {unique: true});
db.createObjectStore(ATTACH_STORE, {keyPath: 'digest'});
db.createObjectStore(META_STORE, {keyPath: 'id', autoIncrement: false});
db.createObjectStore(DETECT_BLOB_SUPPORT_STORE);
};
}

req.onsuccess = function(e) {

@@ -160,20 +167,13 @@ var IdbPouch = function(opts, callback) {
};

api._bulkDocs = function idb_bulkDocs(req, opts, callback) {

var newEdits = opts.new_edits;
var userDocs = extend(true, [], req.docs);
var userDocs = req.docs;

// Parse the docs, give them a sequence number for the result
var docInfos = userDocs.map(function(doc, i) {
var newDoc = parseDoc(doc, newEdits);
newDoc._bulk_seq = i;
if (doc._deleted) {
if (!newDoc.metadata.deletions) {
newDoc.metadata.deletions = {};
}
newDoc.metadata.deletions[doc._rev.split('-')[1]] = true;
}
return newDoc;
});

@@ -348,6 +348,7 @@ var IdbPouch = function(opts, callback) {
}

function finish() {
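// Store a composite "_doc_id_rev" ("id::rev") on the data so the BY_SEQ_STORE's
// '_doc_id_rev' index can look a revision up by document id and rev.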
docInfo.data._doc_id_rev = docInfo.data._id + "::" + docInfo.data._rev;
var dataReq = txn.objectStore(BY_SEQ_STORE).put(docInfo.data);
dataReq.onsuccess = function(e) {
if (Pouch.DEBUG) {
@@ -370,12 +371,10 @@
}

function updateDoc(oldDoc, docInfo) {
docInfo.metadata.deletions = extend(docInfo.metadata.deletions, oldDoc.deletions);

var merged = Pouch.merge(oldDoc.rev_tree, docInfo.metadata.rev_tree[0], 1000);

var inConflict = (isDeleted(oldDoc) && isDeleted(docInfo.metadata)) ||
(!isDeleted(oldDoc) && newEdits && merged.conflicts !== 'new_leaf');
var wasPreviouslyDeleted = isDeleted(oldDoc);
var inConflict = (wasPreviouslyDeleted && isDeleted(docInfo.metadata)) ||
(!wasPreviouslyDeleted && newEdits && merged.conflicts !== 'new_leaf');

if (inConflict) {
results.push(makeErr(Pouch.Errors.REV_CONFLICT, docInfo._bulk_seq));
@@ -404,28 +403,18 @@
function saveAttachment(docInfo, digest, data, callback) {
var objectStore = txn.objectStore(ATTACH_STORE);
var getReq = objectStore.get(digest).onsuccess = function(e) {
var originalRefs = e.target.result && e.target.result.refs || {};
var ref = [docInfo.metadata.id, docInfo.metadata.rev].join('@');
var newAtt = {digest: digest, body: data};

if (e.target.result) {
if (e.target.result.refs) {
// only update references if this attachment already has them
// since we cannot migrate old style attachments here without
// doing a full db scan for references
newAtt.refs = e.target.result.refs;
newAtt.refs[ref] = true;
}
} else {
newAtt.refs = {};
newAtt.refs[ref] = true;
}

var newAtt = {
digest: digest,
body: data,
refs: originalRefs
};
newAtt.refs[ref] = true;
var putReq = objectStore.put(newAtt).onsuccess = function(e) {
call(callback);
};
putReq.onerror = putReq.ontimeout = idbError(callback);
};
getReq.onerror = getReq.ontimeout = idbError(callback);
}

var txn;
@@ -438,7 +427,6 @@

processDocs();
});

};

function sortByBulkSeq(a, b) {
@@ -467,16 +455,19 @@
return;
}
if (isDeleted(metadata) && !opts.rev) {
result = extend({}, Pouch.Errors.MISSING_DOC, {reason:"deleted"});
result = Pouch.error(Pouch.Errors.MISSING_DOC, "deleted");
return;
}

var rev = Pouch.merge.winningRev(metadata);
var key = opts.rev ? opts.rev : rev;
var index = txn.objectStore(BY_SEQ_STORE).index('_rev');
var key = metadata.id + '::' + (opts.rev ? opts.rev : rev);
var index = txn.objectStore(BY_SEQ_STORE).index('_doc_id_rev');

index.get(key).onsuccess = function(e) {
var doc = e.target.result;
if(doc && doc._doc_id_rev) {
delete(doc._doc_id_rev);
}
if (!doc) {
result = Pouch.Errors.MISSING_DOC;
return;
@@ -622,8 +613,12 @@
if (opts.include_docs) {
doc.doc = data;
doc.doc._rev = Pouch.merge.winningRev(metadata);
if (doc.doc._doc_id_rev) {
delete(doc.doc._doc_id_rev);
}
if (opts.conflicts) {
doc.doc._conflicts = collectConflicts(metadata);
doc.doc._conflicts = Pouch.merge.collectConflicts(metadata)
.map(function(x) { return x.id; });
}
}
if ('keys' in opts) {
@@ -645,9 +640,10 @@
if (!opts.include_docs) {
allDocsInner(metadata);
} else {
var index = transaction.objectStore(BY_SEQ_STORE).index('_rev');
var index = transaction.objectStore(BY_SEQ_STORE).index('_doc_id_rev');
var mainRev = Pouch.merge.winningRev(metadata);
index.get(mainRev).onsuccess = function(event) {
var key = metadata.id + "::" + mainRev;
index.get(key).onsuccess = function(event) {
allDocsInner(cursor.value, event.target.result);
};
}
@@ -687,8 +683,6 @@
console.log(name + ': Start Changes Feed: continuous=' + opts.continuous);
}

opts = extend(true, {}, opts);

if (!opts.since) {
opts.since = 0;
}
@@ -758,9 +752,11 @@
var cursor = event.target.result;

// Try to pre-emptively dedup to save us a bunch of idb calls
var changeId = cursor.value._id, changeIdIndex = resultIndices[changeId];
var changeId = cursor.value._id;
var changeIdIndex = resultIndices[changeId];
if (changeIdIndex !== undefined) {
results[changeIdIndex].seq = cursor.key; // update so it has the later sequence number
results[changeIdIndex].seq = cursor.key;
// update so it has the later sequence number
results.push(results[changeIdIndex]);
results[changeIdIndex] = null;
resultIndices[changeId] = results.length - 1;
@@ -775,25 +771,28 @@
}

var mainRev = Pouch.merge.winningRev(metadata);
var index = txn.objectStore(BY_SEQ_STORE).index('_rev');
index.get(mainRev).onsuccess = function(docevent) {
var key = metadata.id + "::" + mainRev;
var index = txn.objectStore(BY_SEQ_STORE).index('_doc_id_rev');
index.get(key).onsuccess = function(docevent) {
var doc = docevent.target.result;
var changeList = [{rev: mainRev}];
if (opts.style === 'all_docs') {
// console.log('all docs', changeList, collectLeaves(metadata.rev_tree));
changeList = collectLeaves(metadata.rev_tree);
changeList = Pouch.merge.collectLeaves(metadata.rev_tree)
.map(function(x) { return {rev: x.rev}; });
}
var change = {
id: metadata.id,
seq: cursor.key,
changes: changeList,
doc: doc
};

if (isDeleted(metadata, mainRev)) {
change.deleted = true;
}
if (opts.conflicts) {
change.doc._conflicts = collectConflicts(metadata);
change.doc._conflicts = Pouch.merge.collectConflicts(metadata)
.map(function(x) { return x.id; });
}

// Dedupe the changes feed
@@ -846,9 +845,10 @@

api._removeDocRevisions = function(docId, revs, callback) {
var txn = idb.transaction([BY_SEQ_STORE], IDBTransaction.READ_WRITE);
var index = txn.objectStore(BY_SEQ_STORE).index('_rev');
var index = txn.objectStore(BY_SEQ_STORE).index('_doc_id_rev');
revs.forEach(function(rev) {
index.getKey(rev).onsuccess = function(e) {
var key = docId + "::" + rev;
index.getKey(key).onsuccess = function(e) {
var seq = e.target.result;
if (!seq) {
return;
