Skip to content

Commit

Permalink
Added paging via naive limit / skip mechanism
Browse files Browse the repository at this point in the history
  • Loading branch information
craigedmunds committed Jul 1, 2014
1 parent b97d3d2 commit 4a725de
Show file tree
Hide file tree
Showing 3 changed files with 47 additions and 1 deletion.
7 changes: 7 additions & 0 deletions lib/adapters/mongodb.js
Original file line number Diff line number Diff line change
Expand Up @@ -325,6 +325,12 @@ adapter.findMany = function(model, query, projection) {
projection.limit = projection.limit || model.schema.options.defaultLimit || 1000;
projection.select = projection.select || '';

if (projection.page && projection.page > 0) {
projection.skip = (projection.page - 1) * projection.pageSize;
console.log("skip", projection.skip);
projection.limit = projection.pageSize;
}

//Ensure business id is included to selection
var pkNotRequested = false;
if (_.isArray(projection.select)){
Expand All @@ -343,6 +349,7 @@ adapter.findMany = function(model, query, projection) {
.select(projection.select)
.sort(projection.sort)
.lean(true)
.skip(projection.skip)
.exec(function(error, resources) {
if(error) {
return reject(error);
Expand Down
13 changes: 12 additions & 1 deletion lib/route.js
Original file line number Diff line number Diff line change
Expand Up @@ -309,10 +309,21 @@ function route(name, model, resources, inflect) {
projection.limit = req.query.limit;
}

if (req.query.sort){
if (req.query.sort){
projection.sort = req.query.sort;
}

if (req.query.page){
projection.page = req.query.page;

if (req.query.pageSize){
projection.pageSize = req.query.pageSize;
}
else {
projection.pageSize = 10;
}
}

//run beforeRead
beforeReadHook({}, req, res)
.then(function(){
Expand Down
28 changes: 28 additions & 0 deletions test/fortune/fields_and_filters.js
Original file line number Diff line number Diff line change
Expand Up @@ -328,5 +328,33 @@ module.exports = function(options){
});
});
});

describe('paging', function(){
// Requests the given page of /people (sorted by name, two per page)
// and verifies that exactly the expected names come back, in order.
function expectPage(pageNumber, expectedNames, done){
request(baseUrl).get('/people?sort=name&page=' + pageNumber + '&pageSize=2')
.expect(200)
.end(function(err, res){
should.not.exist(err);
var body = JSON.parse(res.text);
(body.people.length).should.equal(expectedNames.length);
_.pluck(body.people, "name").should.eql(expectedNames);
done();
});
}

it('should be possible to get page 1', function(done){
expectPage(1, ["Dilbert", "Robert"], done);
});

it('should be possible to get page 2', function(done){
expectPage(2, ["Sally", "Wally"], done);
});
});
});
};

1 comment on commit 4a725de

@cgrossde
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Using `page` and `pageSize` as query parameters on the storage backend is not good practice. Consider using a combination of `limit` and `offset` instead, and add a `total` entry to the response metadata so the client can handle pagination itself.

Please sign in to comment.