Skip to content

Commit

Permalink
docs(NODE-4775): improve documentation on gridfs start and end options
Browse files Browse the repository at this point in the history
  • Loading branch information
nbbeeken committed Nov 7, 2022
1 parent 528449d commit bf7a132
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 43 deletions.
9 changes: 7 additions & 2 deletions src/gridfs/download.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,14 @@ import type { GridFSChunk } from './upload';
export interface GridFSBucketReadStreamOptions {
sort?: Sort;
skip?: number;
/** 0-based offset in bytes to start streaming from */
/**
* 0-indexed non-negative byte offset from the beginning of the file
*/
start?: number;
/** 0-based offset in bytes to stop streaming before */
/**
* 0-indexed non-negative byte offset to the end of the file contents
* to be returned by the stream. `end` is non-inclusive
*/
end?: number;
}

Expand Down
67 changes: 26 additions & 41 deletions test/integration/gridfs/gridfs_stream.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -1002,57 +1002,42 @@ describe('GridFS Stream', function () {
});
});

/**
* Provide start and end parameters for file download to skip ahead x bytes and limit the total amount of bytes read to n
*
* @example-class GridFSBucket
* @example-method openDownloadStream
*/
it('NODE-829 start/end options for openDownloadStream where start-end is < size of chunk', {
it('should return only end - start bytes when the end is within a chunk', {
metadata: { requires: { topology: ['single'] } },

test(done) {
const configuration = this.configuration;
const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 });
client.connect(function (err, client) {
const db = client.db(configuration.db);
const bucket = new GridFSBucket(db, {
bucketName: 'gridfsdownload',
chunkSizeBytes: 20
});
// Provide start and end parameters for file download to skip
// ahead x bytes and limit the total amount of bytes read to n
const db = client.db();

const readStream = fs.createReadStream('./LICENSE.md');
const uploadStream = bucket.openUploadStream('teststart.dat');
const start = 1;
const end = 6;

uploadStream.once('finish', function () {
const downloadStream = bucket
.openDownloadStreamByName('teststart.dat', { start: 1 })
.end(6);
const bucket = new GridFSBucket(db, {
bucketName: 'gridfsdownload',
chunkSizeBytes: 20
});

downloadStream.on('error', function (error) {
expect(error).to.not.exist;
});
const readStream = fs.createReadStream('./LICENSE.md');
const uploadStream = bucket.openUploadStream('teststart.dat');

let gotData = 0;
let str = '';
downloadStream.on('data', function (data) {
++gotData;
str += data.toString('utf8');
});
uploadStream.once('finish', function () {
const downloadStream = bucket.openDownloadStreamByName('teststart.dat', { start }).end(end);

downloadStream.on('end', function () {
// Depending on different versions of node, we may get
// different amounts of 'data' events. node 0.10 gives 2,
// node >= 0.12 gives 3. Either is correct, but we just
// care that we got between 1 and 3, and got the right result
expect(gotData >= 1 && gotData <= 3).to.equal(true);
expect(str).to.equal('pache');
client.close(done);
});
downloadStream.on('error', done);

let str = '';
downloadStream.on('data', function (data) {
str += data.toString('utf8');
});

readStream.pipe(uploadStream);
downloadStream.on('end', function () {
expect(str).to.equal('pache');
expect(str).to.have.lengthOf(end - start);
client.close(done);
});
});

readStream.pipe(uploadStream);
}
});

Expand Down

0 comments on commit bf7a132

Please sign in to comment.