Skip to content

Commit

Permalink
Merge 6c3afea into 892b774
Browse files Browse the repository at this point in the history
  • Loading branch information
mitjap committed May 27, 2021
2 parents 892b774 + 6c3afea commit a97c354
Show file tree
Hide file tree
Showing 3 changed files with 112 additions and 39 deletions.
2 changes: 1 addition & 1 deletion README.md
Expand Up @@ -7,7 +7,7 @@

tus is a new open protocol for resumable uploads built on HTTP. This is the [tus protocol 1.0.0](http://tus.io/protocols/resumable-upload.html) Node.js server implementation.

> :warning: **Attention:** We currently lack the resources to properly maintain tus-node-server. This has the unfortunate consequence that this project is in rather bad condition (out-dated dependencies, no tests for the S3 storage, no resumable uploads for the GCS storage etc). If you want to help us with tus-node-server, we are more than happy to assist you and welcome new contributors. In the meantime, we can recommend [tusd](https://github.com/tus/tusd) as a reliable and production-tested tus server. Of course, you can use tus-node-server if it serves your purpose.
> :warning: **Attention:** We currently lack the resources to properly maintain tus-node-server. This has the unfortunate consequence that this project is in rather bad condition (outdated dependencies, no tests for the S3 storage, etc.). If you want to help us with tus-node-server, we are more than happy to assist you and welcome new contributors. In the meantime, we can recommend [tusd](https://github.com/tus/tusd) as a reliable and production-tested tus server. Of course, you can use tus-node-server if it serves your purpose.
## Installation

Expand Down
35 changes: 27 additions & 8 deletions lib/stores/GCSDataStore.js
Expand Up @@ -131,6 +131,8 @@ class GCSDataStore extends DataStore {
return new Promise((resolve, reject) => {
const file = this.bucket.file(file_id);

const destination = data.size === 0 ? file : this.bucket.file(`${file_id}_patch`);

const options = {
offset,
metadata: {
Expand All @@ -143,28 +145,45 @@ class GCSDataStore extends DataStore {
},
};

const write_stream = file.createWriteStream(options);
const write_stream = destination.createWriteStream(options);
if (!write_stream) {
return reject(ERRORS.FILE_WRITE_ERROR);
}

let new_offset = 0;
let new_offset = data.size;
req.on('data', (buffer) => {
new_offset += buffer.length;
});

write_stream.on('finish', () => {
write_stream.on('finish', async() => {
log(`${new_offset} bytes written`);

if (data.upload_length === new_offset) {
this.emit(EVENTS.EVENT_UPLOAD_COMPLETE, { file });
}
try {
if (file !== destination) {
await this.bucket.combine([file, destination], file);
await Promise.all([file.setMetadata(options.metadata), destination.delete({ ignoreNotFound: true })]);
}

resolve(new_offset);
if (data.upload_length === new_offset) {
this.emit(EVENTS.EVENT_UPLOAD_COMPLETE, { file });
}

resolve(new_offset);
}
catch (e) {
log(e);
reject(ERRORS.FILE_WRITE_ERROR);
}
});

write_stream.on('error', (e) => {
write_stream.on('error', async(e) => {
log(e);
try {
await destination.delete({ ignoreNotFound: true });
} catch {
// ignore error
}

reject(ERRORS.FILE_WRITE_ERROR);
});

Expand Down
114 changes: 84 additions & 30 deletions test/Test-GCSDataStore.js
Expand Up @@ -5,6 +5,7 @@ const should = require('should');
const assert = require('assert');
const path = require('path');
const fs = require('fs');
const stream = require('stream');
const Server = require('../lib/Server');
const DataStore = require('../lib/stores/DataStore');
const GCSDataStore = require('../lib/stores/GCSDataStore');
Expand Down Expand Up @@ -43,7 +44,6 @@ describe('GCSDataStore', () => {
}

let server;
let test_file_id;
const files_created = [];
before(() => {
server = new Server();
Expand Down Expand Up @@ -134,17 +134,16 @@ describe('GCSDataStore', () => {
},
};
server.datastore.create(req)
.then((file) => {
assert.equal(file instanceof File, true);
assert.equal(file.upload_length, TEST_FILE_SIZE);
test_file_id = file.id;
return done();
}).catch(console.log);
.then((file) => {
files_created.push(file.id);
assert.equal(file instanceof File, true);
assert.equal(file.upload_length, TEST_FILE_SIZE);
return done();
}).catch(console.log);
});


it(`should fire the ${EVENTS.EVENT_FILE_CREATED} event`, (done) => {
server.datastore.on(EVENTS.EVENT_FILE_CREATED, (event) => {
server.datastore.once(EVENTS.EVENT_FILE_CREATED, (event) => {
event.should.have.property('file');
assert.equal(event.file instanceof File, true);
done();
Expand All @@ -156,36 +155,78 @@ describe('GCSDataStore', () => {
},
};
server.datastore.create(req)
.catch(console.log);
.then((file) => {
files_created.push(file.id);
})
.catch(console.log);
});
});

describe('write', () => {
it('should open a stream and resolve the new offset', (done) => {
const write_stream = fs.createReadStream(TEST_FILE_PATH);
write_stream.once('open', () => {
server.datastore.write(write_stream, test_file_id, 0)
const req = {
headers: {
'upload-length': TEST_FILE_SIZE,
},
};

server.datastore.create(req)
.then((file) => {
files_created.push(file.id);

const write_stream = fs.createReadStream(TEST_FILE_PATH);
return server.datastore.write(write_stream, file.id, 0)
})
.then((offset) => {
assert.equal(offset, TEST_FILE_SIZE);
return done();
})
.catch(console.log);
});

it('should open a stream and resolve the new offset with continuation', (done) => {
const req = {
headers: {
'upload-length': 2 * TEST_FILE_SIZE,
},
};

server.datastore.create(req)
.then((file) => {
files_created.push(file.id);

return server.datastore.write(fs.createReadStream(TEST_FILE_PATH), file.id, 0)
.then((offset) => {
assert.equal(offset, 1 * TEST_FILE_SIZE);
return server.datastore.write(fs.createReadStream(TEST_FILE_PATH), file.id, offset)
})
.then((offset) => {
files_created.push(test_file_id.split('&upload_id')[0]);
assert.equal(offset, TEST_FILE_SIZE);
assert.equal(offset, 2 * TEST_FILE_SIZE);
return done();
})
.catch(console.log);
});
})
.catch(console.log);
});


it(`should fire the ${EVENTS.EVENT_UPLOAD_COMPLETE} event`, (done) => {
server.datastore.on(EVENTS.EVENT_UPLOAD_COMPLETE, (event) => {
server.datastore.once(EVENTS.EVENT_UPLOAD_COMPLETE, (event) => {
event.should.have.property('file');
done();
});

const write_stream = fs.createReadStream(TEST_FILE_PATH);
write_stream.once('open', () => {
server.datastore.write(write_stream, test_file_id, 0)
.catch(console.log);
});
const req = {
headers: {
'upload-length': TEST_FILE_SIZE,
},
};

server.datastore.create(req)
.then((file) => {
files_created.push(file.id);
const write_stream = fs.createReadStream(TEST_FILE_PATH);
return server.datastore.write(write_stream, file.id, 0)
})
.catch(console.log);
});
});

Expand All @@ -196,12 +237,25 @@ describe('GCSDataStore', () => {
});

it('should resolve existing files with the metadata', () => {
// TODO: upload this file to the bucket first
return server.datastore.getOffset(FILE_ALREADY_IN_BUCKET)
.should.be.fulfilledWith({
size: TEST_FILE_SIZE,
upload_length: TEST_FILE_SIZE,
});
const req = {
headers: {
'upload-length': TEST_FILE_SIZE,
},
};

server.datastore.create(req)
.then((file) => {
files_created.push(file.id);
const write_stream = fs.createReadStream(TEST_FILE_PATH);
return server.datastore.write(write_stream, file.id, 0)
.then(() => {
return server.datastore.getOffset(file.id)
})
})
.should.be.fulfilledWith({
size: TEST_FILE_SIZE,
upload_length: TEST_FILE_SIZE,
});
});
});
});

0 comments on commit a97c354

Please sign in to comment.