Add Upload Part - Copy #443

Merged
merged 7 commits into from
Apr 3, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
85 changes: 85 additions & 0 deletions lib/controllers/object.js
@@ -736,3 +736,88 @@ exports.uploadPart = async function uploadPart(ctx) {
throw err;
}
};

exports.uploadPartCopy = async function uploadPartCopy(ctx) {
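  // x-amz-copy-source has the form "<source-bucket>/<source-key>", optionally with a
  // leading slash; strip the slash and treat everything after the first "/" as the key.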
let copySource = decodeURI(ctx.headers['x-amz-copy-source']);
copySource = copySource.startsWith('/') ? copySource.slice(1) : copySource;
let [srcBucket, ...srcKey] = copySource.split('/');
srcKey = srcKey.join('/');

const bucket = await ctx.store.getBucket(srcBucket);
if (!bucket) {
ctx.logger.error('No bucket found for "%s"', srcBucket);
throw new S3Error('NoSuchBucket', 'The specified bucket does not exist', {
BucketName: srcBucket,
});
}

const options = {};
if ('x-amz-copy-source-range' in ctx.headers) {
const match = /^bytes=(\d+)-(\d+)$/.exec(
ctx.headers['x-amz-copy-source-range'],
);
if (!match) {
throw new S3Error(
'InvalidArgument',
'The x-amz-copy-source-range value must be of the form bytes=first-last where first and last are the zero-based offsets of the first and last bytes to copy',
{
ArgumentName: 'x-amz-copy-source-range',
ArgumentValue: ctx.get('x-amz-copy-source-range'),
},
);
}
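    // S3 byte ranges are inclusive, so e.g. "bytes=0-10" copies the first 11 bytes.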
options.start = Number(match[1]);
options.end = Number(match[2]);
}

const object = await ctx.store.getObject(srcBucket, srcKey, options);
if (!object) {
throw new S3Error('NoSuchKey', 'The specified key does not exist.', {
Key: srcKey,
});
}

// Range request was out of range
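  // (the store either returned no content or could only serve a shorter range than requested)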
if (object.range && (!object.content || object.range.end < options.end)) {
if (object.content) object.content.destroy();
throw new S3Error(
'InvalidArgument',
`Range specified is not valid for source object of size: ${object.size}`,
{
ArgumentName: 'x-amz-copy-source-range',
ArgumentValue: ctx.get('x-amz-copy-source-range'),
},
);
}

try {
const { md5 } = await ctx.store.putPart(
ctx.params.bucket,
ctx.query.uploadId,
ctx.query.partNumber,
object.content,
);
ctx.logger.info(
'Copied part %s of %s in bucket "%s" successfully',
ctx.query.partNumber,
ctx.query.uploadId,
ctx.params.bucket,
);
ctx.etag = md5;
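    // S3 clients expect the ETag value wrapped in double quotes; JSON.stringify adds them.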
ctx.body = {
CopyPartResult: {
LastModified: new Date().toISOString(),
ETag: JSON.stringify(md5),
},
};
} catch (err) {
ctx.logger.error(
'Error copying part %s of %s to bucket "%s"',
ctx.query.partNumber,
ctx.query.uploadId,
ctx.params.bucket,
err,
);
throw err;
}
};
10 changes: 3 additions & 7 deletions lib/routes.js
@@ -210,13 +210,9 @@ router
       ? objectCtrl.putObjectCopy(ctx)
       : objectCtrl.putObject(ctx);
     case 'uploadId':
-      if ('x-amz-copy-source' in ctx.headers) {
-        throw new S3Error(
-          'NotImplemented',
-          'A header you provided implies functionality that is not implemented',
-        );
-      }
-      return objectCtrl.uploadPart(ctx);
+      return 'x-amz-copy-source' in ctx.headers
+        ? objectCtrl.uploadPartCopy(ctx)
+        : objectCtrl.uploadPart(ctx);
     case 'tagging':
       return objectCtrl.putObjectTagging(ctx);
     case 'acl':
184 changes: 184 additions & 0 deletions test/controllers/object.spec.js
@@ -1203,5 +1203,189 @@ describe('Operations on Objects', () => {
.promise();
expect(object.Metadata.somekey).to.equal('value');
});

it('should upload a part by copying it', async function () {
const upload = await s3Client
.createMultipartUpload({
Bucket: 'bucket-a',
Key: 'merged',
})
.promise();
await s3Client
.putObject({
Bucket: 'bucket-a',
Key: 'part',
Body: Buffer.alloc(20 * Math.pow(1024, 2)), // 20MB
})
.promise();
const data = await s3Client
.uploadPartCopy({
CopySource: 'bucket-a/part',
Bucket: 'bucket-a',
Key: 'destination',
PartNumber: 1,
UploadId: upload.UploadId,
})
.promise();
expect(JSON.parse(data.CopyPartResult.ETag)).to.be.ok;
await s3Client
.completeMultipartUpload({
Bucket: 'bucket-a',
Key: 'destination',
UploadId: upload.UploadId,
MultipartUpload: {
Parts: [
{
ETag: data.CopyPartResult.ETag,
PartNumber: 1,
},
],
},
})
.promise();
});

it('should copy parts from bucket to bucket', async function () {
const upload = await s3Client
.createMultipartUpload({
Bucket: 'bucket-a',
Key: 'merged',
})
.promise();
await s3Client
.putObject({
Bucket: 'bucket-b',
Key: 'part',
Body: Buffer.alloc(20 * Math.pow(1024, 2)), // 20MB
})
.promise();
const data = await s3Client
.uploadPartCopy({
CopySource: `bucket-b/part`,
Bucket: 'bucket-a',
Key: 'destination',
PartNumber: 1,
UploadId: upload.UploadId,
})
.promise();
expect(JSON.parse(data.CopyPartResult.ETag)).to.be.ok;
});

it('should copy a part range from bucket to bucket', async function () {
const upload = await s3Client
.createMultipartUpload({
Bucket: 'bucket-a',
Key: 'merged',
})
.promise();
await s3Client
.putObject({
Bucket: 'bucket-b',
Key: 'part',
Body: Buffer.alloc(20 * Math.pow(1024, 2)), // 20MB
})
.promise();
const data = await s3Client
.uploadPartCopy({
CopySource: `bucket-b/part`,
CopySourceRange: 'bytes=0-10',
Bucket: 'bucket-a',
Key: 'destination',
PartNumber: 1,
UploadId: upload.UploadId,
})
.promise();
expect(JSON.parse(data.CopyPartResult.ETag)).to.be.ok;
});

it('fails to copy a part range for an out-of-bounds request', async function () {
const upload = await s3Client
.createMultipartUpload({
Bucket: 'bucket-a',
Key: 'merged',
})
.promise();
const body = Buffer.alloc(20 * Math.pow(1024, 2)); // 20MB
await s3Client
.putObject({
Bucket: 'bucket-b',
Key: 'part',
Body: body,
})
.promise();

let error;
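      // body.length is one byte past the last valid zero-based offset (body.length - 1),
      // so the range requested below exceeds the source object.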
try {
await s3Client
.uploadPartCopy({
CopySource: `bucket-b/part`,
CopySourceRange: `bytes=${body.length - 10}-${body.length}`,
Bucket: 'bucket-a',
Key: 'destination',
PartNumber: 1,
UploadId: upload.UploadId,
})
.promise();
} catch (err) {
error = err;
}
expect(error).to.exist;
expect(error.code).to.equal('InvalidArgument');
expect(error.message).to.equal(
`Range specified is not valid for source object of size: ${body.length}`,
);
});

it('fails to copy a part from a nonexistent bucket', async function () {
const upload = await s3Client
.createMultipartUpload({
Bucket: 'bucket-a',
Key: 'merged',
})
.promise();

let error;
try {
await s3Client
.uploadPartCopy({
CopySource: `not-exist/part`,
Bucket: 'bucket-a',
Key: 'destination',
PartNumber: 1,
UploadId: upload.UploadId,
})
.promise();
} catch (err) {
error = err;
}
expect(error).to.exist;
expect(error.code).to.equal('NoSuchBucket');
});

it('fails to copy a part from a nonexistent key', async function () {
const upload = await s3Client
.createMultipartUpload({
Bucket: 'bucket-a',
Key: 'merged',
})
.promise();

let error;
try {
await s3Client
.uploadPartCopy({
CopySource: `bucket-b/not-exist`,
Bucket: 'bucket-a',
Key: 'destination',
PartNumber: 1,
UploadId: upload.UploadId,
})
.promise();
} catch (err) {
error = err;
}
expect(error).to.exist;
expect(error.code).to.equal('NoSuchKey');
});
});
});