Skip to content

Commit

Permalink
Fixes: testing form that consumes dataset (#897)
Browse files Browse the repository at this point in the history
* added dataset download ability to testing endpoint
moved the common logic to query module

* moved common function to resource file
  • Loading branch information
sadiqkhoja committed May 29, 2023
1 parent d113ed2 commit 0661a7b
Show file tree
Hide file tree
Showing 2 changed files with 57 additions and 31 deletions.
66 changes: 35 additions & 31 deletions lib/resources/forms.js
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,30 @@ const excelMimeTypes = {
xlsx: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
};

// Streams the content of a form attachment to the client. An attachment is
// backed either by an uploaded blob or by a linked dataset (in which case the
// dataset's entities are rendered to CSV on the fly); if it has neither, 404.
// NOTE(review): `reject`, `Problem`, `withEtag`, `md5sum`, `binary`,
// `contentDisposition`, `getOrNotFound`, and `streamEntityCsvAttachment` come
// from this file's imports (not shown in this chunk).
const streamAttachment = async (container, attachment, response) => {
  const { Blobs, Datasets, Entities } = container;

  if (attachment.blobId == null && attachment.datasetId == null) {
    // The attachment slot exists on the form but has no content bound to it.
    return reject(Problem.user.notFound());
  } else if (attachment.blobId != null) {
    const blob = await Blobs.getById(attachment.blobId).then(getOrNotFound);
    // Quote the md5: HTTP ETag values are quoted strings (RFC 9110 §8.8.3),
    // and the pre-refactor inline code passed `"${blob.md5}"` — dropping the
    // quotes here silently changed the emitted/compared ETag.
    return withEtag(`"${blob.md5}"`, () => binary(blob.contentType, attachment.name, blob.content));
  } else {
    const dataset = await Datasets.getById(attachment.datasetId, true).then(getOrNotFound);
    const properties = await Datasets.getProperties(attachment.datasetId);
    const { lastEntity } = dataset.forApi();

    // Key the ETag on the timestamp of the most recently created entity; the
    // epoch string is a stable sentinel for a dataset with no entities yet.
    const serverEtag = md5sum(lastEntity?.toISOString() ?? '1970-01-01');

    return withEtag(serverEtag, async () => {
      const entities = await Entities.streamForExport(attachment.datasetId);
      // attachment.name is already a string; no template literal needed.
      response.append('Content-Disposition', contentDisposition(attachment.name));
      response.append('Content-Type', 'text/csv');
      return streamEntityCsvAttachment(entities, properties);
    });
  }
};

module.exports = (service, endpoint) => {
// This forms list can also be used to get a list of just the soft-deleted forms by adding ?deleted=true
// TODO: paging.
Expand Down Expand Up @@ -273,33 +297,14 @@ module.exports = (service, endpoint) => {
.then((form) => auth.canOrReject('form.read', form))
.then((form) => FormAttachments.getAllByFormDefId(form.def.id))));

service.get(`${base}/attachments/:name`, endpoint(({ Blobs, FormAttachments, Forms, Datasets, Entities }, { params, auth }, request, response) =>
getInstance(Forms, params)
service.get(`${base}/attachments/:name`, endpoint((container, { params, auth }, request, response) => {
const { FormAttachments, Forms } = container;
return getInstance(Forms, params)
.then((form) => auth.canOrReject('form.read', form))
.then((form) => FormAttachments.getByFormDefIdAndName(form.def.id, params.name)
.then(getOrNotFound)
.then(async (attachment) => {
if (attachment.blobId == null && attachment.datasetId == null) {
return reject(Problem.user.notFound());
} else if (attachment.blobId != null) {
const blob = await Blobs.getById(attachment.blobId).then(getOrNotFound);
return withEtag(`"${blob.md5}"`, () => binary(blob.contentType, attachment.name, blob.content));
} else {
const dataset = await Datasets.getById(attachment.datasetId, true).then(getOrNotFound);
const properties = await Datasets.getProperties(attachment.datasetId);
const { lastEntity } = dataset.forApi();

const serverEtag = md5sum(lastEntity?.toISOString() ?? '1970-01-01');

return withEtag(serverEtag, async () => {
const entities = await Entities.streamForExport(attachment.datasetId);
response.append('Content-Disposition', contentDisposition(`${attachment.name}`));
response.append('Content-Type', 'text/csv');
return streamEntityCsvAttachment(entities, properties);
});
}

}))));
.then(attachment => streamAttachment(container, attachment, response)));
}));
};

// the linter literally won't let me break this apart..
Expand Down Expand Up @@ -429,17 +434,16 @@ module.exports = (service, endpoint) => {
.then(checkFormToken(params.key))
.then((form) => xml(form.xml))));

service.get('/test/:key/projects/:projectId/forms/:id/draft/attachments/:name', endpoint(({ Blobs, FormAttachments, Forms }, { params }) =>
Forms.getByProjectAndXmlFormId(params.projectId, params.id, false, Form.DraftVersion)
service.get('/test/:key/projects/:projectId/forms/:id/draft/attachments/:name', endpoint((container, { params }, request, response) => {
const { FormAttachments, Forms } = container;
return Forms.getByProjectAndXmlFormId(params.projectId, params.id, false, Form.DraftVersion)
.then(getOrNotFound)
.then(ensureDef)
.then(checkFormToken(params.key))
.then((form) => FormAttachments.getByFormDefIdAndName(form.def.id, params.name)
.then(getOrNotFound)
.then((attachment) => ((attachment.blobId == null)
? reject(Problem.user.notFound())
: Blobs.getById(attachment.blobId)
.then(getOrNotFound)
.then((blob) => binary(blob.contentType, attachment.name, blob.content)))))));
.then(attachment => streamAttachment(container, attachment, response)));
}));

};

22 changes: 22 additions & 0 deletions test/integration/api/datasets.js
Original file line number Diff line number Diff line change
Expand Up @@ -1319,6 +1319,28 @@ describe('datasets and entities', () => {
text.should.equal('name,label,first_name,age\n12345678-1234-4123-8234-123456789abc,Alice (88),Alice,88\n');
})))));

// Regression test for #897: the token-authenticated /test/:key endpoint must
// be able to serve a dataset-backed form attachment as an entity CSV, not
// just blob-backed attachments.
it('should return entities csv for testing', testService(async (service, container) => {
const asAlice = await service.login('alice');

// createBothForms presumably publishes the entity-producing form and the
// form that consumes the dataset as an attachment — confirm against helper.
await createBothForms(asAlice);

// Submit one entity-creating submission, retargeted at the 'goodone'
// dataset via the string replace (the fixture defaults to 'people').
await asAlice.post('/v1/projects/1/forms/simpleEntity/submissions')
.send(testData.instances.simpleEntity.one.replace(/people/g, 'goodone'))
.set('Content-Type', 'application/xml')
.expect(200);

// Run pending background workers so the submission is processed into an entity.
await exhaust(container);

// The draft token is what authenticates requests to the /test endpoint.
const token = await asAlice.get('/v1/projects/1/forms/withAttachments/draft')
.expect(200)
.then(({ body }) => body.draftToken);

// The endpoint should stream the dataset as CSV using only the draft token
// (no session cookie/bearer auth), matching the single entity created above.
await service.get(`/v1/test/${token}/projects/1/forms/withAttachments/draft/attachments/goodone.csv`)
.expect(200)
.then(({ text }) => { text.should.equal('name,label,first_name,age\n12345678-1234-4123-8234-123456789abc,Alice (88),Alice,88\n'); });

}));

it('should return data for columns that contain valid special characters', testService(async (service, container) => {
const asAlice = await service.login('alice');

Expand Down

0 comments on commit 0661a7b

Please sign in to comment.