Skip to content

Commit

Permalink
Merge pull request #10 from apostrophecms/pro-4261-export-attachments
Browse files Browse the repository at this point in the history
Pro 4261 export attachments
  • Loading branch information
ValJed committed Aug 30, 2023
2 parents 77155cd + 213dcd1 commit a988c32
Show file tree
Hide file tree
Showing 13 changed files with 477 additions and 146 deletions.
5 changes: 4 additions & 1 deletion .eslintrc.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
{
"extends": [ "apostrophe", "plugin:vue/vue3-recommended" ]
"extends": [ "apostrophe", "plugin:vue/vue3-recommended" ],
"globals": {
"apos": true
}
}
2 changes: 2 additions & 0 deletions i18n/en.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
{
"export": "Download {{ type }}",
"exportAttachmentError": "Some attachments could not be added to the {{ extension }} file",
"exportFileGenerationError": "The {{ extension }} file generation failed",
"exported": "Downloaded {{ count }} {{ type }}",
"exporting": "Downloading {{ type }}...",
"exportModalDescription": "You've selected {{ count }} {{ type }} for download",
Expand Down
61 changes: 50 additions & 11 deletions lib/formats/archiver.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ module.exports = {
compress
};

function compress(filepath, data, archive) {
function compress(apos, filepath, data, archive) {
return new Promise((resolve, reject) => {
const output = createWriteStream(filepath);
let response;

archive.on('warning', function(err) {
if (err.code === 'ENOENT') {
Expand All @@ -16,21 +17,59 @@ function compress(filepath, data, archive) {
}
});
archive.on('error', reject);
archive.on('finish', resolve);

archive.on('finish', () => {
resolve(response);
});
archive.pipe(output);

for (const filename in data) {
const content = data[filename];
for (const [ filename, content ] of Object.entries(data.json || {})) {
archive.append(content, { name: filename });
}

if (content.endsWith('/')) {
archive.directory(content, filename);
continue;
}
compressAttachments(apos, archive, data.attachments || {})
.then((res) => {
response = res;
archive.finalize();
});
});
}

archive.append(content, { name: filename });
async function compressAttachments(apos, archive, attachments = {}) {
  // Download every attachment and append it to the archive under
  // "attachments/<name>". Downloads run in batches of at most BATCH_SIZE
  // concurrent requests: each batch's promises are only created once the
  // previous batch has settled. (Creating all the promises up front, then
  // chunking them, would fire every HTTP request at once and defeat the
  // concurrency limit.)
  // A failed download never rejects this function; failures are reported
  // through the returned `attachmentError` flag so the archive can still
  // be produced with whatever attachments succeeded.
  // Returns { attachmentError: boolean }.
  const BATCH_SIZE = 5;
  const entries = Object.entries(attachments);
  let attachmentError = false;

  for (let position = 0; position < entries.length; position += BATCH_SIZE) {
    const batch = entries.slice(position, position + BATCH_SIZE);

    const results = await Promise.allSettled(
      batch.map(([ name, url ]) => new Promise((resolve, reject) => {
        apos.http.get(url, { originalResponse: true })
          .then((res) => {
            // Resolve only once the response stream has been fully
            // consumed by the archive.
            res.body.on('error', reject);
            res.body.on('end', resolve);

            archive.append(res.body, {
              name: `attachments/${name}`
            });
          })
          .catch(reject);
      }))
    );

    if (results.some(({ status }) => status === 'rejected')) {
      attachmentError = true;
    }
  }

  return { attachmentError };
}

archive.finalize();
// Split `attachments` (any array, despite the name) into consecutive
// chunks of at most `max` elements each. The last chunk may be shorter.
// Replaces the previous Array(length).fill([]) construction, which shared
// a single empty-array reference across every slot and spread it to no
// effect; plain slicing is clearer and equivalent.
function chunkPromises(attachments, max) {
  const chunks = [];

  for (let position = 0; position < attachments.length; position += max) {
    chunks.push(attachments.slice(position, position + max));
  }

  return chunks;
}
4 changes: 2 additions & 2 deletions lib/formats/gzip.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@ const { compress } = require('./archiver');
module.exports = {
label: 'gzip',
extension: 'tar.gz',
output(filepath, data) {
output(apos, filepath, data) {
const archive = archiver('tar', { gzip: true });

return compress(filepath, data, archive);
return compress(apos, filepath, data, archive);
}
};
4 changes: 2 additions & 2 deletions lib/formats/zip.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@ const { compress } = require('./archiver');

module.exports = {
label: 'Zip',
output(filepath, data) {
output(apos, filepath, data) {
const archive = archiver('zip');

return compress(filepath, data, archive);
return compress(apos, filepath, data, archive);
}
};
31 changes: 20 additions & 11 deletions lib/methods/archive.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ const util = require('util');

module.exports = self => {
return {
async createArchive(req, reporting, docs, attachments, options) {
async createArchive(req, reporting, data, options) {
const {
extension,
format,
Expand All @@ -16,16 +16,25 @@ module.exports = self => {
const filename = `${self.apos.util.generateId()}-export.${specificExtension || extension}`;
const filepath = path.join(self.apos.attachment.uploadfs.getTempPath(), filename);

const docsData = JSON.stringify(docs, undefined, 2);
const attachmentsData = JSON.stringify(attachments, undefined, 2);

const data = {
'aposDocs.json': docsData,
'aposAttachments.json': attachmentsData
// attachments: 'attachments/' // TODO: add attachment into an "/attachments" folder
};

await format.output(filepath, data);
try {
const { attachmentError } = await format.output(self.apos, filepath, data);
if (attachmentError) {
await self.apos.notification.trigger(req, 'aposImportExport:exportAttachmentError', {
interpolate: { extension },
dismiss: true,
icon: 'alert-circle-icon',
type: 'warning'
});
}
} catch ({ message }) {
self.apos.error('error', message);
await self.apos.notification.trigger(req, 'aposImportExport:exportFileGenerationError', {
interpolate: { extension },
dismiss: true,
icon: 'alert-circle-icon',
type: 'error'
});
}

// Must copy it to uploadfs, the server that created it
// and the server that delivers it might be different
Expand Down
162 changes: 103 additions & 59 deletions lib/methods/export.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
// TODO: remove:
const attachmentsMock = [ { foo: 'bar' } ];
const MAX_RECURSION = 10;

module.exports = self => {
return {
Expand All @@ -12,7 +11,6 @@ module.exports = self => {
reporting.setTotal(req.body._ids.length);
}

// TODO: add batchSize?
const ids = self.apos.launder.ids(req.body._ids);
const relatedTypes = self.apos.launder.strings(req.body.relatedTypes);
const extension = self.apos.launder.string(req.body.extension, 'zip');
Expand All @@ -30,7 +28,7 @@ module.exports = self => {
if (!relatedTypes.length) {
const docs = await self.fetchActualDocs(req, allIds, reporting);

return self.createArchive(req, reporting, docs, attachmentsMock, {
return self.createArchive(req, reporting, this.formatArchiveData(docs), {
extension,
expiration,
format
Expand All @@ -44,17 +42,52 @@ module.exports = self => {
// since they might have different related documents.
const relatedIds = draftDocs
.concat(publishedDocs)
.flatMap(doc => self.getRelatedDocsIds(manager, doc, relatedTypes));
.flatMap(doc => {
return self.getRelatedIdsBySchema({
doc,
schema: self.apos.modules[doc.type].schema,
relatedTypes
});
});

const allRelatedIds = self.getAllModesIds(relatedIds);

const docs = await self.fetchActualDocs(req, [ ...allIds, ...allRelatedIds ], reporting);

return self.createArchive(req, reporting, docs, attachmentsMock, {
extension,
expiration,
format
const attachmentsIds = docs.flatMap(doc => {
return self.getRelatedIdsBySchema({
doc,
schema: self.apos.modules[doc.type].schema,
type: 'attachment'
});
});
const attachments = await self.fetchActualDocs(req, attachmentsIds, reporting, 'attachment');
const attachmentUrls = Object.fromEntries(
attachments.map((attachment) => {
const name = `${attachment._id}-${attachment.name}.${attachment.extension}`;
return [ name, self.apos.attachment.url(attachment, { size: 'original' }) ];
})
);

return self.createArchive(req,
reporting,
self.formatArchiveData(docs, attachments, attachmentUrls),
{
extension,
expiration,
format
}
);
},

// Shapes export data for the archive writers: `json` maps archive file
// names to pretty-printed JSON payloads (2-space indent), and
// `attachments` maps attachment file names to their download URLs
// (consumed by the archiver's attachment-compression step).
formatArchiveData(docs, attachments = [], urls = {}) {
  return {
    json: {
      'aposDocs.json': JSON.stringify(docs, undefined, 2),
      'aposAttachments.json': JSON.stringify(attachments, undefined, 2)
    },
    attachments: urls
  };
},

// Add the published version ID next to each draft ID,
Expand All @@ -72,10 +105,13 @@ module.exports = self => {
// without altering the fields or populating them, as the managers would.
// It is ok if docs corresponding to published IDs do not exist in the database,
// as they simply will not be fetched.
async fetchActualDocs(req, docsIds, reporting) {
const docsIdsUniq = [ ...new Set(docsIds) ];
async fetchActualDocs(req, docsIds, reporting, collection = 'doc') {
if (!docsIds.length) {
return [];
}

const docs = await self.apos.doc.db
const docsIdsUniq = [ ...new Set(docsIds) ];
const docs = await self.apos[collection].db
.find({
_id: {
$in: docsIdsUniq
Expand Down Expand Up @@ -115,56 +151,64 @@ module.exports = self => {
.toArray();
},

getRelatedDocsIds(manager, doc, relatedTypes) {
// Use `doc.type` for pages to get the actual schema of the corresponding page type.
const schema = manager.schema || self.apos.modules[doc.type].schema;
getRelatedIdsBySchema({
doc, schema, type = 'relationship', relatedTypes, recursion = 0
}) {
return schema.flatMap(field => {
const fieldValue = doc[field.name];
const shouldRecurse = recursion <= MAX_RECURSION;

if (
!fieldValue ||
(relatedTypes && field.withType && !relatedTypes.includes(field.withType))
// TODO: handle 'exportDoc: false' option
/* ( */
/* type === 'relationship' && */
/* field.withType && */
/* self.apos.modules[field.withType].options.relatedDocument === false */
/* ) */
) {
return [];
}

return self
.getRelatedBySchema(doc, schema)
.filter(relatedDoc => relatedTypes.includes(relatedDoc.type))
.map(relatedDoc => relatedDoc._id);
},
if (shouldRecurse && field.type === 'array') {
return fieldValue.flatMap((subField) => self.getRelatedIdsBySchema({
doc: subField,
schema: field.schema,
type,
relatedTypes,
recursion: recursion + 1
}));
}

// TODO: factorize with the one from AposI18nLocalize.vue
// TODO: limit recursion to 10 as we do when retrieving related types?
getRelatedBySchema(object, schema) {
let related = [];
for (const field of schema) {
if (field.type === 'array') {
for (const value of (object[field.name] || [])) {
related = [
...related,
...self.getRelatedBySchema(value, field.schema)
];
}
} else if (field.type === 'object') {
if (object[field.name]) {
related = [
...related,
...self.getRelatedBySchema(object[field.name], field.schema)
];
}
} else if (field.type === 'area') {
for (const widget of (object[field.name]?.items || [])) {
related = [
...related,
...self.getRelatedBySchema(widget, self.apos.modules[`${widget?.type}-widget`]?.schema || [])
];
}
} else if (field.type === 'relationship') {
related = [
...related,
...(object[field.name] || [])
];
// Stop here, don't recurse through relationships or we're soon
// related to the entire site
if (shouldRecurse && field.type === 'object') {
return self.getRelatedIdsBySchema({
doc: fieldValue,
schema: field.schema,
type,
relatedTypes,
recursion: recursion + 1
});
}
}
// Filter out doc types that opt out completely (pages should
// never be considered "related" to other pages simply because
// of navigation links, the feature is meant for pieces that feel more like
// part of the document being localized)
return related.filter(doc => self.apos.modules[doc.type].relatedDocument !== false);

if (shouldRecurse && field.type === 'area') {
return (fieldValue.items || []).flatMap((widget) => self.getRelatedIdsBySchema({
doc: widget,
schema: self.apos.modules[`${widget?.type}-widget`]?.schema || [],
type,
relatedTypes,
recursion: recursion + 1
}));
}

if (field.type === type) {
return Array.isArray(fieldValue)
? fieldValue.map(({ _id }) => _id)
: [ fieldValue._id ];
}

return [];
});
}
};
};
Loading

0 comments on commit a988c32

Please sign in to comment.