Skip to content

Commit

Permalink
feat(scripts): add uploads in restore & switch to fs promises (big restore) ✨
Browse files Browse the repository at this point in the history
  • Loading branch information
PierreBrisorgueil committed Jun 24, 2020
1 parent 95f3e45 commit 71eb5d5
Show file tree
Hide file tree
Showing 4 changed files with 79 additions and 11 deletions.
10 changes: 5 additions & 5 deletions config/defaults/development.js
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ module.exports = {
*/
},
promise: global.Promise,
restoreExceptions: ['uploads'],
restoreExceptions: [], // collection exceptions for db restore: npm run seed:mongorestore
},
// SSL on express server (FYI : Wiki)
// secure: {
Expand Down Expand Up @@ -106,10 +106,10 @@ module.exports = {
},
// zxcvbn is used to manage password security
zxcvbn: {
forbiddenPasswords: ['12345678', 'azertyui', 'qwertyui', 'azertyuiop', 'qwertyuiop'],
minSize: 8,
maxSize: 126,
minimumScore: 3,
forbiddenPasswords: ['12345678', 'azertyui', 'qwertyui', 'azertyuiop', 'qwertyuiop'], // passwords forbidden
minSize: 8, // min password size
maxSize: 126, // max password size
minimumScore: 3, // min password complexity score
},
// jwt is for token authentification
jwt: {
Expand Down
29 changes: 29 additions & 0 deletions modules/uploads/repositories/uploads.repository.js
Original file line number Diff line number Diff line change
Expand Up @@ -107,3 +107,32 @@ exports.purge = async (kind, collection, key) => {
});
return { deletedCount: toDelete.length };
};

/**
* @desc Function to import list of uploads in db
* @param {[Object]} uploads
* @param {[String]} filters
* @return {Object} uploads
*/
exports.import = (uploads, filters, collection) => {
const _schema = new mongoose.Schema({}, { collection, strict: false });
let model;
try {
model = mongoose.model(collection);
} catch (error) {
model = mongoose.model(collection, _schema);
}
return model.bulkWrite(uploads.map((upload) => {
const filter = {};
filters.forEach((value) => {
filter[value] = upload[value];
});
return {
updateOne: {
filter,
update: upload,
upsert: true,
},
};
}));
};
11 changes: 11 additions & 0 deletions modules/uploads/services/uploads.data.service.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,14 @@ exports.delete = async (user) => {
const result = await UploadRepository.deleteMany({ 'metadata.user': user._id });
return Promise.resolve(result);
};

/**
* @desc Function to ask repository to import a list of uploads
* @param {[Object]} uploads
* @param {[String]} filters
* @return {Promise} uploads
*/
exports.import = (uploads, filters, collection) => {
const result = UploadRepository.import(uploads, filters, collection);
return result;
};
40 changes: 34 additions & 6 deletions scripts/db/mongorestore.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
/* eslint-disable no-await-in-loop */
/* eslint-disable no-restricted-syntax */

/**
* Module dependencies
*/
Expand All @@ -7,13 +10,30 @@ const path = require('path');
const fs = require('fs');
const bson = require('bson');

const fsPromises = fs.promises;
const config = require(path.resolve('./config'));
const mongooseService = require(path.resolve('./lib/services/mongoose'));

/**
* Work
*/

const listDir = async (database) => {
try {
return fsPromises.readdir(path.resolve(`./scripts/db/dump/${database}`));
} catch (err) {
console.error('Error occured while reading directory dump! ./scripts/db/dump/', err);
}
};

const importFile = async (database, collection) => {
try {
return fsPromises.readFile(path.resolve(`./scripts/db/dump/${database}/${collection}.bson`));
} catch (err) {
console.error('Error occured while reading directory dump! ./scripts/db/dump/', err);
}
};

const seedData = async () => {
try {
console.log(chalk.bold.green('Start Seed Dump by update items if differents'));
Expand All @@ -24,24 +44,32 @@ const seedData = async () => {

let database = config.db.uri.split('/')[config.db.uri.split('/').length - 1];
database = database.split('?')[0];

console.log(chalk.bold.green(`database selected: ${database}`));

fs.readdirSync(path.resolve(`./scripts/db/dump/${database}`)).forEach((file) => {
const files = await listDir(database);

for (const file of files) {
if (file.slice(-4) === 'bson' && !config.db.restoreExceptions.includes(file.split('.')[0])) {
const collection = file.slice(0, -5);

const buffer = fs.readFileSync(path.resolve(`./scripts/db/dump/${database}/${collection}.bson`));
// read file
const buffer = await importFile(database, collection);
let bfIdx = 0;
const items = [];
while (bfIdx < buffer.length) bfIdx = bson.deserializeStream(buffer, bfIdx, 1, items, items.length);

const Service = require(path.resolve(`./modules/${collection}/services/${collection}.data.service`));
Service.import(items, ['_id']);
// insert
if (collection.split('.')[0] === 'uploads') {
const Service = require(path.resolve(`./modules/${collection.split('.')[0]}/services/${collection.split('.')[0]}.data.service`));
await Service.import(items, ['_id'], collection);
} else {
const Service = require(path.resolve(`./modules/${collection}/services/${collection}.data.service`));
await Service.import(items, ['_id']);
}

console.log(chalk.blue(`Database Seeding ${collection} : ${items.length}`));
}
});
}
} catch (err) {
console.log(chalk.bold.red(`Error ${err}`));
}
Expand Down

0 comments on commit 71eb5d5

Please sign in to comment.