
[CLI Tests] Add seeding capabilities #19170

Merged 6 commits on Jan 12, 2024
11 changes: 0 additions & 11 deletions cli-tests/constants.js
@@ -1,16 +1,5 @@
const { CUSTOM_TRANSFER_TOKEN_ACCESS_KEY } = require('./app-template/template/src/constants');

const ALLOWED_CONTENT_TYPES = ['admin::user', 'admin::role', 'admin::permission'];

// TODO: we should start using @strapi.io addresses to have the chance one day to
// actually receive and check the emails; also: it is not nice to spam other people's
// websites
const ADMIN_EMAIL_ADDRESS = 'test@testing.com';
const ADMIN_PASSWORD = 'Testing123!';

module.exports = {
ADMIN_EMAIL_ADDRESS,
ADMIN_PASSWORD,
ALLOWED_CONTENT_TYPES,
CUSTOM_TRANSFER_TOKEN_ACCESS_KEY,
};
87 changes: 87 additions & 0 deletions cli-tests/scripts/dts-export.js
@@ -0,0 +1,87 @@
'use strict';

const {
strapi: {
providers: { createLocalStrapiSourceProvider },
},
file: {
providers: { createLocalFileDestinationProvider },
},
engine: { createTransferEngine },
} = require('@strapi/data-transfer');
const { strapiFactory } = require('@strapi/strapi');
const path = require('path');

/**
* Export the data from a strapi project.
* This script should be run as `node <path-to>/dts-export.js [exportFilePath]` from the
* root directory of a strapi project e.g. `/examples/kitchensink`.
*/
const createDataset = async () => {
let args = process.argv.slice(2);

if (args.length !== 1) {
console.error('Please provide the dataset path name as a parameter.');
process.exit(1);
}

const datasetPath = path.resolve(args[0]);

const strapi = await createStrapiInstance();

const source = createSourceProvider(strapi);
const destination = createDestinationProvider(datasetPath);

const engine = createTransferEngine(source, destination, {
versionStrategy: 'ignore',
schemaStrategy: 'ignore',
});

engine.diagnostics.onDiagnostic(console.log);

try {
const results = await engine.transfer();
const { destination, engine: engineResults } = results;

const relativeArchivePath = path.relative(process.cwd(), destination.file.path);

console.log(`Dataset exported to: ${relativeArchivePath}`);
console.log('The export contains:');
console.log(` - ${engineResults.schemas?.count ?? 0} schemas`);
console.log(` - ${engineResults.entities?.count ?? 0} entities`);
console.log(` - ${engineResults.links?.count ?? 0} links`);
console.log(` - ${engineResults.assets?.count ?? 0} assets`);
console.log(` - ${engineResults.configuration?.count ?? 0} configs`);

process.exit(0);
} catch (e) {
console.error('Export process failed.');
console.error(e);
process.exit(1);
}
};

const createSourceProvider = (strapi) =>
createLocalStrapiSourceProvider({
async getStrapi() {
return strapi;
},
});

const createDestinationProvider = (datasetPath) => {
return createLocalFileDestinationProvider({
file: { path: datasetPath },
encryption: { enabled: false },
compression: { enabled: false },
});
};

const createStrapiInstance = async (logLevel = 'error') => {
const appContext = await strapiFactory.compile();
const app = strapiFactory(appContext);

app.log.level = logLevel;
return app.load();
};

createDataset();
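
For reference, the script is meant to be run from an example app's root, per the usage note at the top of the file; a hedged invocation sketch (the relative script path and the archive name are illustrative):

cd examples/kitchensink
node ../../cli-tests/scripts/dts-export.js backup.tar

The resulting archive can then serve as a fixture for the import helper below.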
50 changes: 50 additions & 0 deletions cli-tests/scripts/dts-import.js
@@ -0,0 +1,50 @@
'use strict';

const { resolve } = require('path');
const { CUSTOM_TRANSFER_TOKEN_ACCESS_KEY } = require('../constants');

const {
file: {
providers: { createLocalFileSourceProvider },
},
strapi: {
providers: { createRemoteStrapiDestinationProvider },
},
engine: { createTransferEngine },
} = require('@strapi/data-transfer');

/**
* Reset the DB and import data from a DTS dataset
*/
const resetDatabaseAndImportDataFromPath = async (filePath) => {
const source = createSourceProvider(filePath);
const destination = createDestinationProvider();

const engine = createTransferEngine(source, destination, {
versionStrategy: 'ignore',
schemaStrategy: 'ignore',
});

engine.diagnostics.onDiagnostic(console.log);

try {
await engine.transfer();
} catch (e) {
console.error('Import process failed.');
console.error(e);
process.exit(1);
}
};

const createSourceProvider = (filePath) =>
createLocalFileSourceProvider({
file: { path: resolve(filePath) },
encryption: { enabled: false },
compression: { enabled: false },
});

const createDestinationProvider = () => {
return createRemoteStrapiDestinationProvider({
url: new URL(`http://127.0.0.1:${process.env.PORT ?? 1337}/admin`),
auth: { type: 'token', token: CUSTOM_TRANSFER_TOKEN_ACCESS_KEY },
strategy: 'restore',
});
};

module.exports = { resetDatabaseAndImportDataFromPath };
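
A minimal sketch of how a test suite could consume this helper, assuming a Jest/Playwright-style beforeAll hook, the utils barrel added below, and a hypothetical dataset path:

const { seed } = require('../utils');

beforeAll(async () => {
  // Restore a known dataset so every test starts from the same database state;
  // the './data/with-admin.tar' path is illustrative, not part of this PR.
  await seed.resetDatabaseAndImportDataFromPath('./data/with-admin.tar');
});

Since the destination provider uses the 'restore' strategy against the app's /admin transfer endpoint, the target Strapi server must already be listening on process.env.PORT (or 1337) when this runs.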
217 changes: 217 additions & 0 deletions cli-tests/utils/fs.js
@@ -0,0 +1,217 @@
'use strict';

const path = require('node:path');
const { pipeline } = require('node:stream');
const fs = require('node:fs');
const { parser: jsonlParser } = require('stream-json/jsonl/Parser');
const { chain } = require('stream-chain');
const tar = require('tar');

// COLLECTORS

/**
* Collects and buffers data from a Readable stream.
*
* @param {import('stream').PassThrough} entry - The Readable stream from which to collect data.
* @returns {Promise<Buffer[]>} A promise that resolves with an array of buffers containing the collected data.
*/
const rawCollector = (entry) => {
/**
* @type {Buffer[]}
*/
const chunks = [];

return new Promise((resolve, reject) => {
entry
.on('data', (data) => {
chunks.push(data);
})
.on('error', reject)
.on('end', () => resolve(chunks));
});
};

/**
* Collects a string from the given entry using a raw collector.
*
* @param {import('stream').PassThrough} entry - The entry to collect the string from.
* @returns {Promise<string>} - A Promise that resolves to the collected string.
*/
const stringCollector = async (entry) => {
return await rawCollector(entry).then((chunks) => chunks.map((chunk) => chunk.toString()).join(''));
};

/**
* Collects and processes JSONL data from a given entry.
*
* @param {import('stream').PassThrough} entry - The entry to collect JSONL data from.
* @returns {Promise<unknown[]>} A promise that resolves to the collected data.
*/
const jsonlCollector = async (entry) => {
const transforms = [
// JSONL parser to read the data chunks one by one (line by line)
jsonlParser({ checkErrors: true }),
// The JSONL parser returns each line as key/value
(line) => line.value,
];

const stream = entry.pipe(chain(transforms));

return rawCollector(stream);
};

/**
* Asynchronously collects the content of an entry and converts it to JSON format.
*
* @param {import('stream').PassThrough} entry - The entry to collect the content from.
* @returns {Promise} A promise that resolves to the content of the entry in JSON format.
*/
const jsonCollector = async (entry) => {
return await stringCollector(entry).then((content) => JSON.parse(content));
};

// FILES

/**
* Reads a file from an archive.
*
* @param {string} archive - The path to the archive file.
* @param {string} file - The name of the file to read.
* @param {Object} [options={}] - Additional options.
* @param {Function} [options.collector=stringCollector] - A function to collect the content of the file.
* @returns {Promise<string>} - The content of the file.
* @throws {Error} - If the file is not found in the archive.
*/
const readFile = async (archive, file, options = {}) => {
const { collector = stringCollector } = options;
/**
* @type {string | undefined}
*/
let content = undefined;

await new Promise((resolve, reject) => {
pipeline(
[
// Source: Archive stream
fs.createReadStream(archive),

// Transform: tar parser
new tar.Parse({
// Match tar entry with the given filename
filter: (filePath, entry) => entry.type === 'File' && file === filePath,
// Collect the matched entry's content
async onentry(entry) {
content = await collector(entry);
},
}),
],
(err) => (err ? reject(err) : resolve())
);
});

if (content === undefined) {
throw new Error(`File not found: ${file} in ${archive}`);
}

return content;
};

/**
* Reads a JSON Lines file from an archive.
*
* @param {string} archive - The archive to read from.
* @param {string} file - The JSON Lines file to read.
* @returns {Promise<any>} - A promise that will resolve to the content of the JSON Lines file.
*/
const readJSONLFile = async (archive, file) => {
return readFile(archive, file, { collector: jsonlCollector });
};

/**
* Reads a JSON file from an archive.
*
* @param {string} archive - The name of the archive.
* @param {string} file - The name of the JSON file.
* @returns {Promise} - A promise that resolves to the JSON data.
*/
const readJSONFile = async (archive, file) => {
return readFile(archive, file, { collector: jsonCollector });
};

// DIRECTORIES

/**
* Read and retrieve files from a specific directory in a TAR archive.
*
* @param {string} archive - The path to the TAR archive.
* @param {string} dir - The directory path inside the archive.
* @returns {Promise<string[]>} - A promise that resolves with an array of filenames in the specified directory.
*/
const readDir = async (archive, dir) => {
/**
* @type {string[]}
*/
const files = [];

await new Promise((resolve, reject) => {
pipeline(
[
// Source: Archive stream
fs.createReadStream(archive),
// Transform: tar parser
new tar.Parse({
// Match file entries located directly in the given directory
filter: (filePath, entry) => entry.type === 'File' && dir === path.dirname(filePath),
// Record the entry's file name
async onentry(entry) {
files.push(path.basename(entry.path));

// Consume the entry anyway to avoid blocking the tar parser
await rawCollector(entry);
},
}),
],
(err) => (err ? reject(err) : resolve())
);
});

return files;
};

const readJSONLDir = async (archive, dir) => {
const files = await readDir(archive, dir);

const filesContent = await Promise.all(
files
// Prefix paths with the directory name
.map((file) => path.join(dir, file))
// Read files content as JSONL
.map((file) => readJSONLFile(archive, file))
);

// Flatten the results to a single JSON collection
return filesContent.flat();
};

module.exports = {
// Files
readFile,
readJSONFile,
readJSONLFile,
// Directories
readDir,
readJSONLDir,

tar: (archive) => ({
// Files
readFile: (file) => readFile(archive, file),
readJSONLFile: (file) => readJSONLFile(archive, file),
readJSONFile: (file) => readJSONFile(archive, file),
// Directories
readDir: (dir) => readDir(archive, dir),
readJSONLDir: (dir) => readJSONLDir(archive, dir),
}),
};
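
A sketch of how these helpers could be used to assert on an archive produced by dts-export.js; note that the metadata.json entry name and the entities/ directory layout are assumptions about the DTS archive format, not something this module enforces:

const { tar } = require('./fs');

const inspectArchive = async (archivePath) => {
  const archive = tar(archivePath);

  // Read a single JSON entry from the archive root (assumed entry name).
  const metadata = await archive.readJSONFile('metadata.json');

  // Read every JSONL file under a directory and flatten the lines into one
  // array (assumed directory name).
  const entities = await archive.readJSONLDir('entities');

  return { metadata, entityCount: entities.length };
};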
5 changes: 4 additions & 1 deletion cli-tests/utils/index.js
@@ -1,3 +1,6 @@
'use strict';

module.exports = {};
module.exports = {
fs: require('./fs'),
seed: require('../scripts/dts-import'),
};
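
With the barrel in place, spec files can pull both helper groups from a single path; a sketch (the relative require path depends on where the spec file lives):

const { fs, seed } = require('../utils');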
3 changes: 3 additions & 0 deletions package.json
@@ -142,7 +142,10 @@
"prettier": "2.8.4",
"qs": "6.11.1",
"rimraf": "3.0.2",
"stream-chain": "2.2.5",
"stream-json": "1.8.0",
"supertest": "6.3.3",
"tar": "6.1.13",
"ts-jest": "29.1.0",
"typescript": "5.3.2",
"yalc": "1.0.0-pre.53",