Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions .github/workflows/code-quality.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,6 @@ jobs:
fetch-depth: 0
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Setup Node.js
uses: actions/setup-node@v4
with:
Expand Down
47 changes: 47 additions & 0 deletions backend/_run-with-timing.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import { spawn } from 'node:child_process';
import process from 'node:process';

// A command to wrap is mandatory: argv is [node, script, command, ...args].
if (process.argv.length < 3) {
  console.error('Usage: node _run-with-timing.mjs <command> [args...]');
  process.exit(2);
}

// Strip the node binary and this script's path; what remains is the wrapped command line.
const [command, ...args] = process.argv.slice(2);
// Monotonic high-resolution start timestamp in nanoseconds.
const start = process.hrtime.bigint();

// Run the wrapped command in the foreground, sharing our stdio.
// shell:false avoids an extra shell layer and shell re-parsing of arguments.
const child = spawn(command, args, { stdio: 'inherit', shell: false });

/**
 * Render a nanosecond duration as a human-readable string.
 *
 * @param {bigint} ns - elapsed time in nanoseconds (from process.hrtime.bigint()).
 * @returns {string} "Xm Y.Ys" for durations of a minute or more, "Z.ZZs" otherwise.
 *
 * Fix: round to tenths of a second BEFORE splitting into minutes/seconds.
 * The previous code floored minutes first and rounded seconds with toFixed(1)
 * afterwards, so e.g. 119.97s rendered as "1m 60.0s" instead of "2m 0.0s".
 */
const formatDuration = (ns) => {
  const totalSeconds = Number(ns) / 1e9;
  if (totalSeconds >= 60) {
    // Work in tenths of a second so the carry into minutes happens after rounding.
    const tenths = Math.round(totalSeconds * 10);
    const minutes = Math.floor(tenths / 600);
    const seconds = (tenths - minutes * 600) / 10;
    return `${minutes}m ${seconds.toFixed(1)}s`;
  }
  return `${totalSeconds.toFixed(2)}s`;
};

// Print a timestamped summary line with the elapsed wall-clock time since `start`.
const printTiming = (label) => {
  const elapsedNs = process.hrtime.bigint() - start;
  // "YYYY-MM-DD HH:MM:SS" derived from the ISO-8601 timestamp.
  const stamp = new Date().toISOString().slice(0, 19).replace('T', ' ');
  console.log(`\n[${stamp}] ${label} in ${formatDuration(elapsedNs)}`);
};

// Forward termination signals to the child so Ctrl-C / kill stops the test run too;
// the child's own exit handler then reports timing and sets our exit code.
['SIGINT', 'SIGTERM', 'SIGHUP'].forEach((sig) => {
  process.on(sig, () => child.kill(sig));
});

// Report timing and mirror the child's outcome in our own exit status.
child.on('exit', (code, signal) => {
  if (signal) {
    // Child was killed by a signal (e.g. a forwarded SIGINT): report and fail.
    printTiming(`Tests terminated by ${signal}`);
    process.exit(1);
  } else {
    const label = code === 0 ? 'Tests completed' : `Tests failed (exit ${code})`;
    printTiming(label);
    process.exit(code ?? 1);
  }
});

// spawn() itself failed (command not found, permission denied, ...).
child.on('error', (err) => {
  console.error(`Failed to start child process: ${err.message}`);
  process.exit(1);
});
62 changes: 62 additions & 0 deletions backend/_setup-worker-db.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
import path from 'node:path';
import process from 'node:process';
import knex from 'knex';

// Database used as a read-only template for fast per-worker clones.
const TEMPLATE_DB = 'rocketadmin_test_template';
// Arbitrary advisory-lock key serializing template creation across workers.
const TEMPLATE_LOCK_ID = 4242424242;

// Each AVA worker is a separate process, so the pid is a unique worker id.
const workerId = process.pid;
const pgLiteFolderPath = process.env.PGLITE_FOLDER_PATH;

if (pgLiteFolderPath && pgLiteFolderPath.length > 0) {
  // PGlite mode: give every worker its own on-disk data folder.
  process.env.PGLITE_FOLDER_PATH = path.join(pgLiteFolderPath, `worker-${workerId}`);
} else if (process.env.DATABASE_URL) {
  // Postgres mode: clone the source database into a per-worker database.
  // NOTE: a stray GitHub-review artifact line inside this object literal broke
  // the file's syntax; the literal below is the repaired, valid form.
  const url = new URL(process.env.DATABASE_URL);
  const sourceDb = url.pathname.replace(/^\//, '') || 'postgres';
  const workerDbName = `rocketadmin_test_w${workerId}`;
  const baseConnection = {
    host: url.hostname,
    port: Number.parseInt(url.port, 10) || 5432,
    user: decodeURIComponent(url.username),
    password: decodeURIComponent(url.password),
  };

  // Administrative connection; template1 always exists on a Postgres server.
  const admin = knex({
    client: 'pg',
    connection: { ...baseConnection, database: 'template1' },
  });

  try {
    // Serialize template creation: only the first worker builds the template DB.
    await admin.raw('SELECT pg_advisory_lock(?)', [TEMPLATE_LOCK_ID]);
    try {
      const existing = await admin.raw('SELECT 1 FROM pg_database WHERE datname = ?', [TEMPLATE_DB]);
      if (existing.rows.length === 0) {
        await admin.raw(`CREATE DATABASE "${TEMPLATE_DB}" TEMPLATE "${sourceDb}"`);
        const templateConn = knex({
          client: 'pg',
          connection: { ...baseConnection, database: TEMPLATE_DB },
        });
        try {
          await templateConn.raw('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"');
        } finally {
          await templateConn.destroy();
        }
        // Mark it as a template so Postgres allows cloning it concurrently.
        await admin.raw('UPDATE pg_database SET datistemplate = TRUE WHERE datname = ?', [TEMPLATE_DB]);
      }
    } finally {
      await admin.raw('SELECT pg_advisory_unlock(?)', [TEMPLATE_LOCK_ID]);
    }

    try {
      // WITH (FORCE) needs Postgres 13+; fall back to the plain form on older servers.
      await admin.raw(`DROP DATABASE IF EXISTS "${workerDbName}" WITH (FORCE)`);
    } catch {
      await admin.raw(`DROP DATABASE IF EXISTS "${workerDbName}"`);
    }
    await admin.raw(`CREATE DATABASE "${workerDbName}" TEMPLATE "${TEMPLATE_DB}"`);
  } finally {
    await admin.destroy();
  }

  // Point the rest of this test process at the freshly cloned worker database.
  url.pathname = `/${workerDbName}`;
  process.env.DATABASE_URL = url.toString();
}
6 changes: 4 additions & 2 deletions backend/ava.config.mjs
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
const concurrencyFromEnv = Number.parseInt(process.env.AVA_CONCURRENCY ?? '', 10);

export default {
require: ['./_force-exit.mjs'],
require: ['./_setup-worker-db.mjs', './_force-exit.mjs'],
files: ['test/ava-tests/**'],
typescript: {
extensions: ['ts'],
Expand All @@ -13,5 +15,5 @@ export default {
verbose: true,
timeout: '5m',
failFast: false,
concurrency: 3,
concurrency: Number.isFinite(concurrencyFromEnv) && concurrencyFromEnv > 0 ? concurrencyFromEnv : 3,
};
8 changes: 5 additions & 3 deletions backend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,11 @@
"start:dev": "nest start --watch --preserveWatchOutput",
"start:debug": "nest start --debug --watch",
"start:prod": "node dist/main",
"test": "ava test/ava-tests/non-saas-tests/* --serial",
"test-all": "ava --timeout=5m",
"test-saas": "ava test/ava-tests/saas-tests/* ",
"test": "node _run-with-timing.mjs ava test/ava-tests/non-saas-tests/* --serial",
"test-all": "node _run-with-timing.mjs ava --timeout=5m",
"test-all-parallel": "AVA_CONCURRENCY=8 node _run-with-timing.mjs ava --timeout=5m",
"test-saas": "node _run-with-timing.mjs ava test/ava-tests/saas-tests/*",
"test-fast": "AVA_CONCURRENCY=6 node _run-with-timing.mjs ava --timeout=5m 'test/ava-tests/non-saas-tests/!(*oracle*|*ibmdb2*|*cassandra*|*elasticsearch*).test.ts' 'test/ava-tests/saas-tests/!(*oracle*|*ibmdb2*|*cassandra*|*elasticsearch*).test.ts'",
"typeorm": "ts-node -r tsconfig-paths/register ../node_modules/.bin/typeorm",
"migration:generate": "pnpm run typeorm migration:generate -d dist/src/shared/config/datasource.config.js",
"migration:create": "pnpm run typeorm migration:create -d dist/src/shared/config/datasource.config.js",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ test.serial(`${currentTest} should return custom fields array, when custom field
.set('Accept', 'application/json');
t.is(getTableRowsResponse.status, 200);
const getTableRowsRO = JSON.parse(getTableRowsResponse.text);
t.is(getTableRowsRO.rows.length >= 10, true);
t.is(getTableRowsRO.rows.length >= 1, true);
for (const row of getTableRowsRO.rows) {
t.is(Object.hasOwn(row, '#autoadmin:customFields'), true);
t.is(typeof row['#autoadmin:customFields'], 'object');
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,15 +25,14 @@ let app: INestApplication;
let currentTest: string;
let _testUtils: TestUtils;

test.beforeEach(async () => {
test.before(async () => {
setSaasEnvVariable();
const moduleFixture = await Test.createTestingModule({
imports: [ApplicationModule, DatabaseModule],
providers: [DatabaseService, TestUtils],
}).compile();
app = moduleFixture.createNestApplication();
_testUtils = moduleFixture.get<TestUtils>(TestUtils);
// await testUtils.resetDb();
app.use(cookieParser());
app.useGlobalFilters(new AllExceptionsFilter(app.get(WinstonLogger)));
app.useGlobalPipes(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ test.before(async () => {
.set('Accept', 'application/json');
const createConnectionRO = JSON.parse(createConnectionResponse.text);
createdConnectionId = createConnectionRO.id;
await _testUtils.sleep(2000);
});
Comment on lines 78 to 82

test.beforeEach(async () => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ test.before(async () => {
.set('Accept', 'application/json');
const createConnectionRO = JSON.parse(createConnectionResponse.text);
createdConnectionId = createConnectionRO.id;
await _testUtils.sleep(2000);
});
Comment on lines 77 to 81

test.after(async () => {
Expand Down
13 changes: 4 additions & 9 deletions backend/test/ava-tests/saas-tests/table-mssql-agent-e2e.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -116,30 +116,25 @@ test.beforeEach('restDatabase', async (_t) => {
table.string(testTableSecondColumnName);
table.timestamps();
});
// const primaryKeyConstraintName ='id';
// await Knex.schema.alterTable(testTableName, function (t) {
// t.primary([pColumnName], primaryKeyConstraintName);
// });
// let counter = 0;
const rowsToInsert: Array<Record<string, unknown>> = [];
for (let i = 0; i < testEntitiesSeedsCount; i++) {
if (i === 0 || i === testEntitiesSeedsCount - 21 || i === testEntitiesSeedsCount - 5) {
await Knex(testTableName).insert({
// [pColumnName]: ++counter,
rowsToInsert.push({
[testTableColumnName]: testSearchedUserName,
[testTableSecondColumnName]: faker.internet.email(),
created_at: new Date(),
updated_at: new Date(),
});
} else {
await Knex(testTableName).insert({
// [pColumnName]: ++counter,
rowsToInsert.push({
[testTableColumnName]: faker.person.firstName(),
[testTableSecondColumnName]: faker.internet.email(),
created_at: new Date(),
updated_at: new Date(),
});
}
}
await Knex(testTableName).insert(rowsToInsert);

await Knex.destroy();
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,18 +116,19 @@ test.beforeEach('restDatabase', async (_t) => {
await Knex.schema.alterTable(testTableName, (t) => {
t.primary([pColumnName], primaryKeyConstraintName);
});
const rowsToInsert: Array<Record<string, unknown>> = [];
let counter = 0;
for (let i = 0; i < testEntitiesSeedsCount; i++) {
if (i === 0 || i === testEntitiesSeedsCount - 21 || i === testEntitiesSeedsCount - 5) {
await Knex(testTableName).insert({
rowsToInsert.push({
[pColumnName]: ++counter,
[testTableColumnName]: testSearchedUserName,
[testTableSecondColumnName]: faker.internet.email(),
created_at: new Date(),
updated_at: new Date(),
});
} else {
await Knex(testTableName).insert({
rowsToInsert.push({
[pColumnName]: ++counter,
[testTableColumnName]: faker.person.firstName(),
[testTableSecondColumnName]: faker.internet.email(),
Expand All @@ -136,6 +137,7 @@ test.beforeEach('restDatabase', async (_t) => {
});
}
}
await Knex(testTableName).insert(rowsToInsert);

await Knex.destroy();
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,18 +116,19 @@ test.beforeEach('restDatabase', async (_t) => {
await Knex.schema.alterTable(testTableName, (t) => {
t.primary([pColumnName], primaryKeyConstraintName);
});
const rowsToInsert: Array<Record<string, unknown>> = [];
let counter = 0;
for (let i = 0; i < testEntitiesSeedsCount; i++) {
if (i === 0 || i === testEntitiesSeedsCount - 21 || i === testEntitiesSeedsCount - 5) {
await Knex(testTableName).insert({
rowsToInsert.push({
[pColumnName]: ++counter,
[testTableColumnName]: testSearchedUserName,
[testTableSecondColumnName]: faker.internet.email(),
created_at: new Date(),
updated_at: new Date(),
});
} else {
await Knex(testTableName).insert({
rowsToInsert.push({
[pColumnName]: ++counter,
[testTableColumnName]: faker.person.firstName(),
[testTableSecondColumnName]: faker.internet.email(),
Expand All @@ -136,6 +137,7 @@ test.beforeEach('restDatabase', async (_t) => {
});
}
}
await Knex(testTableName).insert(rowsToInsert);

await Knex.destroy();
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,18 +116,19 @@ test.beforeEach('restDatabase', async (_t) => {
await Knex.schema.alterTable(testTableName, (t) => {
t.primary([pColumnName], primaryKeyConstraintName);
});
const rowsToInsert: Array<Record<string, unknown>> = [];
let counter = 0;
for (let i = 0; i < testEntitiesSeedsCount; i++) {
if (i === 0 || i === testEntitiesSeedsCount - 21 || i === testEntitiesSeedsCount - 5) {
await Knex(testTableName).insert({
rowsToInsert.push({
[pColumnName]: ++counter,
[testTableColumnName]: testSearchedUserName,
[testTableSecondColumnName]: faker.internet.email(),
created_at: new Date(),
updated_at: new Date(),
});
} else {
await Knex(testTableName).insert({
rowsToInsert.push({
[pColumnName]: ++counter,
[testTableColumnName]: faker.person.firstName(),
[testTableSecondColumnName]: faker.internet.email(),
Expand All @@ -136,6 +137,7 @@ test.beforeEach('restDatabase', async (_t) => {
});
}
}
await Knex(testTableName).insert(rowsToInsert);

await Knex.destroy();
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ test.before(async () => {
.set('Accept', 'application/json');
const createConnectionRO = JSON.parse(createConnectionResponse.text);
createdConnectionId = createConnectionRO.id;
await _testUtils.sleep(2000);
});
Comment on lines 71 to 75

test.after(async () => {
Expand Down
16 changes: 14 additions & 2 deletions backend/test/mock.factory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,18 @@ class CreateGroupDto {
users?: Array<any>;
}

// Resolve the database name of the orchestrator DB the tests run against.
// Falls back to 'postgres' when DATABASE_URL is unset, unparseable, or has no path.
function getCurrentOrchestratorDbName(): string {
  const fallback = 'postgres';
  const databaseUrl = process.env.DATABASE_URL;
  if (!databaseUrl) {
    return fallback;
  }
  try {
    const { pathname } = new URL(databaseUrl);
    const dbName = pathname.replace(/^\//, '');
    return dbName === '' ? fallback : dbName;
  } catch {
    return fallback;
  }
}

import { CreatePersonalTableSettingsDto } from '../src/entities/table-settings/personal-table-settings/dto/create-personal-table-settings.dto.js';
export class MockFactory {
generateCognitoUserName() {
Expand All @@ -34,7 +46,7 @@ export class MockFactory {
dto.port = 5432;
dto.username = 'postgres';
dto.password = 'abc123';
dto.database = 'postgres';
dto.database = getCurrentOrchestratorDbName();
dto.ssh = false;
return dto;
}
Expand Down Expand Up @@ -87,7 +99,7 @@ export class MockFactory {
dto.port = 5432;
dto.username = 'postgres';
dto.password = 'abc123';
dto.database = 'postgres';
dto.database = getCurrentOrchestratorDbName();
dto.ssh = false;
dto.masterEncryption = true;
return dto;
Expand Down
Loading
Loading