Skip to content

Commit

Permalink
Merge pull request #18921 from PeteLevineA/continued-mysql-support
Browse files Browse the repository at this point in the history
patch: Add Continued MySQL Support
  • Loading branch information
freben committed Aug 22, 2023
2 parents 658b24e + e57d718 commit 20def97
Show file tree
Hide file tree
Showing 28 changed files with 172 additions and 66 deletions.
13 changes: 13 additions & 0 deletions .changeset/yellow-trains-protect.md
@@ -0,0 +1,13 @@
---
'@backstage/plugin-code-coverage-backend': patch
'@backstage/plugin-tech-insights-backend': patch
'@backstage/plugin-linguist-backend': patch
'@backstage/backend-common': patch
'@backstage/plugin-catalog-backend': patch
'@backstage/backend-tasks': patch
'@backstage/plugin-app-backend': patch
'@backstage/plugin-bazaar-backend': patch
'@backstage/plugin-scaffolder-backend': patch
---

Changes needed to support MySQL
Expand Up @@ -118,7 +118,7 @@ describe('MyDatabaseClass', () => {
// "physical" databases to test against is much costlier than creating the
// "logical" databases within them that the individual tests use.
const databases = TestDatabases.create({
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'],
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', 'MYSQL_8'],
});

// Just an example of how to conveniently bundle up the setup code
Expand Down
1 change: 1 addition & 0 deletions packages/backend-common/package.json
Expand Up @@ -92,6 +92,7 @@
"minimatch": "^5.0.0",
"minimist": "^1.2.5",
"morgan": "^1.10.0",
"mysql2": "^2.2.5",
"node-fetch": "^2.6.7",
"node-forge": "^1.3.1",
"pg": "^8.3.0",
Expand Down
27 changes: 27 additions & 0 deletions packages/backend-common/src/database/DatabaseManager.test.ts
Expand Up @@ -425,6 +425,33 @@ describe('DatabaseManager', () => {
);
});

// Verifies that when the configured client is mysql and no `prefix`
// option is set, forPlugin() still synthesizes a per-plugin database
// name override of the form `backstage_plugin_<pluginId>` instead of
// reusing the top-level `foodb` connection database.
it('generates a database name override when prefix is not explicitly set for mysql', async () => {
  const testManager = DatabaseManager.fromConfig(
    new ConfigReader({
      backend: {
        database: {
          client: 'mysql',
          connection: {
            host: 'localhost',
            user: 'foo',
            password: 'bar',
            database: 'foodb',
          },
        },
      },
    }),
  );

  await testManager.forPlugin('testplugin').getClient();
  // createDatabaseClient is jest-mocked; splice(-1) grabs the argument
  // list of the most recent call so we can inspect the overrides that
  // the manager applied on top of the base config.
  const mockCalls = mocked(createDatabaseClient).mock.calls.splice(-1);
  const [_baseConfig, overrides] = mockCalls[0];

  // The plugin must be pointed at its own database, not `foodb`.
  expect(overrides).toHaveProperty(
    'connection.database',
    expect.stringContaining('backstage_plugin_'),
  );
});

it('uses values from plugin connection string if top level client should be used', async () => {
const pluginId = 'stringoverride';
await manager.forPlugin(pluginId).getClient();
Expand Down
2 changes: 1 addition & 1 deletion packages/backend-tasks/migrations/20210928160613_init.js
Expand Up @@ -26,7 +26,7 @@ exports.up = async function up(knex) {
await knex.schema.createTable('backstage_backend_tasks__tasks', table => {
table.comment('Tasks used for scheduling work on multiple workers');
table
.text('id')
.string('id')
.primary()
.notNullable()
.comment('The unique ID of this particular task');
Expand Down
2 changes: 1 addition & 1 deletion packages/backend-tasks/src/migrations.test.ts
Expand Up @@ -43,7 +43,7 @@ jest.setTimeout(60_000);

describe('migrations', () => {
const databases = TestDatabases.create({
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'],
ids: ['POSTGRES_13', 'POSTGRES_9', 'MYSQL_8', 'SQLITE_3'],
});

it.each(databases.eachSupportedId())(
Expand Down
Expand Up @@ -42,6 +42,7 @@ describe('PluginTaskSchedulerJanitor', () => {
'POSTGRES_13',
'POSTGRES_9',
'SQLITE_3',
'MYSQL_8',
],
});

Expand Down
2 changes: 1 addition & 1 deletion packages/backend-tasks/src/tasks/TaskScheduler.test.ts
Expand Up @@ -25,7 +25,7 @@ jest.setTimeout(60_000);
describe('TaskScheduler', () => {
const logger = getVoidLogger();
const databases = TestDatabases.create({
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'],
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', 'MYSQL_8'],
});

async function createDatabase(
Expand Down
2 changes: 1 addition & 1 deletion packages/backend-tasks/src/tasks/TaskWorker.test.ts
Expand Up @@ -28,7 +28,7 @@ jest.setTimeout(60_000);
describe('TaskWorker', () => {
const logger = getVoidLogger();
const databases = TestDatabases.create({
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'],
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', 'MYSQL_8'],
});

beforeEach(() => {
Expand Down
27 changes: 18 additions & 9 deletions packages/backend-tasks/src/tasks/TaskWorker.ts
Expand Up @@ -175,11 +175,15 @@ export class TaskWorker {
const time = new CronTime(settings.cadence)
.sendAt()
.minus({ seconds: 1 }) // immediately, if "* * * * * *"
.toUTC()
.toISO();
startAt = this.knex.client.config.client.includes('sqlite3')
? this.knex.raw('datetime(?)', [time])
: this.knex.raw(`?`, [time]);
.toUTC();

if (this.knex.client.config.client.includes('sqlite3')) {
startAt = this.knex.raw('datetime(?)', [time.toISO()]);
} else if (this.knex.client.config.client.includes('mysql')) {
startAt = this.knex.raw(`?`, [time.toSQL({ includeOffset: false })]);
} else {
startAt = this.knex.raw(`?`, [time.toISO()]);
}
} else {
startAt = this.knex.fn.now();
}
Expand Down Expand Up @@ -279,11 +283,16 @@ export class TaskWorker {

let nextRun: Knex.Raw;
if (isCron) {
const time = new CronTime(settings.cadence).sendAt().toUTC().toISO();
const time = new CronTime(settings.cadence).sendAt().toUTC();
this.logger.debug(`task: ${this.taskId} will next occur around ${time}`);
nextRun = this.knex.client.config.client.includes('sqlite3')
? this.knex.raw('datetime(?)', [time])
: this.knex.raw(`?`, [time]);

if (this.knex.client.config.client.includes('sqlite3')) {
nextRun = this.knex.raw('datetime(?)', [time.toISO()]);
} else if (this.knex.client.config.client.includes('mysql')) {
nextRun = this.knex.raw(`?`, [time.toSQL({ includeOffset: false })]);
} else {
nextRun = this.knex.raw(`?`, [time.toISO()]);
}
} else {
const dt = Duration.fromISO(settings.cadence).as('seconds');
this.logger.debug(
Expand Down
1 change: 1 addition & 0 deletions packages/backend/package.json
Expand Up @@ -87,6 +87,7 @@
"express-prom-bundle": "^6.3.6",
"express-promise-router": "^4.1.0",
"luxon": "^3.0.0",
"mysql2": "^2.2.5",
"pg": "^8.3.0",
"pg-connection-string": "^2.3.0",
"prom-client": "^14.0.1",
Expand Down
1 change: 1 addition & 0 deletions packages/e2e-test/package.json
Expand Up @@ -35,6 +35,7 @@
"cross-fetch": "^3.1.5",
"fs-extra": "10.1.0",
"handlebars": "^4.7.3",
"mysql2": "^2.2.5",
"pgtools": "^1.0.0",
"puppeteer": "^17.0.0",
"tree-kill": "^1.2.2"
Expand Down
54 changes: 37 additions & 17 deletions packages/e2e-test/src/commands/run.ts
Expand Up @@ -31,7 +31,10 @@ import {
waitForExit,
print,
} from '../lib/helpers';

import mysql from 'mysql2/promise';
import pgtools from 'pgtools';

import { findPaths } from '@backstage/cli-common';

// eslint-disable-next-line no-restricted-syntax
Expand Down Expand Up @@ -66,9 +69,12 @@ export async function run() {
print('Starting the app');
await testAppServe(pluginId, appDir);

if (Boolean(process.env.POSTGRES_USER)) {
print('Testing the PostgreSQL backend startup');
await preCleanPostgres();
if (
Boolean(process.env.POSTGRES_USER) ||
Boolean(process.env.MYSQL_CONNECTION)
) {
print('Testing the database backend startup');
await preCleanDatabase();
const appConfig = path.resolve(appDir, 'app-config.yaml');
const productionConfig = path.resolve(appDir, 'app-config.production.yaml');
await testBackendStart(
Expand All @@ -79,7 +85,7 @@ export async function run() {
productionConfig,
);
}
print('Testing the SQLite backend startup');
print('Testing the Database backend startup');
await testBackendStart(appDir);

if (process.env.CI) {
Expand Down Expand Up @@ -427,24 +433,39 @@ async function testAppServe(pluginId: string, appDir: string) {
}

/** Drops PG databases */
async function dropDB(database: string) {
const config = {
host: process.env.POSTGRES_HOST,
port: process.env.POSTGRES_PORT,
user: process.env.POSTGRES_USER,
password: process.env.POSTGRES_PASSWORD,
};

/**
 * Drops a single database, best-effort.
 *
 * @param database - Name of the database to drop.
 * @param client - Either 'postgres' or 'mysql'; anything else is a no-op.
 *
 * Errors (e.g. the database not existing) are deliberately swallowed,
 * since this only cleans up remnants of prior e2e runs.
 */
async function dropDB(database: string, client: string) {
  try {
    if (client === 'postgres') {
      const config = {
        host: process.env.POSTGRES_HOST,
        port: process.env.POSTGRES_PORT,
        user: process.env.POSTGRES_USER,
        password: process.env.POSTGRES_PASSWORD,
      };
      await pgtools.dropdb(config, database);
    } else if (client === 'mysql') {
      const connectionString = process.env.MYSQL_CONNECTION ?? '';
      const connection = await mysql.createConnection(connectionString);
      try {
        // Identifiers cannot be bound as `?` placeholders: `execute`
        // would render `DROP DATABASE 'name'` (a string literal), which
        // is a MySQL syntax error that the catch below would silently
        // hide. Use query() with the `??` identifier-escaping token so
        // the name is emitted as a properly quoted identifier.
        await connection.query('DROP DATABASE ??', [database]);
      } finally {
        // Always release the connection so the e2e process can exit.
        await connection.end();
      }
    }
  } catch (_) {
    /* best-effort: the database may simply not exist */
  }
}

/** Clean remnants from prior e2e runs */
async function preCleanPostgres() {
/** Clean remnants from prior e2e runs */
async function preCleanDatabase() {
  print('Dropping old DBs');
  // Only touch the engines that are actually configured via env vars,
  // in the same order as before: postgres first, then mysql.
  const candidates = [
    ['postgres', process.env.POSTGRES_HOST],
    ['mysql', process.env.MYSQL_CONNECTION],
  ] as const;
  for (const [client, envValue] of candidates) {
    if (envValue) {
      await dropClientDatabases(client);
    }
  }
  print('Dropped DBs');
}

async function dropClientDatabases(client: string) {
await Promise.all(
[
'catalog',
Expand All @@ -454,9 +475,8 @@ async function preCleanPostgres() {
'proxy',
'techdocs',
'search',
].map(name => dropDB(`backstage_plugin_${name}`)),
].map(name => dropDB(`backstage_plugin_${name}`, client)),
);
print('Created DBs');
}

/**
Expand Down
19 changes: 17 additions & 2 deletions plugins/app-backend/migrations/20211229105307_init.js
Expand Up @@ -20,11 +20,20 @@
* @param {import('knex').Knex} knex
*/
exports.up = async function up(knex) {
return knex.schema.createTable('static_assets_cache', table => {
const isMySQL = knex.client.config.client.includes('mysql');
await knex.schema.createTable('static_assets_cache', table => {
table.comment(
'A cache of static assets that where previously deployed and may still be lazy-loaded by clients',
);
table.text('path').primary().notNullable().comment('The path of the file');
if (!isMySQL) {
table
.text('path')
.primary()
.notNullable()
.comment('The path of the file');
} else {
table.text('path').notNullable().comment('The path of the file');
}
table
.dateTime('last_modified_at')
.defaultTo(knex.fn.now())
Expand All @@ -35,6 +44,12 @@ exports.up = async function up(knex) {
table.binary('content').notNullable().comment('The asset content');
table.index('last_modified_at', 'static_asset_cache_last_modified_at_idx');
});
// For MySQL specifically, create a unique index over the first 254 characters of the path, since MySQL limits index key length
if (isMySQL) {
await knex.schema.raw(
'create unique index static_assets_cache_path_idx on static_assets_cache(path(254));',
);
}
};

/**
Expand Down
14 changes: 9 additions & 5 deletions plugins/app-backend/src/lib/assets/StaticAssetsStore.test.ts
Expand Up @@ -37,7 +37,7 @@ jest.setTimeout(60_000);

describe('StaticAssetsStore', () => {
const databases = TestDatabases.create({
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'],
ids: ['MYSQL_8', 'POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'],
});

it.each(databases.eachSupportedId())(
Expand Down Expand Up @@ -153,14 +153,18 @@ describe('StaticAssetsStore', () => {
content: async () => Buffer.alloc(0),
},
]);

// Default "one hour ago" expression uses postgres syntax; overridden below for mysql and sqlite
let hourPast = `now() + interval '-3600 seconds'`;
if (knex.client.config.client.includes('mysql')) {
hourPast = `date_sub(now(), interval 3600 second)`;
} else if (knex.client.config.client.includes('sqlite3')) {
hourPast = `datetime('now', '-3600 seconds')`;
}
// Rewrite modified time of "old" to be 1h in the past
const updated = await knex('static_assets_cache')
.where({ path: 'old' })
.update({
last_modified_at: knex.client.config.client.includes('sqlite3')
? knex.raw(`datetime('now', '-3600 seconds')`)
: knex.raw(`now() + interval '-3600 seconds'`),
last_modified_at: knex.raw(hourPast),
});
expect(updated).toBe(1);

Expand Down
20 changes: 13 additions & 7 deletions plugins/app-backend/src/lib/assets/StaticAssetsStore.ts
Expand Up @@ -138,14 +138,20 @@ export class StaticAssetsStore implements StaticAssetProvider {
*/
async trimAssets(options: { maxAgeSeconds: number }) {
  const { maxAgeSeconds } = options;
  // Build a dialect-specific "now minus maxAgeSeconds" expression.
  // Default is the postgres form; mysql and sqlite are special-cased.
  // NOTE(review): maxAgeSeconds is interpolated into raw SQL for the
  // postgres/mysql branches — safe only because it is typed as number;
  // callers must not pass untrusted strings here.
  let lastModifiedInterval = this.#db.raw(
    `now() + interval '${-maxAgeSeconds} seconds'`,
  );
  if (this.#db.client.config.client.includes('mysql')) {
    lastModifiedInterval = this.#db.raw(
      `date_sub(now(), interval ${maxAgeSeconds} second)`,
    );
  } else if (this.#db.client.config.client.includes('sqlite3')) {
    lastModifiedInterval = this.#db.raw(`datetime('now', ?)`, [
      `-${maxAgeSeconds} seconds`,
    ]);
  }
  // Delete every cached asset whose last modification is at or before
  // the cutoff computed above.
  await this.#db<StaticAssetRow>('static_assets_cache')
    .where('last_modified_at', '<=', lastModifiedInterval)
    .delete();
}
}
4 changes: 2 additions & 2 deletions plugins/bazaar-backend/migrations/20211014144054_init.js
Expand Up @@ -23,7 +23,7 @@ exports.up = async function up(knex) {
await knex.schema.createTable('metadata', table => {
table.comment('The table of Bazaar metadata');
table
.text('entity_ref')
.string('entity_ref')
.notNullable()
.unique()
.comment('The ref of the entity');
Expand All @@ -49,7 +49,7 @@ exports.up = async function up(knex) {
await knex.schema.createTable('members', table => {
table.comment('The table of Bazaar members');
table
.text('entity_ref')
.string('entity_ref')
.notNullable()
.references('metadata.entity_ref')
.onDelete('CASCADE')
Expand Down
Expand Up @@ -89,9 +89,11 @@ exports.up = async function up(knex) {
await knex.schema.alterTable('members', table => {
table
.integer('item_id')
.unsigned()
.references('metadata.id')
.onDelete('CASCADE')
.comment('Id of the associated item');
table.dropForeign('entity_ref');
table.dropColumn('entity_ref');
});
}
Expand Down
2 changes: 1 addition & 1 deletion plugins/bazaar-backend/src/service/DatabaseHandler.test.ts
Expand Up @@ -36,7 +36,7 @@ jest.setTimeout(60_000);

describe('DatabaseHandler', () => {
const databases = TestDatabases.create({
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'],
ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', 'MYSQL_8'],
});

function createDatabaseManager(
Expand Down

0 comments on commit 20def97

Please sign in to comment.