diff --git a/.changeset/yellow-trains-protect.md b/.changeset/yellow-trains-protect.md new file mode 100644 index 0000000000000..4b8c50ac3e126 --- /dev/null +++ b/.changeset/yellow-trains-protect.md @@ -0,0 +1,13 @@ +--- +'@backstage/plugin-code-coverage-backend': patch +'@backstage/plugin-tech-insights-backend': patch +'@backstage/plugin-linguist-backend': patch +'@backstage/backend-common': patch +'@backstage/plugin-catalog-backend': patch +'@backstage/backend-tasks': patch +'@backstage/plugin-app-backend': patch +'@backstage/plugin-bazaar-backend': patch +'@backstage/plugin-scaffolder-backend': patch +--- + +Changes needed to support MySQL diff --git a/docs/backend-system/building-plugins-and-modules/02-testing.md b/docs/backend-system/building-plugins-and-modules/02-testing.md index fdd8cdb1e30fc..8ede8855e4209 100644 --- a/docs/backend-system/building-plugins-and-modules/02-testing.md +++ b/docs/backend-system/building-plugins-and-modules/02-testing.md @@ -118,7 +118,7 @@ describe('MyDatabaseClass', () => { // "physical" databases to test against is much costlier than creating the // "logical" databases within them that the individual tests use. 
const databases = TestDatabases.create({ - ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'], + ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', 'MYSQL_8'], }); // Just an example of how to conveniently bundle up the setup code diff --git a/packages/backend-common/package.json b/packages/backend-common/package.json index fd9b9cb0dfc1e..dee9ea2333467 100644 --- a/packages/backend-common/package.json +++ b/packages/backend-common/package.json @@ -92,6 +92,7 @@ "minimatch": "^5.0.0", "minimist": "^1.2.5", "morgan": "^1.10.0", + "mysql2": "^2.2.5", "node-fetch": "^2.6.7", "node-forge": "^1.3.1", "pg": "^8.3.0", diff --git a/packages/backend-common/src/database/DatabaseManager.test.ts b/packages/backend-common/src/database/DatabaseManager.test.ts index 69386d77c5143..bf4af17d05954 100644 --- a/packages/backend-common/src/database/DatabaseManager.test.ts +++ b/packages/backend-common/src/database/DatabaseManager.test.ts @@ -425,6 +425,33 @@ describe('DatabaseManager', () => { ); }); + it('generates a database name override when prefix is not explicitly set for mysql', async () => { + const testManager = DatabaseManager.fromConfig( + new ConfigReader({ + backend: { + database: { + client: 'mysql', + connection: { + host: 'localhost', + user: 'foo', + password: 'bar', + database: 'foodb', + }, + }, + }, + }), + ); + + await testManager.forPlugin('testplugin').getClient(); + const mockCalls = mocked(createDatabaseClient).mock.calls.splice(-1); + const [_baseConfig, overrides] = mockCalls[0]; + + expect(overrides).toHaveProperty( + 'connection.database', + expect.stringContaining('backstage_plugin_'), + ); + }); + it('uses values from plugin connection string if top level client should be used', async () => { const pluginId = 'stringoverride'; await manager.forPlugin(pluginId).getClient(); diff --git a/packages/backend-tasks/migrations/20210928160613_init.js b/packages/backend-tasks/migrations/20210928160613_init.js index 21d80ae24ce97..f9900cab11f4f 100644 --- 
a/packages/backend-tasks/migrations/20210928160613_init.js +++ b/packages/backend-tasks/migrations/20210928160613_init.js @@ -26,7 +26,7 @@ exports.up = async function up(knex) { await knex.schema.createTable('backstage_backend_tasks__tasks', table => { table.comment('Tasks used for scheduling work on multiple workers'); table - .text('id') + .string('id') .primary() .notNullable() .comment('The unique ID of this particular task'); diff --git a/packages/backend-tasks/src/migrations.test.ts b/packages/backend-tasks/src/migrations.test.ts index 7e471a1c16665..1d701c971de0d 100644 --- a/packages/backend-tasks/src/migrations.test.ts +++ b/packages/backend-tasks/src/migrations.test.ts @@ -43,7 +43,7 @@ jest.setTimeout(60_000); describe('migrations', () => { const databases = TestDatabases.create({ - ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'], + ids: ['POSTGRES_13', 'POSTGRES_9', 'MYSQL_8', 'SQLITE_3'], }); it.each(databases.eachSupportedId())( diff --git a/packages/backend-tasks/src/tasks/PluginTaskSchedulerJanitor.test.ts b/packages/backend-tasks/src/tasks/PluginTaskSchedulerJanitor.test.ts index 5b5d592b7b1b6..efbad2e8b6e60 100644 --- a/packages/backend-tasks/src/tasks/PluginTaskSchedulerJanitor.test.ts +++ b/packages/backend-tasks/src/tasks/PluginTaskSchedulerJanitor.test.ts @@ -42,6 +42,7 @@ describe('PluginTaskSchedulerJanitor', () => { 'POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', + 'MYSQL_8', ], }); diff --git a/packages/backend-tasks/src/tasks/TaskScheduler.test.ts b/packages/backend-tasks/src/tasks/TaskScheduler.test.ts index 89eff433dbd8d..6aa77dd7fe640 100644 --- a/packages/backend-tasks/src/tasks/TaskScheduler.test.ts +++ b/packages/backend-tasks/src/tasks/TaskScheduler.test.ts @@ -25,7 +25,7 @@ jest.setTimeout(60_000); describe('TaskScheduler', () => { const logger = getVoidLogger(); const databases = TestDatabases.create({ - ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'], + ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', 'MYSQL_8'], }); async function 
createDatabase( diff --git a/packages/backend-tasks/src/tasks/TaskWorker.test.ts b/packages/backend-tasks/src/tasks/TaskWorker.test.ts index fa2577ce539bb..b8b24e045a502 100644 --- a/packages/backend-tasks/src/tasks/TaskWorker.test.ts +++ b/packages/backend-tasks/src/tasks/TaskWorker.test.ts @@ -28,7 +28,7 @@ jest.setTimeout(60_000); describe('TaskWorker', () => { const logger = getVoidLogger(); const databases = TestDatabases.create({ - ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'], + ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', 'MYSQL_8'], }); beforeEach(() => { diff --git a/packages/backend-tasks/src/tasks/TaskWorker.ts b/packages/backend-tasks/src/tasks/TaskWorker.ts index 483fb1a02dde6..a03d1ec0588c0 100644 --- a/packages/backend-tasks/src/tasks/TaskWorker.ts +++ b/packages/backend-tasks/src/tasks/TaskWorker.ts @@ -175,11 +175,15 @@ export class TaskWorker { const time = new CronTime(settings.cadence) .sendAt() .minus({ seconds: 1 }) // immediately, if "* * * * * *" - .toUTC() - .toISO(); - startAt = this.knex.client.config.client.includes('sqlite3') - ? this.knex.raw('datetime(?)', [time]) - : this.knex.raw(`?`, [time]); + .toUTC(); + + if (this.knex.client.config.client.includes('sqlite3')) { + startAt = this.knex.raw('datetime(?)', [time.toISO()]); + } else if (this.knex.client.config.client.includes('mysql')) { + startAt = this.knex.raw(`?`, [time.toSQL({ includeOffset: false })]); + } else { + startAt = this.knex.raw(`?`, [time.toISO()]); + } } else { startAt = this.knex.fn.now(); } @@ -279,11 +283,16 @@ export class TaskWorker { let nextRun: Knex.Raw; if (isCron) { - const time = new CronTime(settings.cadence).sendAt().toUTC().toISO(); + const time = new CronTime(settings.cadence).sendAt().toUTC(); this.logger.debug(`task: ${this.taskId} will next occur around ${time}`); - nextRun = this.knex.client.config.client.includes('sqlite3') - ? 
this.knex.raw('datetime(?)', [time]) - : this.knex.raw(`?`, [time]); + + if (this.knex.client.config.client.includes('sqlite3')) { + nextRun = this.knex.raw('datetime(?)', [time.toISO()]); + } else if (this.knex.client.config.client.includes('mysql')) { + nextRun = this.knex.raw(`?`, [time.toSQL({ includeOffset: false })]); + } else { + nextRun = this.knex.raw(`?`, [time.toISO()]); + } } else { const dt = Duration.fromISO(settings.cadence).as('seconds'); this.logger.debug( diff --git a/packages/backend/package.json b/packages/backend/package.json index 9e0896233d58d..98fa88f169533 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -87,6 +87,7 @@ "express-prom-bundle": "^6.3.6", "express-promise-router": "^4.1.0", "luxon": "^3.0.0", + "mysql2": "^2.2.5", "pg": "^8.3.0", "pg-connection-string": "^2.3.0", "prom-client": "^14.0.1", diff --git a/packages/e2e-test/package.json b/packages/e2e-test/package.json index 48d4643394f35..7e6ebdd749dc6 100644 --- a/packages/e2e-test/package.json +++ b/packages/e2e-test/package.json @@ -35,6 +35,7 @@ "cross-fetch": "^3.1.5", "fs-extra": "10.1.0", "handlebars": "^4.7.3", + "mysql2": "^2.2.5", "pgtools": "^1.0.0", "puppeteer": "^17.0.0", "tree-kill": "^1.2.2" diff --git a/packages/e2e-test/src/commands/run.ts b/packages/e2e-test/src/commands/run.ts index b3fb8d9a3c6de..20b2753f6bce2 100644 --- a/packages/e2e-test/src/commands/run.ts +++ b/packages/e2e-test/src/commands/run.ts @@ -31,7 +31,10 @@ import { waitForExit, print, } from '../lib/helpers'; + +import mysql from 'mysql2/promise'; import pgtools from 'pgtools'; + import { findPaths } from '@backstage/cli-common'; // eslint-disable-next-line no-restricted-syntax @@ -66,9 +69,12 @@ export async function run() { print('Starting the app'); await testAppServe(pluginId, appDir); - if (Boolean(process.env.POSTGRES_USER)) { - print('Testing the PostgreSQL backend startup'); - await preCleanPostgres(); + if ( + Boolean(process.env.POSTGRES_USER) || + 
Boolean(process.env.MYSQL_CONNECTION) + ) { + print('Testing the database backend startup'); + await preCleanDatabase(); const appConfig = path.resolve(appDir, 'app-config.yaml'); const productionConfig = path.resolve(appDir, 'app-config.production.yaml'); await testBackendStart( @@ -79,7 +85,7 @@ export async function run() { productionConfig, ); } - print('Testing the SQLite backend startup'); + print('Testing the Database backend startup'); await testBackendStart(appDir); if (process.env.CI) { @@ -427,24 +433,39 @@ async function testAppServe(pluginId: string, appDir: string) { } /** Drops PG databases */ -async function dropDB(database: string) { - const config = { - host: process.env.POSTGRES_HOST, - port: process.env.POSTGRES_PORT, - user: process.env.POSTGRES_USER, - password: process.env.POSTGRES_PASSWORD, - }; - +async function dropDB(database: string, client: string) { try { - await pgtools.dropdb(config, database); + if (client === 'postgres') { + const config = { + host: process.env.POSTGRES_HOST, + port: process.env.POSTGRES_PORT, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, + }; + await pgtools.dropdb(config, database); + } else if (client === 'mysql') { + const connectionString = process.env.MYSQL_CONNECTION ?? 
''; + const connection = await mysql.createConnection(connectionString); + await connection.query('DROP DATABASE ??', [database]); + } } catch (_) { - /* do nothing*/ + /* do nothing */ } } /** Clean remnants from prior e2e runs */ -async function preCleanPostgres() { +async function preCleanDatabase() { print('Dropping old DBs'); + if (Boolean(process.env.POSTGRES_HOST)) { + await dropClientDatabases('postgres'); + } + if (Boolean(process.env.MYSQL_CONNECTION)) { + await dropClientDatabases('mysql'); + } + print('Dropped DBs'); +} + +async function dropClientDatabases(client: string) { await Promise.all( [ 'catalog', @@ -454,9 +475,8 @@ 'proxy', 'techdocs', 'search', - ].map(name => dropDB(`backstage_plugin_${name}`)), + ].map(name => dropDB(`backstage_plugin_${name}`, client)), ); - print('Created DBs'); } /** diff --git a/plugins/app-backend/migrations/20211229105307_init.js b/plugins/app-backend/migrations/20211229105307_init.js index 29e37b35f74b4..25ed48c307952 100644 --- a/plugins/app-backend/migrations/20211229105307_init.js +++ b/plugins/app-backend/migrations/20211229105307_init.js @@ -20,11 +20,20 @@ * @param {import('knex').Knex} knex */ exports.up = async function up(knex) { - return knex.schema.createTable('static_assets_cache', table => { + const isMySQL = knex.client.config.client.includes('mysql'); + await knex.schema.createTable('static_assets_cache', table => { table.comment( 'A cache of static assets that where previously deployed and may still be lazy-loaded by clients', ); - table.text('path').primary().notNullable().comment('The path of the file'); + if (!isMySQL) { + table + .text('path') + .primary() + .notNullable() + .comment('The path of the file'); + } else { + table.text('path').notNullable().comment('The path of the file'); + } table .dateTime('last_modified_at') .defaultTo(knex.fn.now()) @@ -35,6 +44,12 @@ table.binary('content').notNullable().comment('The 
asset content'); table.index('last_modified_at', 'static_asset_cache_last_modified_at_idx'); }); + // specifically for mysql specify a unique index up to 254 characters(mysql limit) + if (isMySQL) { + await knex.schema.raw( + 'create unique index static_assets_cache_path_idx on static_assets_cache(path(254));', + ); + } }; /** diff --git a/plugins/app-backend/src/lib/assets/StaticAssetsStore.test.ts b/plugins/app-backend/src/lib/assets/StaticAssetsStore.test.ts index 8a0935d4e3217..68fe4e22872cb 100644 --- a/plugins/app-backend/src/lib/assets/StaticAssetsStore.test.ts +++ b/plugins/app-backend/src/lib/assets/StaticAssetsStore.test.ts @@ -37,7 +37,7 @@ jest.setTimeout(60_000); describe('StaticAssetsStore', () => { const databases = TestDatabases.create({ - ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'], + ids: ['MYSQL_8', 'POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'], }); it.each(databases.eachSupportedId())( @@ -153,14 +153,18 @@ describe('StaticAssetsStore', () => { content: async () => Buffer.alloc(0), }, ]); - + // interval check for postgresql + let hourPast = `now() + interval '-3600 seconds'`; + if (knex.client.config.client.includes('mysql')) { + hourPast = `date_sub(now(), interval 3600 second)`; + } else if (knex.client.config.client.includes('sqlite3')) { + hourPast = `datetime('now', '-3600 seconds')`; + } // Rewrite modified time of "old" to be 1h in the past const updated = await knex('static_assets_cache') .where({ path: 'old' }) .update({ - last_modified_at: knex.client.config.client.includes('sqlite3') - ? 
knex.raw(`datetime('now', '-3600 seconds')`) - : knex.raw(`now() + interval '-3600 seconds'`), + last_modified_at: knex.raw(hourPast), }); expect(updated).toBe(1); diff --git a/plugins/app-backend/src/lib/assets/StaticAssetsStore.ts b/plugins/app-backend/src/lib/assets/StaticAssetsStore.ts index 6905d63779bba..9f6ada717d7de 100644 --- a/plugins/app-backend/src/lib/assets/StaticAssetsStore.ts +++ b/plugins/app-backend/src/lib/assets/StaticAssetsStore.ts @@ -138,14 +138,20 @@ export class StaticAssetsStore implements StaticAssetProvider { */ async trimAssets(options: { maxAgeSeconds: number }) { const { maxAgeSeconds } = options; + let lastModifiedInterval = this.#db.raw( + `now() + interval '${-maxAgeSeconds} seconds'`, + ); + if (this.#db.client.config.client.includes('mysql')) { + lastModifiedInterval = this.#db.raw( + `date_sub(now(), interval ${maxAgeSeconds} second)`, + ); + } else if (this.#db.client.config.client.includes('sqlite3')) { + lastModifiedInterval = this.#db.raw(`datetime('now', ?)`, [ + `-${maxAgeSeconds} seconds`, + ]); + } await this.#db('static_assets_cache') - .where( - 'last_modified_at', - '<=', - this.#db.client.config.client.includes('sqlite3') - ? 
this.#db.raw(`datetime('now', ?)`, [`-${maxAgeSeconds} seconds`]) - : this.#db.raw(`now() + interval '${-maxAgeSeconds} seconds'`), - ) + .where('last_modified_at', '<=', lastModifiedInterval) .delete(); } } diff --git a/plugins/bazaar-backend/migrations/20211014144054_init.js b/plugins/bazaar-backend/migrations/20211014144054_init.js index 503812225d43d..480771acb415c 100644 --- a/plugins/bazaar-backend/migrations/20211014144054_init.js +++ b/plugins/bazaar-backend/migrations/20211014144054_init.js @@ -23,7 +23,7 @@ exports.up = async function up(knex) { await knex.schema.createTable('metadata', table => { table.comment('The table of Bazaar metadata'); table - .text('entity_ref') + .string('entity_ref') .notNullable() .unique() .comment('The ref of the entity'); @@ -49,7 +49,7 @@ exports.up = async function up(knex) { await knex.schema.createTable('members', table => { table.comment('The table of Bazaar members'); table - .text('entity_ref') + .string('entity_ref') .notNullable() .references('metadata.entity_ref') .onDelete('CASCADE') diff --git a/plugins/bazaar-backend/migrations/20211117092217_optional_entity_ref.js b/plugins/bazaar-backend/migrations/20211117092217_optional_entity_ref.js index 0953d5c2b192b..ae2618fbef573 100644 --- a/plugins/bazaar-backend/migrations/20211117092217_optional_entity_ref.js +++ b/plugins/bazaar-backend/migrations/20211117092217_optional_entity_ref.js @@ -89,9 +89,11 @@ exports.up = async function up(knex) { await knex.schema.alterTable('members', table => { table .integer('item_id') + .unsigned() .references('metadata.id') .onDelete('CASCADE') .comment('Id of the associated item'); + table.dropForeign('entity_ref'); table.dropColumn('entity_ref'); }); } diff --git a/plugins/bazaar-backend/src/service/DatabaseHandler.test.ts b/plugins/bazaar-backend/src/service/DatabaseHandler.test.ts index 0c78dbdd87125..4e46581240e03 100644 --- a/plugins/bazaar-backend/src/service/DatabaseHandler.test.ts +++ 
b/plugins/bazaar-backend/src/service/DatabaseHandler.test.ts @@ -36,7 +36,7 @@ jest.setTimeout(60_000); describe('DatabaseHandler', () => { const databases = TestDatabases.create({ - ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'], + ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', 'MYSQL_8'], }); function createDatabaseManager( diff --git a/plugins/catalog-backend-module-incremental-ingestion/src/module/WrapperProviders.test.ts b/plugins/catalog-backend-module-incremental-ingestion/src/module/WrapperProviders.test.ts index 0d910f664df42..1f318c259082b 100644 --- a/plugins/catalog-backend-module-incremental-ingestion/src/module/WrapperProviders.test.ts +++ b/plugins/catalog-backend-module-incremental-ingestion/src/module/WrapperProviders.test.ts @@ -24,7 +24,7 @@ import { WrapperProviders } from './WrapperProviders'; describe('WrapperProviders', () => { const applyDatabaseMigrations = jest.fn(); const databases = TestDatabases.create({ - ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3'], + ids: ['POSTGRES_13', 'POSTGRES_9', 'SQLITE_3', 'MYSQL_8'], }); const config = new ConfigReader({}); const logger = getVoidLogger(); diff --git a/plugins/catalog-backend/migrations/20210302150147_refresh_state.js b/plugins/catalog-backend/migrations/20210302150147_refresh_state.js index 1c8081c0ef9b2..f60066772789a 100644 --- a/plugins/catalog-backend/migrations/20210302150147_refresh_state.js +++ b/plugins/catalog-backend/migrations/20210302150147_refresh_state.js @@ -37,7 +37,7 @@ exports.up = async function up(knex) { .notNullable() .comment('The unprocessed entity (in original form) as JSON'); table - .text('processed_entity') + .text('processed_entity', 'longtext') .nullable() .comment('The processed entity (not yet stitched) as JSON'); table @@ -83,7 +83,7 @@ exports.up = async function up(knex) { .notNullable() .comment('Random value representing a unique stitch attempt ticket'); table - .text('final_entity') + .text('final_entity', 'longtext') .nullable() .comment('The JSON 
encoded final entity'); table.index('entity_id', 'final_entities_entity_id_idx'); diff --git a/plugins/catalog-backend/src/database/DefaultProcessingDatabase.ts b/plugins/catalog-backend/src/database/DefaultProcessingDatabase.ts index 4959a33eef926..61a062bc4a542 100644 --- a/plugins/catalog-backend/src/database/DefaultProcessingDatabase.ts +++ b/plugins/catalog-backend/src/database/DefaultProcessingDatabase.ts @@ -138,6 +138,7 @@ export class DefaultProcessingDatabase implements ProcessingDatabase { type, }), ); + await tx.batchInsert( 'relations', this.deduplicateRelations(relationRows), diff --git a/plugins/code-coverage-backend/migrations/20210302_init.js b/plugins/code-coverage-backend/migrations/20210302_init.js index a794b6a60cf86..35eb91541b3af 100644 --- a/plugins/code-coverage-backend/migrations/20210302_init.js +++ b/plugins/code-coverage-backend/migrations/20210302_init.js @@ -34,7 +34,7 @@ exports.up = async function up(knex) { .comment('An insert counter to ensure ordering'); table.uuid('id').notNullable().comment('The ID of the code coverage'); table - .text('entity') + .string('entity') .notNullable() .comment('The entity ref that this code coverage applies to'); table diff --git a/plugins/linguist-backend/migrations/20221115_init.js b/plugins/linguist-backend/migrations/20221115_init.js index 3bcf99e08b701..359f7c83591c0 100644 --- a/plugins/linguist-backend/migrations/20221115_init.js +++ b/plugins/linguist-backend/migrations/20221115_init.js @@ -32,7 +32,7 @@ exports.up = async function up(knex) { .comment('An insert counter to ensure ordering'); table.uuid('id').notNullable().comment('The ID of the Linguist result'); table - .text('entity_ref') + .string('entity_ref') .unique() .notNullable() .comment('The entity ref that this Linguist result applies to'); diff --git a/plugins/scaffolder-backend/src/scaffolder/tasks/DatabaseTaskStore.ts b/plugins/scaffolder-backend/src/scaffolder/tasks/DatabaseTaskStore.ts index f3cb9b645e219..c96bd6b9cda27 
100644 --- a/plugins/scaffolder-backend/src/scaffolder/tasks/DatabaseTaskStore.ts +++ b/plugins/scaffolder-backend/src/scaffolder/tasks/DatabaseTaskStore.ts @@ -261,18 +261,21 @@ export class DatabaseTaskStore implements TaskStore { tasks: { taskId: string }[]; }> { const { timeoutS } = options; - + let heartbeatInterval = this.db.raw(`? - interval '${timeoutS} seconds'`, [ + this.db.fn.now(), + ]); + if (this.db.client.config.client.includes('mysql')) { + heartbeatInterval = this.db.raw( + `date_sub(now(), interval ${timeoutS} second)`, + ); + } else if (this.db.client.config.client.includes('sqlite3')) { + heartbeatInterval = this.db.raw(`datetime('now', ?)`, [ + `-${timeoutS} seconds`, + ]); + } const rawRows = await this.db('tasks') .where('status', 'processing') - .andWhere( - 'last_heartbeat_at', - '<=', - this.db.client.config.client.includes('sqlite3') - ? this.db.raw(`datetime('now', ?)`, [`-${timeoutS} seconds`]) - : this.db.raw(`? - interval '${timeoutS} seconds'`, [ - this.db.fn.now(), - ]), - ); + .andWhere('last_heartbeat_at', '<=', heartbeatInterval); const tasks = rawRows.map(row => ({ taskId: row.id, })); diff --git a/plugins/tech-insights-backend/migrations/202109061111_fact_schemas.js b/plugins/tech-insights-backend/migrations/202109061111_fact_schemas.js index 6094daa4b7711..2521f6d8bb9f9 100644 --- a/plugins/tech-insights-backend/migrations/202109061111_fact_schemas.js +++ b/plugins/tech-insights-backend/migrations/202109061111_fact_schemas.js @@ -25,7 +25,7 @@ exports.up = async function up(knex) { 'The table for tech insight fact schemas. 
Containing a versioned data model definition for a collection of facts.', ); table - .text('id') + .string('id') .notNullable() .comment('Identifier of the fact retriever plugin/package'); table diff --git a/plugins/tech-insights-backend/migrations/202109061212_facts.js b/plugins/tech-insights-backend/migrations/202109061212_facts.js index 0bdcdc0da1725..237d82686910e 100644 --- a/plugins/tech-insights-backend/migrations/202109061212_facts.js +++ b/plugins/tech-insights-backend/migrations/202109061212_facts.js @@ -25,7 +25,7 @@ exports.up = async function up(knex) { 'The table for tech insight fact collections. Contains facts for individual fact retriever namespace/ref.', ); table - .text('id') + .string('id') .notNullable() .comment('Unique identifier of the fact retriever plugin/package'); table @@ -40,7 +40,7 @@ exports.up = async function up(knex) { .notNullable() .comment('The timestamp when this entry was created'); table - .text('entity') + .string('entity') .notNullable() .comment('Identifier of the entity these facts relate to'); table diff --git a/yarn.lock b/yarn.lock index 987d57bc57229..186d4ce69300a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -23698,6 +23698,7 @@ __metadata: cross-fetch: ^3.1.5 fs-extra: 10.1.0 handlebars: ^4.7.3 + mysql2: ^2.2.5 nodemon: ^3.0.1 pgtools: ^1.0.0 puppeteer: ^17.0.0 @@ -25300,6 +25301,7 @@ __metadata: express-prom-bundle: ^6.3.6 express-promise-router: ^4.1.0 luxon: ^3.0.0 + mysql2: ^2.2.5 pg: ^8.3.0 pg-connection-string: ^2.3.0 prom-client: ^14.0.1