diff --git a/lib/EntityManager.ts b/lib/EntityManager.ts index a3eed5e6c728..63c5a9ddd9c4 100644 --- a/lib/EntityManager.ts +++ b/lib/EntityManager.ts @@ -5,7 +5,7 @@ import { FilterQuery, IDatabaseDriver } from './drivers'; import { EntityData, EntityMetadata, EntityName, IEntity, IEntityType, IPrimaryKey } from './decorators'; import { QueryBuilder, QueryOrderMap, SmartQueryHelper } from './query'; import { MetadataStorage } from './metadata'; -import { Connection } from './connections'; +import { Connection, Transaction } from './connections'; export class EntityManager { @@ -15,6 +15,7 @@ export class EntityManager { private readonly metadata = MetadataStorage.getMetadata(); private readonly unitOfWork = new UnitOfWork(this); private readonly entityFactory = new EntityFactory(this.unitOfWork, this.driver, this.config); + private transactionContext: Transaction; constructor(readonly config: Configuration, private readonly driver: IDatabaseDriver) { } @@ -45,7 +46,7 @@ export class EntityManager { createQueryBuilder(entityName: EntityName, alias?: string): QueryBuilder { entityName = Utils.className(entityName); - return new QueryBuilder(entityName, this.metadata, this.driver, alias); + return new QueryBuilder(entityName, this.metadata, this.driver, this.transactionContext, alias); } async find>(entityName: EntityName, where?: FilterQuery, options?: FindOptions): Promise; @@ -55,7 +56,7 @@ export class EntityManager { where = SmartQueryHelper.processWhere(where, entityName); this.validator.validateParams(where); const options = Utils.isObject(populate) ? populate : { populate, orderBy, limit, offset }; - const results = await this.driver.find(entityName, where, options.populate || [], options.orderBy || {}, options.limit, options.offset); + const results = await this.driver.find(entityName, where, options.populate || [], options.orderBy || {}, options.limit, options.offset, this.transactionContext); if (results.length === 0) { return []; @@ -89,7 +90,7 @@ export class EntityManager { } this.validator.validateParams(where); - const data = await this.driver.findOne(entityName, where, options.populate, options.orderBy, options.fields, options.lockMode); + const data = await this.driver.findOne(entityName, where, options.populate, options.orderBy, options.fields, options.lockMode, this.transactionContext); if (!data) { return null; @@ -101,26 +102,15 @@ export class EntityManager { return entity; } - async beginTransaction(): Promise { - await this.driver.beginTransaction(); - } - - async commit(): Promise { - await this.driver.commit(); - } - - async rollback(): Promise { - await this.driver.rollback(); - } - - async transactional(cb: (em: EntityManager) => Promise): Promise { + async transactional(cb: (em: EntityManager) => Promise, ctx = this.transactionContext): Promise { const em = this.fork(false); - await em.getDriver().transactional(async () => { + await em.getConnection().transactional(async trx => { + em.transactionContext = trx; const ret = await cb(em); await em.flush(); return ret; - }); + }, ctx); } async lock(entity: IEntity, lockMode: LockMode, lockVersion?: number | Date): Promise { @@ -131,7 +121,7 @@ export class EntityManager { entityName = Utils.className(entityName); data = SmartQueryHelper.processParams(data); this.validator.validateParams(data, 'insert data'); - const res = await this.driver.nativeInsert(entityName, data); + const res = await this.driver.nativeInsert(entityName, data, this.transactionContext); return res.insertId; } @@ -142,7 +132,7 @@ export class 
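Usage sketch (not part of the diff): with the explicit beginTransaction()/commit()/rollback() methods removed, em.transactional() is the single entry point. It forks the EntityManager, opens a knex transaction on the connection and stores it as the fork's transactionContext, so every query issued through the fork runs on that transaction. The Author entity and the orm bootstrap below are illustrative assumptions.

import { MikroORM } from 'mikro-orm';
import { Author } from './entities/Author'; // hypothetical example entity

export async function renameAuthor(orm: MikroORM, id: number, name: string): Promise<void> {
  await orm.em.transactional(async em => {
    const author = await em.findOne(Author, id);
    author!.name = name;
    // em.flush() runs automatically before the transaction commits;
    // a thrown error inside the callback rolls the whole transaction back
  });
}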
EntityManager { where = SmartQueryHelper.processWhere(where as FilterQuery, entityName); this.validator.validateParams(data, 'update data'); this.validator.validateParams(where, 'update condition'); - const res = await this.driver.nativeUpdate(entityName, where, data); + const res = await this.driver.nativeUpdate(entityName, where, data, this.transactionContext); return res.affectedRows; } @@ -151,7 +141,7 @@ export class EntityManager { entityName = Utils.className(entityName); where = SmartQueryHelper.processWhere(where as FilterQuery, entityName); this.validator.validateParams(where, 'delete condition'); - const res = await this.driver.nativeDelete(entityName, where); + const res = await this.driver.nativeDelete(entityName, where, this.transactionContext); return res.affectedRows; } @@ -218,7 +208,8 @@ export class EntityManager { entityName = Utils.className(entityName); where = SmartQueryHelper.processWhere(where as FilterQuery, entityName); this.validator.validateParams(where); - return this.driver.count(entityName, where); + + return this.driver.count(entityName, where, this.transactionContext); } persist(entity: IEntity | IEntity[], flush = this.config.get('autoFlush')): void | Promise { @@ -326,6 +317,14 @@ export class EntityManager { return em.entityFactory; } + isInTransaction(): boolean { + return !!this.transactionContext; + } + + getTransactionContext(): T { + return this.transactionContext as T; + } + private checkLockRequirements(mode: LockMode | undefined, meta: EntityMetadata): void { if (!mode) { return; @@ -335,7 +334,7 @@ export class EntityManager { throw ValidationError.notVersioned(meta); } - if ([LockMode.PESSIMISTIC_READ, LockMode.PESSIMISTIC_WRITE].includes(mode) && !this.getDriver().isInTransaction()) { + if ([LockMode.PESSIMISTIC_READ, LockMode.PESSIMISTIC_WRITE].includes(mode) && !this.isInTransaction()) { throw ValidationError.transactionRequired(); } } diff --git a/lib/connections/AbstractSqlConnection.ts b/lib/connections/AbstractSqlConnection.ts new file mode 100644 index 000000000000..6562c6638c80 --- /dev/null +++ b/lib/connections/AbstractSqlConnection.ts @@ -0,0 +1,100 @@ +import * as Knex from 'knex'; +import { Config, QueryBuilder, Raw, Transaction } from 'knex'; +import { readFile } from 'fs-extra'; + +import { Connection, QueryResult } from './Connection'; +import { Utils } from '../utils'; +import { EntityData, IEntity } from '../decorators'; + +export abstract class AbstractSqlConnection extends Connection { + + protected client: Knex; + + getKnex(): Knex { + return this.client; + } + + async close(force?: boolean): Promise { + await this.client.destroy(); + } + + async isConnected(): Promise { + try { + await this.client.raw('select 1'); + return true; + } catch { + return false; + } + } + + async transactional(cb: (trx: Transaction) => Promise, ctx?: Transaction): Promise { + await (ctx || this.client).transaction(async trx => { + try { + const ret = await cb(trx); + await trx.commit(); + + return ret; + } catch (e) { + await trx.rollback(e); + throw e; + } + }); + } + + async execute | EntityData[]>(queryOrKnex: string | QueryBuilder | Raw, params: any[] = [], method: 'all' | 'get' | 'run' = 'all'): Promise { + if (Utils.isObject(queryOrKnex)) { + return await this.executeKnex(queryOrKnex, method); + } + + const res = await this.executeQuery(queryOrKnex, params, () => this.client.raw(queryOrKnex, params)); + return this.transformRawResult(res, method); + } + + async loadFile(path: string): Promise { + const buf = await readFile(path); + await 
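Sketch of the knex-backed connection API introduced by AbstractSqlConnection above: execute() accepts either a SQL string with bound params or a knex QueryBuilder/Raw, and getKnex() exposes the underlying knex instance. The loose cast and the table/column names are assumptions for illustration.

import { MikroORM } from 'mikro-orm';

export async function rawQueries(orm: MikroORM) {
  const conn = orm.em.getConnection() as any; // the SQL drivers return an AbstractSqlConnection
  const knex = conn.getKnex();

  // plain SQL string + bound params, 'all' returns every row
  const books = await conn.execute('select * from book where author_id = ?', [1], 'all');

  // a knex query builder – its own bindings are used, so the params array stays empty
  const first = await conn.execute(knex('book').where({ author_id: 1 }), [], 'get');

  return { books, first };
}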
this.client.raw(buf.toString()); + } + + protected createKnexClient(type: string): Knex { + return Knex(this.getKnexOptions(type)) + .on('query', data => { + if (!data.__knexQueryUid) { + this.logQuery(data.sql.toLowerCase().replace(/;$/, '')); + } + }); + } + + protected getKnexOptions(type: string): Config { + return { + client: type, + connection: this.getConnectionOptions(), + pool: this.config.get('pool'), + }; + } + + protected async executeKnex(qb: QueryBuilder | Raw, method: 'all' | 'get' | 'run'): Promise { + const q = qb.toSQL(); + const query = q.toNative ? q.toNative() : q; + const res = await this.executeQuery(query.sql, query.bindings, () => qb); + + return this.transformKnexResult(res, method); + } + + protected transformKnexResult(res: any, method: 'all' | 'get' | 'run'): QueryResult | any | any[] { + if (method === 'all') { + return res; + } + + if (method === 'get') { + return res[0]; + } + + const affectedRows = typeof res === 'number' ? res : 0; + const insertId = typeof res[0] === 'number' ? res[0] : 0; + + return { insertId, affectedRows, row: res[0] }; + } + + protected abstract transformRawResult(res: any, method: 'all' | 'get' | 'run'): T; + +} diff --git a/lib/connections/Connection.ts b/lib/connections/Connection.ts index 956a6d40bd85..a2656911e8c2 100644 --- a/lib/connections/Connection.ts +++ b/lib/connections/Connection.ts @@ -1,5 +1,6 @@ import { URL } from 'url'; -import { Configuration } from '../utils'; +import { Transaction as KnexTransaction } from 'knex'; +import { Configuration, Utils } from '../utils'; import { MetadataStorage } from '../metadata'; export abstract class Connection { @@ -30,24 +31,7 @@ export abstract class Connection { */ abstract getDefaultClientUrl(): string; - /** - * Begins a transaction (if supported) - */ - async beginTransaction(savepoint?: string): Promise { - throw new Error(`Transactions are not supported by current driver`); - } - - /** - * Commits statements in a transaction - */ - async commit(savepoint?: string): Promise { - throw new Error(`Transactions are not supported by current driver`); - } - - /** - * Rollback changes in a transaction - */ - async rollback(savepoint?: string): Promise { + async transactional(cb: (trx: Transaction) => Promise, ctx?: Transaction): Promise { throw new Error(`Transactions are not supported by current driver`); } @@ -73,25 +57,15 @@ export abstract class Connection { } protected async executeQuery(query: string, params: any[], cb: () => Promise): Promise { - try { - const now = Date.now(); - const res = await cb(); - this.logQuery(query, Date.now() - now); + const now = Date.now(); + const res = await cb(); + this.logQuery(query, Date.now() - now); - return res; - } catch (e) { - e.message += `\n in query: ${query}`; - - if (params && params.length) { - e.message += `\n with params: ${JSON.stringify(params)}`; - } - - throw e; - } + return res; } - protected logQuery(query: string, took: number): void { - this.logger.debug(`[query-logger] ${query} [took ${took} ms]`); + protected logQuery(query: string, took?: number): void { + this.logger.debug(`[query-logger] ${query}` + (Utils.isDefined(took) ? 
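Because Connection.transactional() now accepts an optional parent context – and EntityManager.transactional() defaults its ctx argument to the current transactionContext – a nested call joins the outer knex transaction instead of opening a second one (knex handles the nesting, typically as a savepoint). A sketch, with the Author entity and its constructor assumed:

import { MikroORM } from 'mikro-orm';
import { Author } from './entities/Author'; // hypothetical example entity

export async function nestedUnitsOfWork(orm: MikroORM): Promise<void> {
  await orm.em.transactional(async em => {
    em.persist(new Author('outer'), false);
    await em.flush();

    await em.transactional(async nested => {
      // runs on the same knex transaction as the outer block
      nested.persist(new Author('inner'), false);
      await nested.flush();
    });
  });
}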
` [took ${took} ms]` : '')); } } @@ -109,3 +83,5 @@ export interface ConnectionConfig { password?: string; database?: string; } + +export type Transaction = KnexTransaction; diff --git a/lib/connections/MySqlConnection.ts b/lib/connections/MySqlConnection.ts index 529e22ddd3fd..1050e9545677 100644 --- a/lib/connections/MySqlConnection.ts +++ b/lib/connections/MySqlConnection.ts @@ -1,56 +1,18 @@ -import { Connection as MySql2Connection, ConnectionOptions, createConnection } from 'mysql2/promise'; -import { readFile } from 'fs-extra'; -import { Connection, QueryResult } from './Connection'; +import { MySqlConnectionConfig } from 'knex'; +import { AbstractSqlConnection } from './AbstractSqlConnection'; -export class MySqlConnection extends Connection { - - protected client: MySql2Connection; +export class MySqlConnection extends AbstractSqlConnection { async connect(): Promise { - this.client = await createConnection(this.getConnectionOptions()); - } - - async close(force?: boolean): Promise { - await this.client.end({ force }); - } - - async isConnected(): Promise { - try { - await this.client.query('SELECT 1'); - return true; - } catch { - return false; - } - } - - async beginTransaction(): Promise { - await this.query('START TRANSACTION'); - } - - async commit(): Promise { - await this.query('COMMIT'); - } - - async rollback(): Promise { - await this.query('ROLLBACK'); + this.client = this.createKnexClient('mysql2'); } getDefaultClientUrl(): string { return 'mysql://root@127.0.0.1:3306'; } - async execute(query: string, params: any[] = [], method: 'all' | 'get' | 'run' = 'all'): Promise { - const res = await this.executeQuery(query, params, () => this.client.execute(query, params)); - - if (method === 'get') { - return (res as QueryResult[][])[0][0]; - } - - return res[0]; - } - - getConnectionOptions(): ConnectionOptions { - const ret: ConnectionOptions = super.getConnectionOptions(); + getConnectionOptions(): MySqlConnectionConfig { + const ret: MySqlConnectionConfig = super.getConnectionOptions(); if (this.config.get('multipleStatements')) { ret.multipleStatements = this.config.get('multipleStatements'); @@ -59,14 +21,19 @@ export class MySqlConnection extends Connection { return ret; } - async loadFile(path: string): Promise { - await this.client.query((await readFile(path)).toString()); - } + protected transformRawResult(res: any, method: 'all' | 'get' | 'run'): T { + if (method === 'run' && res[0].constructor.name === 'ResultSetHeader') { + return { + insertId: res[0].insertId, + affectedRows: res[0].affectedRows, + } as unknown as T; + } + + if (method === 'get') { + return res[0][0]; + } - private async query(sql: string): Promise { - const now = Date.now(); - await this.client.query(sql); - this.logQuery(sql, Date.now() - now); + return res[0]; } } diff --git a/lib/connections/PostgreSqlConnection.ts b/lib/connections/PostgreSqlConnection.ts index 4e89ad37ff92..4726e9da6d67 100644 --- a/lib/connections/PostgreSqlConnection.ts +++ b/lib/connections/PostgreSqlConnection.ts @@ -1,69 +1,29 @@ -import { Client } from 'pg'; -import { readFile } from 'fs-extra'; -import { Connection, QueryResult } from './Connection'; -import { EntityData, IEntity } from '../decorators'; +import { AbstractSqlConnection } from './AbstractSqlConnection'; -export class PostgreSqlConnection extends Connection { - - protected client: Client; +export class PostgreSqlConnection extends AbstractSqlConnection { async connect(): Promise { - this.client = new Client(this.getConnectionOptions()); - await 
this.client.connect(); - } - - async close(force?: boolean): Promise { - await this.client.end(); - } - - async isConnected(): Promise { - try { - await this.client.query('SELECT 1'); - return true; - } catch { - return false; - } - } - - async beginTransaction(savepoint?: string): Promise { - await this.execute(savepoint ? `SAVEPOINT ${savepoint}` : 'START TRANSACTION', [], 'run'); - } - - async commit(savepoint?: string): Promise { - await this.execute(savepoint ? `RELEASE SAVEPOINT ${savepoint}` : 'COMMIT', [], 'run'); - } - - async rollback(savepoint?: string): Promise { - await this.execute(savepoint ? `ROLLBACK TO SAVEPOINT ${savepoint}` : 'ROLLBACK', [], 'run'); + this.client = this.createKnexClient('pg'); } getDefaultClientUrl(): string { return 'postgre://postgres@127.0.0.1:5432'; } - async execute(query: string, params: any[] = [], method: 'all' | 'get' | 'run' = 'all'): Promise { - const res = await this.executeQuery(query, params, () => this.client.query(query, params)); - return this.transformResult(res, method); - } - - async loadFile(path: string): Promise { - await this.client.query((await readFile(path)).toString()); - } - - private transformResult(res: any, method: 'all' | 'get' | 'run'): QueryResult | EntityData | EntityData[] { + protected transformRawResult(res: any, method: 'all' | 'get' | 'run'): T { if (method === 'get') { return res.rows[0]; } - if (method === 'run') { - return { - affectedRows: res.rowCount || 0, - insertId: res.rows[0] ? res.rows[0].id : 0, - row: res.rows[0], - }; + if (method === 'all') { + return res.rows; } - return res.rows; + return { + affectedRows: res.rowCount, + insertId: res.rows[0] ? res.rows[0].id : 0, + row: res.rows[0], + } as unknown as T; } } diff --git a/lib/connections/SqliteConnection.ts b/lib/connections/SqliteConnection.ts index fde849fc8fbb..7c31896e1b90 100644 --- a/lib/connections/SqliteConnection.ts +++ b/lib/connections/SqliteConnection.ts @@ -1,86 +1,109 @@ -import * as sqlite from 'sqlite'; -import { Database } from 'sqlite'; import { readFile } from 'fs-extra'; +import { Config } from 'knex'; +const Bluebird = require('bluebird'); -import { Connection, QueryResult } from './Connection'; -import { EntityData, IEntity } from '../decorators'; +import { AbstractSqlConnection } from './AbstractSqlConnection'; -export class SqliteConnection extends Connection { - - protected client: SqliteDatabase; +export class SqliteConnection extends AbstractSqlConnection { async connect(): Promise { - this.client = await sqlite.open(this.config.get('dbName')) as SqliteDatabase; - await this.client.exec('PRAGMA foreign_keys = ON'); + this.client = this.createKnexClient(this.getPatchedDialect()); + await this.client.raw('pragma foreign_keys = on'); } - async close(force?: boolean): Promise { - await this.client.close(); + getDefaultClientUrl(): string { + return ''; } - async isConnected(): Promise { - return this.client['driver']['open']; + getClientUrl(): string { + return ''; } - async beginTransaction(savepoint?: string): Promise { - await this.execute(savepoint ? `SAVEPOINT ${savepoint}` : 'BEGIN', [], 'run'); + async loadFile(path: string): Promise { + const conn = await this.client.client.acquireConnection(); + await conn.exec((await readFile(path)).toString()); + await this.client.client.releaseConnection(conn); } - async commit(savepoint?: string): Promise { - await this.execute(savepoint ? 
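The SQL connections now normalise their native results through transformRawResult(), so a 'run' query yields the same QueryResult shape (insertId, affectedRows and, where available, row) whether the client is mysql2, pg or sqlite. A sketch – the Author entity and its fields are assumptions:

import { MikroORM } from 'mikro-orm';
import { Author } from './entities/Author'; // hypothetical example entity

export async function insertExamples(orm: MikroORM) {
  // EntityManager.nativeInsert() resolves to just the primary key…
  const id = await orm.em.nativeInsert(Author, { name: 'Jon Snow', email: 'snow@wall.st' });

  // …while the driver-level call resolves to the full normalised QueryResult
  const res = await orm.em.getDriver().nativeInsert('Author', { name: 'Arya Stark', email: 'arya@wall.st' });
  console.log(res.insertId, res.affectedRows);

  return id;
}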
`RELEASE SAVEPOINT ${savepoint}` : 'COMMIT', [], 'run'); + protected getKnexOptions(type: string): Config { + return { + client: type, + connection: { + filename: this.config.get('dbName'), + }, + useNullAsDefault: true, + }; } - async rollback(savepoint?: string): Promise { - await this.execute(savepoint ? `ROLLBACK TO SAVEPOINT ${savepoint}` : 'ROLLBACK', [], 'run'); - } + protected transformRawResult(res: any, method: 'all' | 'get' | 'run'): T { + if (method === 'get') { + return res[0]; + } - getDefaultClientUrl(): string { - return ''; - } + if (method === 'all') { + return res; + } - getClientUrl(): string { - return ''; + return { + insertId: res.lastID, + affectedRows: res.changes, + } as unknown as T; } - async execute(query: string, params: any[] = [], method: 'all' | 'get' | 'run' = 'all'): Promise { - params = params.map(p => { - if (p instanceof Date) { - p = p.toISOString(); - } + /** + * monkey patch knex' sqlite dialect so it returns inserted id when doing raw insert query + */ + private getPatchedDialect() { + const dialect = require('knex/lib/dialects/sqlite3/index.js'); - if (typeof p === 'boolean') { - p = +p; + const processResponse = dialect.prototype.processResponse; + dialect.prototype.processResponse = (obj: any, runner: any) => { + if (obj.method === 'raw' && obj.sql.trim().match('^insert into|update|delete')) { + return obj.context; } - return p; - }); + return processResponse(obj, runner); + }; - const res = await this.executeQuery(query, params, async () => { - const statement = await this.client.prepare(query); - const result = await statement[method](...params); - await statement.finalize(); + dialect.prototype._query = (connection: any, obj: any) => { + const callMethod = this.getCallMethod(obj); - return result; - }); + return new Bluebird((resolve: any, reject: any) => { + /* istanbul ignore if */ + if (!connection || !connection[callMethod]) { + return reject(new Error(`Error calling ${callMethod} on connection.`)); + } - return this.transformResult(res, method); - } + connection[callMethod](obj.sql, obj.bindings, function (this: any, err: any, response: any) { + if (err) { + return reject(err); + } - async loadFile(path: string): Promise { - await this.client.exec((await readFile(path)).toString()); + obj.response = response; + obj.context = this; + + return resolve(obj); + }); + }); + }; + + return dialect; } - private transformResult(res: any, method: 'all' | 'get' | 'run'): QueryResult | EntityData | EntityData[] { - if (method === 'run') { - return { - affectedRows: res.changes, - insertId: res.lastID, - }; + private getCallMethod(obj: any): string { + if (obj.method === 'raw' && obj.sql.trim().match('^insert into|update|delete')) { + return 'run'; } - return res; + switch (obj.method) { + case 'insert': + case 'update': + case 'counter': + case 'del': + return 'run'; + default: + return 'all'; + } } } - -export type SqliteDatabase = Database & { driver: { open: boolean } }; diff --git a/lib/decorators/Entity.ts b/lib/decorators/Entity.ts index 8e3ea075df11..f1ffe19b0dff 100644 --- a/lib/decorators/Entity.ts +++ b/lib/decorators/Entity.ts @@ -65,9 +65,10 @@ export interface EntityProperty = any> { length?: any; reference: ReferenceType; fieldName: string; - default?: string; + default?: any; unique?: boolean; nullable?: boolean; + unsigned: boolean; persist?: boolean; hidden?: boolean; version?: boolean; diff --git a/lib/decorators/Property.ts b/lib/decorators/Property.ts index 68a83094b046..e95463cdf522 100644 --- a/lib/decorators/Property.ts +++ 
b/lib/decorators/Property.ts @@ -21,6 +21,7 @@ export type PropertyOptions = { default?: any; unique?: boolean; nullable?: boolean; + unsigned?: boolean; persist?: boolean; hidden?: boolean; version?: boolean; diff --git a/lib/drivers/AbstractSqlDriver.ts b/lib/drivers/AbstractSqlDriver.ts index cb82025602e4..718d5f77be7e 100644 --- a/lib/drivers/AbstractSqlDriver.ts +++ b/lib/drivers/AbstractSqlDriver.ts @@ -1,16 +1,18 @@ +import { Transaction } from 'knex'; import { EntityData, IEntityType, IPrimaryKey } from '../decorators'; import { DatabaseDriver } from './DatabaseDriver'; -import { Connection, QueryResult } from '../connections'; +import { QueryResult } from '../connections'; +import { AbstractSqlConnection } from '../connections/AbstractSqlConnection'; import { ReferenceType } from '../entity'; import { FilterQuery } from './IDatabaseDriver'; import { QueryBuilder, QueryOrderMap } from '../query'; import { Utils } from '../utils'; import { LockMode } from '../unit-of-work'; -export abstract class AbstractSqlDriver extends DatabaseDriver { +export abstract class AbstractSqlDriver extends DatabaseDriver { - async find>(entityName: string, where: FilterQuery, populate: string[] = [], orderBy: QueryOrderMap = {}, limit?: number, offset?: number): Promise { - const qb = this.createQueryBuilder(entityName); + async find>(entityName: string, where: FilterQuery, populate: string[] = [], orderBy: QueryOrderMap = {}, limit?: number, offset?: number, ctx?: Transaction): Promise { + const qb = this.createQueryBuilder(entityName, ctx); qb.select('*').populate(populate).where(where).orderBy(orderBy); if (limit !== undefined) { @@ -20,7 +22,7 @@ export abstract class AbstractSqlDriver extends DatabaseDr return qb.execute('all'); } - async findOne>(entityName: string, where: FilterQuery | string, populate: string[] = [], orderBy: QueryOrderMap = {}, fields?: string[], lockMode?: LockMode): Promise { + async findOne>(entityName: string, where: FilterQuery | string, populate: string[] = [], orderBy: QueryOrderMap = {}, fields?: string[], lockMode?: LockMode, ctx?: Transaction): Promise { const pk = this.metadata[entityName].primaryKey; if (Utils.isPrimaryKey(where)) { @@ -31,7 +33,7 @@ export abstract class AbstractSqlDriver extends DatabaseDr fields.unshift(pk); } - return this.createQueryBuilder(entityName) + return this.createQueryBuilder(entityName, ctx) .select(fields || '*') .populate(populate) .where(where) @@ -41,61 +43,57 @@ export abstract class AbstractSqlDriver extends DatabaseDr .execute('get'); } - async count(entityName: string, where: any): Promise { - const qb = this.createQueryBuilder(entityName); + async count(entityName: string, where: any, ctx?: Transaction): Promise { + const qb = this.createQueryBuilder(entityName, ctx); const pk = this.metadata[entityName].primaryKey; const res = await qb.count(pk, true).where(where).execute('get', false); return +res.count; } - async nativeInsert>(entityName: string, data: EntityData): Promise { + async nativeInsert>(entityName: string, data: EntityData, ctx?: Transaction): Promise { const collections = this.extractManyToMany(entityName, data); - const pk = this.getPrimaryKeyField(entityName) as keyof T; - - if (Object.keys(data).length === 0) { - data[pk] = null as T[keyof T]; - } - - const qb = this.createQueryBuilder(entityName); + const pk = this.getPrimaryKeyField(entityName); + const qb = this.createQueryBuilder(entityName, ctx); const res = await qb.insert(data).execute('run', false); - res.insertId = res.insertId || data[pk]; - await 
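The new unsigned flag added to PropertyOptions/EntityProperty can be used like any other column option (pivot-table primary keys now receive it automatically, per MetadataDiscovery below). A minimal illustrative entity, with the usual boilerplate trimmed:

import { Entity, PrimaryKey, Property } from 'mikro-orm';

@Entity()
export class Counter {

  @PrimaryKey()
  id!: number;

  @Property({ unsigned: true })
  hits!: number;

}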
this.processManyToMany(entityName, res.insertId, collections); + res.row = res.row || {}; + res.insertId = res.insertId || res.row[pk] || data[pk]; + await this.processManyToMany(entityName, res.insertId, collections, ctx); return res; } - async nativeUpdate>(entityName: string, where: FilterQuery, data: EntityData): Promise { - const pk = this.metadata[entityName] ? this.metadata[entityName].primaryKey : this.config.getNamingStrategy().referenceColumnName(); + async nativeUpdate>(entityName: string, where: FilterQuery, data: EntityData, ctx?: Transaction): Promise { + const pk = this.getPrimaryKeyField(entityName); if (Utils.isPrimaryKey(where)) { where = { [pk]: where }; } const collections = this.extractManyToMany(entityName, data); - let res: QueryResult = { affectedRows: 0, insertId: 0 }; + let res: QueryResult = { affectedRows: 0, insertId: 0, row: {} }; if (Object.keys(data).length) { - const qb = this.createQueryBuilder(entityName); + const qb = this.createQueryBuilder(entityName, ctx); res = await qb.update(data).where(where).execute('run', false); } - await this.processManyToMany(entityName, Utils.extractPK(data[pk] || where, this.metadata[entityName])!, collections); + await this.processManyToMany(entityName, Utils.extractPK(data[pk] || where, this.metadata[entityName])!, collections, ctx); return res; } - async nativeDelete>(entityName: string, where: FilterQuery | string | any): Promise { + async nativeDelete>(entityName: string, where: FilterQuery | string | any, ctx?: Transaction): Promise { if (Utils.isPrimaryKey(where)) { - const pk = this.metadata[entityName] ? this.metadata[entityName].primaryKey : this.config.getNamingStrategy().referenceColumnName(); + const pk = this.getPrimaryKeyField(entityName); where = { [pk]: where }; } - return this.createQueryBuilder(entityName).delete(where).execute('run', false); + return this.createQueryBuilder(entityName, ctx).delete(where).execute('run', false); } - protected createQueryBuilder(entityName: string): QueryBuilder { - return new QueryBuilder(entityName, this.metadata, this); + protected createQueryBuilder(entityName: string, ctx?: Transaction): QueryBuilder { + return new QueryBuilder(entityName, this.metadata, this, ctx); } protected extractManyToMany>(entityName: string, data: EntityData): EntityData { @@ -118,7 +116,7 @@ export abstract class AbstractSqlDriver extends DatabaseDr return ret; } - protected async processManyToMany>(entityName: string, pk: IPrimaryKey, collections: EntityData) { + protected async processManyToMany>(entityName: string, pk: IPrimaryKey, collections: EntityData, ctx?: Transaction) { if (!this.metadata[entityName]) { return; } @@ -130,11 +128,11 @@ export abstract class AbstractSqlDriver extends DatabaseDr const prop = props[k]; const fk1 = prop.joinColumn; const fk2 = prop.inverseJoinColumn; - const qb1 = this.createQueryBuilder(prop.pivotTable); + const qb1 = this.createQueryBuilder(prop.pivotTable, ctx); await qb1.delete({ [fk1]: pk }).execute('run', false); for (const item of collections[k]) { - const qb2 = this.createQueryBuilder(prop.pivotTable); + const qb2 = this.createQueryBuilder(prop.pivotTable, ctx); await qb2.insert({ [fk1]: pk, [fk2]: item }).execute('run', false); } } diff --git a/lib/drivers/DatabaseDriver.ts b/lib/drivers/DatabaseDriver.ts index 9c19aa4565d5..9bcae33ee2b0 100644 --- a/lib/drivers/DatabaseDriver.ts +++ b/lib/drivers/DatabaseDriver.ts @@ -1,7 +1,7 @@ import { FilterQuery, IDatabaseDriver } from './IDatabaseDriver'; import { EntityData, EntityMetadata, 
EntityProperty, IEntity, IEntityType, IPrimaryKey } from '../decorators'; import { MetadataStorage } from '../metadata'; -import { Connection, QueryResult } from '../connections'; +import { Connection, QueryResult, Transaction } from '../connections'; import { Configuration, Utils } from '../utils'; import { QueryOrder, QueryOrderMap } from '../query'; import { Platform } from '../platforms'; @@ -13,20 +13,18 @@ export abstract class DatabaseDriver implements IDatabaseD protected readonly platform: Platform; protected readonly metadata = MetadataStorage.getMetadata(); protected readonly logger = this.config.getLogger(); - protected transactionLevel = 0; - protected transactionRolledBack = false; constructor(protected readonly config: Configuration) { } - abstract async find(entityName: string, where: FilterQuery, populate?: string[], orderBy?: QueryOrderMap, limit?: number, offset?: number): Promise; + abstract async find(entityName: string, where: FilterQuery, populate?: string[], orderBy?: QueryOrderMap, limit?: number, offset?: number, ctx?: Transaction): Promise; - abstract async findOne(entityName: string, where: FilterQuery | string, populate: string[], orderBy?: QueryOrderMap, fields?: string[], lockMode?: LockMode): Promise; + abstract async findOne(entityName: string, where: FilterQuery | string, populate: string[], orderBy?: QueryOrderMap, fields?: string[], lockMode?: LockMode, ctx?: Transaction): Promise; - abstract async nativeInsert>(entityName: string, data: EntityData): Promise; + abstract async nativeInsert>(entityName: string, data: EntityData, ctx?: Transaction): Promise; - abstract async nativeUpdate(entityName: string, where: FilterQuery | IPrimaryKey, data: EntityData): Promise; + abstract async nativeUpdate(entityName: string, where: FilterQuery | IPrimaryKey, data: EntityData, ctx?: Transaction): Promise; - abstract async nativeDelete(entityName: string, where: FilterQuery | IPrimaryKey): Promise; + abstract async nativeDelete(entityName: string, where: FilterQuery | IPrimaryKey, ctx?: Transaction): Promise; abstract async count(entityName: string, where: FilterQuery): Promise; @@ -34,7 +32,7 @@ export abstract class DatabaseDriver implements IDatabaseD throw new Error(`Aggregations are not supported by ${this.constructor.name} driver`); } - async loadFromPivotTable(prop: EntityProperty, owners: IPrimaryKey[]): Promise> { + async loadFromPivotTable(prop: EntityProperty, owners: IPrimaryKey[], ctx?: Transaction): Promise> { if (!this.platform.usesPivotTable()) { throw new Error(`${this.constructor.name} does not use pivot tables`); } @@ -43,7 +41,7 @@ export abstract class DatabaseDriver implements IDatabaseD const fk2 = prop.inverseJoinColumn; const pivotTable = prop.owner ? prop.pivotTable : this.metadata[prop.type].properties[prop.mappedBy].pivotTable; const orderBy = { [`${pivotTable}.${this.metadata[pivotTable].primaryKey}`]: QueryOrder.ASC }; - const items = owners.length ? await this.find(prop.type, { [fk1]: { $in: owners } }, [pivotTable], orderBy) : []; + const items = owners.length ? 
await this.find(prop.type, { [fk1]: { $in: owners } }, [pivotTable], orderBy, undefined, undefined, ctx) : []; const map: Record = {}; owners.forEach(owner => map['' + owner] = []); @@ -80,49 +78,6 @@ export abstract class DatabaseDriver implements IDatabaseD return this.connection as C; } - async beginTransaction(): Promise { - this.transactionLevel++; - await this.runTransaction('beginTransaction'); - } - - async commit(): Promise { - if (this.transactionRolledBack) { - throw new Error('Transaction commit failed because the transaction has been marked for rollback only'); - } - - await this.runTransaction('commit'); - this.transactionLevel = Math.max(this.transactionLevel - 1, 0); - } - - async rollback(): Promise { - await this.runTransaction('rollback'); - - if (this.transactionLevel === 1) { - this.transactionRolledBack = false; - } else if (!this.platform.supportsSavePoints()) { - this.transactionRolledBack = true; - } - - this.transactionLevel = Math.max(this.transactionLevel - 1, 0); - } - - async transactional(cb: () => Promise): Promise { - try { - await this.beginTransaction(); - const ret = await cb(); - await this.commit(); - - return ret; - } catch (e) { - await this.rollback(); - throw e; - } - } - - isInTransaction(): boolean { - return this.transactionLevel > 0; - } - getPlatform(): Platform { return this.platform; } @@ -131,15 +86,4 @@ export abstract class DatabaseDriver implements IDatabaseD return this.metadata[entityName] ? this.metadata[entityName].primaryKey : this.config.getNamingStrategy().referenceColumnName(); } - private async runTransaction(method: 'beginTransaction' | 'commit' | 'rollback'): Promise { - if (this.transactionLevel === 1 || this.platform.supportsSavePoints()) { - const useSavepoint = this.transactionLevel !== 1 && this.platform.supportsSavePoints(); - await this.connection[method](useSavepoint ? 
this.getSavePointName() : undefined); - } - } - - private getSavePointName(): string { - return `${this.constructor.name}_${this.transactionLevel}`; - } - } diff --git a/lib/drivers/IDatabaseDriver.ts b/lib/drivers/IDatabaseDriver.ts index f8a89e870e06..22549ce38efc 100644 --- a/lib/drivers/IDatabaseDriver.ts +++ b/lib/drivers/IDatabaseDriver.ts @@ -1,5 +1,5 @@ import { EntityData, EntityMetadata, EntityProperty, IEntity, IEntityType, IPrimaryKey } from '../decorators'; -import { Connection, QueryResult } from '../connections'; +import { Connection, QueryResult, Transaction } from '../connections'; import { QueryOrderMap } from '../query'; import { Platform } from '../platforms'; import { LockMode } from '../unit-of-work'; @@ -11,20 +11,20 @@ export interface IDatabaseDriver { /** * Finds selection of entities */ - find(entityName: string, where: FilterQuery, populate?: string[], orderBy?: QueryOrderMap, limit?: number, offset?: number): Promise; + find(entityName: string, where: FilterQuery, populate?: string[], orderBy?: QueryOrderMap, limit?: number, offset?: number, ctx?: Transaction): Promise; /** * Finds single entity (table row, document) */ - findOne(entityName: string, where: FilterQuery | IPrimaryKey, populate?: string[], orderBy?: QueryOrderMap, fields?: string[], lockMode?: LockMode): Promise; + findOne(entityName: string, where: FilterQuery | IPrimaryKey, populate?: string[], orderBy?: QueryOrderMap, fields?: string[], lockMode?: LockMode, ctx?: Transaction): Promise; - nativeInsert(entityName: string, data: EntityData): Promise; + nativeInsert(entityName: string, data: EntityData, ctx?: Transaction): Promise; - nativeUpdate(entityName: string, where: FilterQuery | IPrimaryKey, data: EntityData): Promise; + nativeUpdate(entityName: string, where: FilterQuery | IPrimaryKey, data: EntityData, ctx?: Transaction): Promise; - nativeDelete(entityName: string, where: FilterQuery | IPrimaryKey): Promise; + nativeDelete(entityName: string, where: FilterQuery | IPrimaryKey, ctx?: Transaction): Promise; - count(entityName: string, where: FilterQuery): Promise; + count(entityName: string, where: FilterQuery, ctx?: Transaction): Promise; aggregate(entityName: string, pipeline: any[]): Promise; @@ -33,29 +33,7 @@ export interface IDatabaseDriver { /** * When driver uses pivot tables for M:N, this method will load identifiers for given collections from them */ - loadFromPivotTable(prop: EntityProperty, owners: IPrimaryKey[]): Promise>; - - /** - * Begins a transaction (if supported) - */ - beginTransaction(): Promise; - - /** - * Commits statements in a transaction - */ - commit(): Promise; - - /** - * Rollback changes in a transaction - */ - rollback(): Promise; - - /** - * Runs callback inside transaction - */ - transactional(cb: () => Promise): Promise; - - isInTransaction(): boolean; + loadFromPivotTable(prop: EntityProperty, owners: IPrimaryKey[], ctx?: Transaction): Promise>; getPlatform(): Platform; diff --git a/lib/drivers/PostgreSqlDriver.ts b/lib/drivers/PostgreSqlDriver.ts index 0c1489e9297c..33257b86a463 100644 --- a/lib/drivers/PostgreSqlDriver.ts +++ b/lib/drivers/PostgreSqlDriver.ts @@ -1,33 +1,10 @@ import { PostgreSqlConnection } from '../connections/PostgreSqlConnection'; import { AbstractSqlDriver } from './AbstractSqlDriver'; -import { EntityData, IEntityType } from '../decorators'; -import { QueryType } from '../query'; import { PostgreSqlPlatform } from '../platforms/PostgreSqlPlatform'; -import { QueryResult } from '../connections'; export class PostgreSqlDriver 
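Every IDatabaseDriver method now takes an optional trailing ctx?: Transaction instead of the driver tracking its own transaction level. Inside em.transactional() the context is forwarded automatically, but it is also reachable by hand – a sketch with an assumed Author entity and field:

import { MikroORM } from 'mikro-orm';

export async function countInsideTransaction(orm: MikroORM): Promise<void> {
  await orm.em.transactional(async em => {
    console.log(em.isInTransaction()); // true – derived from the stored transactionContext

    const ctx = em.getTransactionContext(); // the underlying knex Transaction
    const total = await em.getDriver().count('Author', { termsAccepted: true }, ctx);
    console.log(total);
  });
}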
extends AbstractSqlDriver { protected readonly connection = new PostgreSqlConnection(this.config); protected readonly platform = new PostgreSqlPlatform(); - async nativeInsert>(entityName: string, data: EntityData): Promise { - const collections = this.extractManyToMany(entityName, data); - const qb = this.createQueryBuilder(entityName).insert(data); - const params = qb.getParams(); - let sql = qb.getQuery(); - - if (qb.type === QueryType.INSERT && Object.keys(params).length === 0) { - const pk = this.getPrimaryKeyField(entityName); - const prop = this.metadata[entityName].properties[pk]; - sql = sql.replace('() VALUES ()', `("${prop.fieldName}") VALUES (DEFAULT)`); - } - - const res = await this.connection.execute(sql, params, 'run'); - const pk = this.getPrimaryKeyField(entityName); - res.insertId = res.insertId || data[pk]; - await this.processManyToMany(entityName, res.insertId, collections); - - return res; - } - } diff --git a/lib/entity/EntityLoader.ts b/lib/entity/EntityLoader.ts index b7ca08874670..ba7029e308ef 100644 --- a/lib/entity/EntityLoader.ts +++ b/lib/entity/EntityLoader.ts @@ -115,7 +115,7 @@ export class EntityLoader { } private async findChildrenFromPivotTable>(filtered: T[], prop: EntityProperty, field: keyof T): Promise { - const map = await this.driver.loadFromPivotTable(prop, filtered.map(e => e.__primaryKey)); + const map = await this.driver.loadFromPivotTable(prop, filtered.map(e => e.__primaryKey), this.em.getTransactionContext()); const children: IEntity[] = []; for (const entity of filtered) { diff --git a/lib/entity/EntityValidator.ts b/lib/entity/EntityValidator.ts index 182d808fda46..c8a57214e1ae 100644 --- a/lib/entity/EntityValidator.ts +++ b/lib/entity/EntityValidator.ts @@ -98,7 +98,7 @@ export class EntityValidator { } private fixDateType(givenValue: string): Date | string { - const date = new Date(givenValue); + const date = new Date(parseFloat(givenValue) || givenValue); return date.toString() !== 'Invalid Date' ? 
date : givenValue; } diff --git a/lib/metadata/MetadataDiscovery.ts b/lib/metadata/MetadataDiscovery.ts index 7fac24841620..fdf8f4e979a1 100644 --- a/lib/metadata/MetadataDiscovery.ts +++ b/lib/metadata/MetadataDiscovery.ts @@ -227,7 +227,7 @@ export class MetadataDiscovery { private definePivotTableEntity(meta: EntityMetadata, prop: EntityProperty): EntityMetadata | undefined { if (prop.reference === ReferenceType.MANY_TO_MANY && prop.owner && prop.pivotTable) { const pk = this.namingStrategy.referenceColumnName(); - const primaryProp = { name: pk, type: 'number', reference: ReferenceType.SCALAR, primary: true } as EntityProperty; + const primaryProp = { name: pk, type: 'number', reference: ReferenceType.SCALAR, primary: true, unsigned: true } as EntityProperty; this.initFieldName(primaryProp); return this.metadata[prop.pivotTable] = { @@ -292,7 +292,7 @@ export class MetadataDiscovery { } } - private getDefaultVersionValue(prop: EntityProperty): string { + private getDefaultVersionValue(prop: EntityProperty): any { if (prop.default) { return prop.default; } @@ -302,7 +302,7 @@ export class MetadataDiscovery { return this.platform.getCurrentTimestampSQL(prop.length); } - return '1'; + return 1; } } diff --git a/lib/platforms/MySqlPlatform.ts b/lib/platforms/MySqlPlatform.ts index b7a0a6c6e3a6..390e615e3e47 100644 --- a/lib/platforms/MySqlPlatform.ts +++ b/lib/platforms/MySqlPlatform.ts @@ -5,8 +5,4 @@ export class MySqlPlatform extends Platform { protected schemaHelper = new MySqlSchemaHelper(); - getReadLockSQL(): string { - return 'LOCK IN SHARE MODE'; - } - } diff --git a/lib/platforms/Platform.ts b/lib/platforms/Platform.ts index 547c7ecfba45..09f19a23b0ba 100644 --- a/lib/platforms/Platform.ts +++ b/lib/platforms/Platform.ts @@ -14,18 +14,10 @@ export abstract class Platform { return true; } - supportsSavePoints(): boolean { - return false; - } - - getNamingStrategy(): { new(): NamingStrategy} { + getNamingStrategy(): { new(): NamingStrategy } { return UnderscoreNamingStrategy; } - getParameterPlaceholder(index?: number): string { - return '?'; - } - usesReturningStatement(): boolean { return false; } @@ -67,34 +59,7 @@ export abstract class Platform { * Returns the SQL specific for the platform to get the current timestamp */ getCurrentTimestampSQL(length: number): string { - return 'CURRENT_TIMESTAMP' + (length ? `(${length})` : ''); - } - - /** - * Returns the FOR UPDATE expression. - * - */ - getForUpdateSQL(): string { - return 'FOR UPDATE'; - } - - /** - * Returns the SQL snippet to append to any SELECT statement which locks rows in shared read lock. - * - * This defaults to the ANSI SQL "FOR UPDATE", which is an exclusive lock (Write). Some database - * vendors allow to lighten this constraint up to be a real read lock. - */ - getReadLockSQL(): string { - return this.getForUpdateSQL(); - } - - /** - * Returns the SQL snippet to append to any SELECT statement which obtains an exclusive lock on the rows. - * - * The semantics of this lock mode should equal the SELECT .. FOR UPDATE of the ANSI SQL standard. - */ - getWriteLockSQL(): string { - return this.getForUpdateSQL(); + return 'current_timestamp' + (length ? 
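With getDefaultVersionValue() now returning a number (or the lower-cased current_timestamp expression for Date-typed versions), a version column for optimistic locking is declared as before – a minimal sketch with entity boilerplate trimmed:

import { Entity, PrimaryKey, Property } from 'mikro-orm';

@Entity()
export class Book {

  @PrimaryKey()
  id!: number;

  @Property()
  title!: string;

  @Property({ version: true })
  version!: number; // defaults to 1; a Date-typed property versions by timestamp instead

}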
`(${length})` : ''); } } diff --git a/lib/platforms/PostgreSqlPlatform.ts b/lib/platforms/PostgreSqlPlatform.ts index c819a70f6bd6..9c6bca21bf68 100644 --- a/lib/platforms/PostgreSqlPlatform.ts +++ b/lib/platforms/PostgreSqlPlatform.ts @@ -1,4 +1,3 @@ -import { NamingStrategy, UnderscoreNamingStrategy } from '../naming-strategy'; import { Platform } from './Platform'; import { PostgreSqlSchemaHelper } from '../schema/PostgreSqlSchemaHelper'; @@ -6,18 +5,6 @@ export class PostgreSqlPlatform extends Platform { protected schemaHelper = new PostgreSqlSchemaHelper(); - supportsSavePoints(): boolean { - return true; - } - - getNamingStrategy(): { new(): NamingStrategy} { - return UnderscoreNamingStrategy; - } - - getParameterPlaceholder(index?: number): string { - return '$' + index; - } - usesReturningStatement(): boolean { return true; } @@ -26,8 +13,4 @@ export class PostgreSqlPlatform extends Platform { return true; } - getReadLockSQL(): string { - return 'FOR SHARE'; - } - } diff --git a/lib/platforms/SqlitePlatform.ts b/lib/platforms/SqlitePlatform.ts index 992a8cc3a673..d88f4377b92e 100644 --- a/lib/platforms/SqlitePlatform.ts +++ b/lib/platforms/SqlitePlatform.ts @@ -5,10 +5,6 @@ export class SqlitePlatform extends Platform { protected schemaHelper = new SqliteSchemaHelper(); - supportsSavePoints(): boolean { - return true; - } - requiresNullableForAlteringColumn() { return true; } @@ -17,8 +13,4 @@ export class SqlitePlatform extends Platform { return super.getCurrentTimestampSQL(0); } - getForUpdateSQL(): string { - return ''; - } - } diff --git a/lib/query/QueryBuilder.ts b/lib/query/QueryBuilder.ts index 77a1569aabf9..41edd1765504 100644 --- a/lib/query/QueryBuilder.ts +++ b/lib/query/QueryBuilder.ts @@ -1,11 +1,13 @@ +import { QueryBuilder as KnexQueryBuilder, Raw, Transaction } from 'knex'; import { Utils, ValidationError } from '../utils'; import { QueryBuilderHelper } from './QueryBuilderHelper'; import { SmartQueryHelper } from './SmartQueryHelper'; import { EntityMetadata, EntityProperty } from '../decorators'; import { ReferenceType } from '../entity'; import { QueryFlag, QueryOrderMap, QueryType } from './enums'; -import { IDatabaseDriver } from '../drivers'; import { LockMode } from '../unit-of-work'; +import { AbstractSqlConnection } from '../connections/AbstractSqlConnection'; +import { IDatabaseDriver } from '../drivers'; /** * SQL query builder @@ -30,13 +32,15 @@ export class QueryBuilder { private _limit: number; private _offset: number; private lockMode?: LockMode; - private readonly connection = this.driver.getConnection(); + private readonly connection = this.driver.getConnection() as AbstractSqlConnection; private readonly platform = this.driver.getPlatform(); - private readonly helper = new QueryBuilderHelper(this.entityName, this.alias, this._aliasMap, this.metadata, this.platform); + private readonly knex = this.connection.getKnex(); + private readonly helper = new QueryBuilderHelper(this.entityName, this.alias, this._aliasMap, this.metadata, this.knex, this.platform); constructor(private readonly entityName: string, private readonly metadata: Record, private readonly driver: IDatabaseDriver, + private readonly context?: Transaction, readonly alias = `e0`) { } select(fields: string | string[], distinct = false): this { @@ -66,17 +70,16 @@ export class QueryBuilder { } count(field?: string, distinct = false): this { - this.select(field || this.metadata[this.entityName].primaryKey); - this.flags.push(QueryFlag.COUNT); + this._fields = [field || 
this.metadata[this.entityName].primaryKey]; if (distinct) { this.flags.push(QueryFlag.DISTINCT); } - return this; + return this.init(QueryType.COUNT); } - join(field: string, alias: string, type: 'left' | 'inner' = 'inner'): this { + join(field: string, alias: string, type: 'leftJoin' | 'innerJoin' = 'innerJoin'): this { const [fromAlias, fromField] = this.helper.splitField(field); const entityName = this._aliasMap[fromAlias]; const prop = this.metadata[entityName].properties[fromField]; @@ -99,7 +102,7 @@ export class QueryBuilder { } leftJoin(field: string, alias: string): this { - return this.join(field, alias, 'left'); + return this.join(field, alias, 'leftJoin'); } where(cond: Record, operator?: keyof typeof QueryBuilderHelper.GROUP_OPERATORS): this; @@ -191,7 +194,7 @@ export class QueryBuilder { } setLockMode(mode?: LockMode): this { - if ([LockMode.NONE, LockMode.PESSIMISTIC_READ, LockMode.PESSIMISTIC_WRITE].includes(mode!) && !this.driver.isInTransaction()) { + if ([LockMode.NONE, LockMode.PESSIMISTIC_READ, LockMode.PESSIMISTIC_WRITE].includes(mode!) && !this.context) { throw ValidationError.transactionRequired(); } @@ -200,52 +203,37 @@ export class QueryBuilder { return this; } - getQuery(): string { + getKnexQuery(): KnexQueryBuilder { this.finalize(); - let sql = this.getQueryBase(); + const qb = this.getQueryBase(); - sql += this.helper.getClause('WHERE', this.helper.getQueryCondition(this.type, this._cond).join(' AND '), this._cond); - sql += this.helper.getClause('GROUP BY', this.prepareFields(this._groupBy), this._groupBy); - sql += this.helper.getClause('HAVING', this.helper.getQueryCondition(this.type, this._having).join(' AND '), this._having); - sql += this.helper.getClause('ORDER BY', this.helper.getQueryOrder(this.type, this._orderBy, this._populateMap).join(', '), this._orderBy); - sql += this.helper.getClause('LIMIT', '?', this._limit); - sql += this.helper.getClause('OFFSET', '?', this._offset); + Utils.runIfNotEmpty(() => this.helper.appendQueryCondition(this.type, this._cond, qb), this._cond); + Utils.runIfNotEmpty(() => qb.groupBy(this.prepareFields(this._groupBy)), this._groupBy); + Utils.runIfNotEmpty(() => this.helper.appendQueryCondition(this.type, this._having, qb, undefined, 'having'), this._having); + Utils.runIfNotEmpty(() => qb.orderBy(this.helper.getQueryOrder(this.type, this._orderBy, this._populateMap)), this._orderBy); + Utils.runIfNotEmpty(() => qb.limit(this._limit), this._limit); + Utils.runIfNotEmpty(() => qb.offset(this._offset), this._offset); if (this.type === QueryType.TRUNCATE && this.platform.usesCascadeStatement()) { - sql += ' CASCADE'; + return this.knex.raw(qb.toSQL().toNative().sql + ' cascade') as any; } - sql += this.helper.getLockSQL(this.lockMode); + this.helper.getLockSQL(qb, this.lockMode); + this.helper.finalize(this.type, qb, this.metadata[this.entityName]); - return this.helper.finalize(this.type, sql, this.metadata[this.entityName]); + return qb; } - getParams(): any[] { - this.finalize(); - let ret: any[] = []; - - if (this.type === QueryType.INSERT && this._data) { - ret = Object.values(this._data); - } else if (this.type === QueryType.UPDATE) { - ret = Object.values(this._data); - } - - ret = ret.concat(this.helper.getWhereParams(this._cond)); - ret = ret.concat(this.helper.getWhereParams(this._having)); - - if (this._limit) { - ret.push(this._limit); - } - - if (this._offset) { - ret.push(this._offset); - } + getQuery(): string { + return this.getKnexQuery().toSQL().toNative().sql; + } - return 
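setLockMode() – and the matching check in EntityManager.checkLockRequirements() – now looks at the transaction context rather than driver state, so pessimistic locks still have to run inside transactional(). A sketch, assuming LockMode is exported from the package root and Author is an example entity:

import { LockMode, MikroORM } from 'mikro-orm';
import { Author } from './entities/Author'; // hypothetical example entity

export async function lockAndUpdate(orm: MikroORM, id: number): Promise<void> {
  await orm.em.transactional(async em => {
    // acquires a row-level write lock on the selected row
    const author = await em.findOne(Author, id, { lockMode: LockMode.PESSIMISTIC_WRITE });
    author!.name = `${author!.name} (updated under lock)`;
    // flushed and released when the transaction commits
  });
}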
SmartQueryHelper.processParams(ret); + getParams(): any[] { + return this.getKnexQuery().toSQL().toNative().bindings; } async execute(method: 'all' | 'get' | 'run' = 'all', mapResults = true): Promise { - const res = await this.connection.execute(this.getQuery(), this.getParams(), method); + const res = await this.connection.execute(this.getKnexQuery(), [], method); if (!mapResults) { return res; @@ -259,31 +247,31 @@ export class QueryBuilder { } clone(): QueryBuilder { - const qb = new QueryBuilder(this.entityName, this.metadata, this.driver, this.alias); + const qb = new QueryBuilder(this.entityName, this.metadata, this.driver, this.context, this.alias); Object.assign(qb, this); // clone array/object properties const properties = ['flags', '_fields', '_populate', '_populateMap', '_joins', '_aliasMap', '_cond', '_data', '_orderBy']; properties.forEach(prop => (qb as any)[prop] = Utils.copy(this[prop as keyof this])); + qb.finalized = false; return qb; } - private prepareFields(fields: string[], glue = ', '): string { + private prepareFields(fields: string[]): (string | Raw)[] { const ret: string[] = []; fields.forEach(f => { if (this._joins[f]) { - ret.push(...this.helper.mapJoinColumns(this.type, this._joins[f])); - return; + return ret.push(...this.helper.mapJoinColumns(this.type, this._joins[f]) as string[]); } - ret.push(this.helper.mapper(this.type, f)); + ret.push(this.helper.mapper(this.type, f) as string); }); Object.keys(this._populateMap).forEach(f => { if (!fields.includes(f)) { - ret.push(...this.helper.mapJoinColumns(this.type, this._joins[f])); + ret.push(...this.helper.mapJoinColumns(this.type, this._joins[f]) as string[]); } if (this._joins[f].prop.reference !== ReferenceType.ONE_TO_ONE) { @@ -291,15 +279,7 @@ export class QueryBuilder { } }); - if (this.flags.includes(QueryFlag.COUNT)) { - if (this.flags.includes(QueryFlag.DISTINCT)) { - return `COUNT(DISTINCT ${ret[0]}) AS ${this.helper.wrap('count')}`; - } - - return `COUNT(${ret[0]}) AS ${this.helper.wrap('count')}`; - } - - return ret.join(glue); + return ret; } private processWhere(cond: any): any { @@ -335,7 +315,7 @@ export class QueryBuilder { this._fields.push(prop.name); const alias2 = `e${this.aliasCounter++}`; - this._joins[prop.name] = this.helper.joinOneToReference(prop, this.alias, alias2, 'left'); + this._joins[prop.name] = this.helper.joinOneToReference(prop, this.alias, alias2, 'leftJoin'); const prop2 = this.metadata[prop.type].properties[prop.mappedBy]; Utils.renameKey(cond, prop.name, `${alias2}.${prop2.referenceColumnName}`); } @@ -343,7 +323,7 @@ export class QueryBuilder { private processManyToMany(prop: EntityProperty, cond: any): void { const alias1 = `e${this.aliasCounter++}`; const join = { - type: 'left', + type: 'leftJoin', alias: alias1, ownerAlias: this.alias, joinColumn: prop.joinColumn, @@ -364,7 +344,7 @@ export class QueryBuilder { private processOneToMany(prop: EntityProperty, cond: any): void { const alias2 = `e${this.aliasCounter++}`; - this._joins[prop.name] = this.helper.joinOneToReference(prop, this.alias, alias2, 'left'); + this._joins[prop.name] = this.helper.joinOneToReference(prop, this.alias, alias2, 'leftJoin'); Utils.renameKey(cond, prop.name, `${alias2}.${prop.referenceColumnName}`); } @@ -383,36 +363,50 @@ export class QueryBuilder { return this; } - private getQueryBase(): string { - let sql = this.type + ' '; + private getQueryBase(): KnexQueryBuilder { + const qb = this.createBuilder(); switch (this.type) { case QueryType.SELECT: - sql += 
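With the builder now producing a knex query under the hood, getQuery()/getParams() are derived from knex' toSQL().toNative(), and getKnexQuery() exposes the knex builder itself (with .transacting() already applied when a context is present). Entity and field names below are assumptions:

import { MikroORM } from 'mikro-orm';

export async function listAuthors(orm: MikroORM) {
  const qb = orm.em.createQueryBuilder('Author', 'a');
  qb.select('*').where({ name: 'Jon Snow' }).limit(5);

  console.log(qb.getQuery());      // native SQL string produced by knex
  console.log(qb.getParams());     // bound parameters from the same knex query
  const knexQb = qb.getKnexQuery(); // escape hatch to the underlying knex builder
  void knexQb;

  return qb.execute('all'); // executes through the connection and maps the results
}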
this.flags.includes(QueryFlag.DISTINCT) && !this.flags.includes(QueryFlag.COUNT) ? 'DISTINCT ' : ''; - sql += this.prepareFields(this._fields); - sql += ` FROM ${this.helper.getTableName(this.entityName, true)} AS ${this.helper.wrap(this.alias)}`; - sql += this.helper.processJoins(this._joins); + qb.select(this.prepareFields(this._fields)); + + if (this.flags.includes(QueryFlag.DISTINCT)) { + qb.distinct(); + } + + this.helper.processJoins(qb, this._joins); + break; + case QueryType.COUNT: + const m = this.flags.includes(QueryFlag.DISTINCT) ? 'countDistinct' : 'count'; + qb[m](this.helper.mapper(this.type, this._fields[0], undefined, 'count')); + this.helper.processJoins(qb, this._joins); break; case QueryType.INSERT: - sql += `INTO ${this.helper.getTableName(this.entityName, true)}`; - sql += ' (' + Object.keys(this._data).map(k => this.helper.wrap(k)).join(', ') + ')'; - sql += ' VALUES (' + Object.keys(this._data).map(() => '?').join(', ') + ')'; + qb.insert(this._data); break; case QueryType.UPDATE: - sql += this.helper.getTableName(this.entityName, true); - const set = Object.keys(this._data).map(k => this.helper.wrap(k) + ' = ?'); - this.helper.updateVersionProperty(set); - sql += ' SET ' + set.join(', '); + qb.update(this._data); + this.helper.updateVersionProperty(qb); break; case QueryType.DELETE: - sql += 'FROM ' + this.helper.getTableName(this.entityName, true); + qb.delete(); break; case QueryType.TRUNCATE: - sql += 'TABLE ' + this.helper.getTableName(this.entityName, true); + qb.truncate(); break; } - return sql; + return qb; + } + + private createBuilder(): KnexQueryBuilder { + const tableName = this.helper.getTableName(this.entityName) + ([QueryType.SELECT, QueryType.COUNT].includes(this.type) ? ` as ${this.alias}` : ''); + const qb = this.knex(tableName); + + if (this.context) { + qb.transacting(this.context); + } + return qb; } private finalize(): void { @@ -427,15 +421,16 @@ export class QueryBuilder { if (this.metadata[field]) { // pivot table entity const prop = this.metadata[field].properties[this.entityName]; - this._joins[field] = this.helper.joinPivotTable(field, prop, this.alias, `e${this.aliasCounter++}`, 'left'); + this._joins[field] = this.helper.joinPivotTable(field, prop, this.alias, `e${this.aliasCounter++}`, 'leftJoin'); this._populateMap[field] = this._joins[field].alias; } else if (this.helper.isOneToOneInverse(field)) { const prop = this.metadata[this.entityName].properties[field]; - this._joins[prop.name] = this.helper.joinOneToReference(prop, this.alias, `e${this.aliasCounter++}`, 'left'); + this._joins[prop.name] = this.helper.joinOneToReference(prop, this.alias, `e${this.aliasCounter++}`, 'leftJoin'); this._populateMap[field] = this._joins[field].alias; } }); + SmartQueryHelper.processParams([this._data, this._cond, this._having]); this.finalized = true; } @@ -443,7 +438,7 @@ export class QueryBuilder { export interface JoinOptions { table: string; - type: 'left' | 'inner'; + type: 'leftJoin' | 'innerJoin'; alias: string; ownerAlias: string; joinColumn?: string; diff --git a/lib/query/QueryBuilderHelper.ts b/lib/query/QueryBuilderHelper.ts index 9aa55f356c9e..233cf74cb95d 100644 --- a/lib/query/QueryBuilderHelper.ts +++ b/lib/query/QueryBuilderHelper.ts @@ -1,3 +1,5 @@ +import * as Knex from 'knex'; +import { QueryBuilder as KnexQueryBuilder, Raw } from 'knex'; import { Utils, ValidationError } from '../utils'; import { EntityMetadata, EntityProperty } from '../decorators'; import { QueryOrderMap, QueryOrderNumeric, QueryType } from './enums'; @@ 
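COUNT is now a dedicated query type rendered through knex' count()/countDistinct() rather than string concatenation – this mirrors how AbstractSqlDriver.count() uses the builder. Entity and field names are assumptions:

import { MikroORM } from 'mikro-orm';

export async function countAuthors(orm: MikroORM): Promise<number> {
  const qb = orm.em.createQueryBuilder('Author');
  const res = await qb.count('id', true).where({ termsAccepted: true }).execute('get', false);

  return +res.count; // the aggregate is aliased as `count`
}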
-9,14 +11,14 @@ import { LockMode } from '../unit-of-work'; export class QueryBuilderHelper { static readonly GROUP_OPERATORS = { - $and: 'AND', - $or: 'OR', + $and: 'and', + $or: 'or', }; static readonly OPERATORS = { $eq: '=', - $in: 'IN', - $nin: 'NOT IN', + $in: 'in', + $nin: 'not in', $gt: '>', $gte: '>=', $lt: '<', @@ -24,76 +26,35 @@ export class QueryBuilderHelper { $ne: '!=', }; - private readonly quoteChar = this.platform.getSchemaHelper().getIdentifierQuoteCharacter(); - constructor(private readonly entityName: string, private readonly alias: string, private readonly aliasMap: Record, private readonly metadata: Record, + private readonly knex: Knex, private readonly platform: Platform) { } - getWhereParams(conditions: Record): any[] { - const ret: any[] = []; - - Object.entries(conditions).forEach(([key, cond]) => { - if (['$and', '$or', '$not'].includes(key)) { - return ret.push(...this.getGroupWhereParams(key, cond)); - } - - if (cond === null) { - return; - } - - // grouped condition for one field - if (Utils.isObject(cond) && Object.keys(cond).length > 1) { - const subConditions = Object.entries(cond).map(([subKey, subValue]) => ({ [key]: { [subKey]: subValue } })); - return ret.push(...this.getWhereParams({ $and: subConditions })); - } - - if (cond instanceof RegExp) { - return ret.push(this.getRegExpParam(cond)); - } - - if (!Utils.isObject(cond) && !Array.isArray(cond)) { - return ret.push(cond); - } - - ret.push(...this.processComplexParam(key, cond)); - }); - - return ret; - } - - wrap(field: string) { - if (field === '*') { - return field; - } - - return this.quoteChar + field + this.quoteChar; - } - - mapper(type: QueryType, field: string, value?: any, alias?: string): string { + mapper(type: QueryType, field: string, value?: any, alias?: string): string | Raw { let ret = field; - const customExpression = field.match(/\(.*\)| |^\d/); + const customExpression = this.isCustomExpression(field); // do not wrap custom expressions if (!customExpression) { - ret = this.prefixAndWrap(field); + ret = this.prefix(field); } - if (typeof value !== 'undefined') { - ret += this.processValue(field, value); + if (alias) { + ret += ' as ' + alias; } - if (alias) { - ret += ' AS ' + this.wrap(alias); + if (customExpression) { + return this.knex.raw(ret, value); } - if (type !== QueryType.SELECT || customExpression || this.isPrefixed(ret)) { + if (![QueryType.SELECT, QueryType.COUNT].includes(type) || this.isPrefixed(ret)) { return ret; } - return this.wrap(this.alias) + '.' + ret; + return this.alias + '.' 
+ ret; } processData(data: any): any { @@ -103,7 +64,7 @@ export class QueryBuilderHelper { if (this.metadata[this.entityName] && this.metadata[this.entityName].properties[k]) { const prop = this.metadata[this.entityName].properties[k]; - if (Array.isArray(data[k])) { + if (Array.isArray(data[k]) || (Utils.isObject(data[k]) && !(data[k] instanceof Date))) { data[k] = JSON.stringify(data[k]); } @@ -116,7 +77,7 @@ export class QueryBuilderHelper { return data; } - joinOneToReference(prop: EntityProperty, ownerAlias: string, alias: string, type: 'left' | 'inner'): JoinOptions { + joinOneToReference(prop: EntityProperty, ownerAlias: string, alias: string, type: 'leftJoin' | 'innerJoin'): JoinOptions { const prop2 = this.metadata[prop.type].properties[prop.mappedBy || prop.inversedBy]; return { table: this.getTableName(prop.type), @@ -130,7 +91,7 @@ export class QueryBuilderHelper { }; } - joinManyToOneReference(prop: EntityProperty, ownerAlias: string, alias: string, type: 'left' | 'inner'): JoinOptions { + joinManyToOneReference(prop: EntityProperty, ownerAlias: string, alias: string, type: 'leftJoin' | 'innerJoin'): JoinOptions { return { table: this.getTableName(prop.type), joinColumn: prop.inverseJoinColumn, @@ -142,7 +103,7 @@ export class QueryBuilderHelper { }; } - joinManyToManyReference(prop: EntityProperty, ownerAlias: string, alias: string, pivotAlias: string, type: 'left' | 'inner'): Record { + joinManyToManyReference(prop: EntityProperty, ownerAlias: string, alias: string, pivotAlias: string, type: 'leftJoin' | 'innerJoin'): Record { const join = { type, ownerAlias, @@ -173,7 +134,7 @@ export class QueryBuilderHelper { return ret; } - joinPivotTable(field: string, prop: EntityProperty, ownerAlias: string, alias: string, type: 'left' | 'inner'): JoinOptions { + joinPivotTable(field: string, prop: EntityProperty, ownerAlias: string, alias: string, type: 'leftJoin' | 'innerJoin'): JoinOptions { const prop2 = this.metadata[field].properties[prop.mappedBy || prop.inversedBy]; return { table: this.metadata[field].collection, @@ -187,14 +148,16 @@ export class QueryBuilderHelper { }; } - processJoins(joins: Record): string { - return Object.values(joins).map(join => { - const type = join.type === 'inner' ? '' : join.type.toUpperCase() + ' '; - return ` ${type}JOIN ${this.wrap(join.table)} AS ${this.wrap(join.alias)} ON ${this.wrap(join.ownerAlias)}.${this.wrap(join.primaryKey!)} = ${this.wrap(join.alias)}.${this.wrap(join.joinColumn!)}`; - }).join(''); + processJoins(qb: KnexQueryBuilder, joins: Record): void { + Object.values(joins).forEach(join => { + const table = `${join.table} as ${join.alias}`; + const left = `${join.ownerAlias}.${join.primaryKey!}`; + const right = `${join.alias}.${join.joinColumn!}`; + qb[join.type](table, left, right); + }); } - mapJoinColumns(type: QueryType, join: JoinOptions): string[] { + mapJoinColumns(type: QueryType, join: JoinOptions): (string | Raw)[] { if (join.prop && join.prop.reference === ReferenceType.ONE_TO_ONE && !join.prop.owner) { return [this.mapper(type, `${join.alias}.${join.inverseJoinColumn}`, undefined, join.prop.fieldName)]; } @@ -210,14 +173,8 @@ export class QueryBuilderHelper { return prop && prop.reference === ReferenceType.ONE_TO_ONE && !prop.owner; } - getTableName(entityName: string, wrap = false): string { - const name = this.metadata[entityName] ? 
this.metadata[entityName].collection : entityName; - - if (wrap) { - return this.wrap(name); - } - - return name; + getTableName(entityName: string): string { + return this.metadata[entityName] ? this.metadata[entityName].collection : entityName; } getRegExpParam(re: RegExp): string { @@ -243,27 +200,74 @@ export class QueryBuilderHelper { return `%${value}%`; } - getQueryCondition(type: QueryType, cond: any): string[] { - return Object.keys(cond).map(k => { + appendQueryCondition(type: QueryType, cond: any, qb: KnexQueryBuilder, operator?: '$and' | '$or', method: 'where' | 'having' = 'where'): void { + Object.keys(cond).forEach(k => { if (k === '$and' || k === '$or') { - return this.getGroupQueryCondition(type, k, cond[k]); + return this.appendGroupCondition(type, qb, k, method, cond[k]); } if (k === '$not') { - return 'NOT (' + this.getQueryCondition(type, cond[k])[0] + ')'; + const m = operator === '$or' ? 'orWhereNot' : 'whereNot'; + return qb[m](inner => this.appendQueryCondition(type, cond[k], inner)); } - // grouped condition for one field - if (Utils.isObject(cond[k]) && Object.keys(cond[k]).length > 1) { - const subCondition = Object.entries(cond[k]).map(([subKey, subValue]) => ({ [k]: { [subKey]: subValue } })); - return this.getGroupQueryCondition(type, '$and', subCondition); + this.appendQuerySubCondition(qb, type, method, cond, k, operator); + }); + } + + private appendQuerySubCondition(qb: KnexQueryBuilder, type: QueryType, method: 'where' | 'having', cond: any, key: string, operator?: '$and' | '$or'): void { + const m = operator === '$or' ? 'orWhere' : method; + + if (cond[key] instanceof RegExp) { + return void qb[m](this.mapper(type, key) as string, 'like', this.getRegExpParam(cond[key])); + } + + if (Utils.isObject(cond[key]) && !(cond[key] instanceof Date)) { + return this.processObjectSubCondition(cond, key, qb, method, m, type); + } + + if (this.isCustomExpression(key)) { + return this.processCustomExpression(key, cond, qb, m, type); + } + + const op = cond[key] === null ? 'is' : '='; + + qb[m](this.mapper(type, key, cond[key]) as string, op, cond[key]); + } + + private processCustomExpression(key: string, cond: any, qb: KnexQueryBuilder, m: 'where' | 'orWhere' | 'having', type: QueryType): void { + // unwind parameters when ? found in field name + const count = key.concat('?').match(/\?/g)!.length - 1; + const params1 = cond[key].slice(0, count).map((c: any) => Utils.isObject(c) ? 
JSON.stringify(c) : c); + const params2 = cond[key].slice(count); + + if (params2.length > 0) { + return void qb[m](this.mapper(type, key, params1) as string, params2); + } + + qb[m](this.mapper(type, key, params1) as string); + } + + private processObjectSubCondition(cond: any, key: string, qb: Knex.QueryBuilder, method: 'where' | 'having', m: 'where' | 'orWhere' | 'having', type: QueryType): void { + // grouped condition for one field + if (Object.keys(cond[key]).length > 1) { + const subCondition = Object.entries(cond[key]).map(([subKey, subValue]) => ({ [key]: { [subKey]: subValue } })); + return void qb[m](inner => subCondition.map((sub: any) => this.appendQueryCondition(type, sub, inner, '$and', method))); + } + + // operators + for (const [op, replacement] of Object.entries(QueryBuilderHelper.OPERATORS)) { + if (!(op in cond[key])) { + continue; } - return this.mapper(type, k, cond[k]); - }); + qb[m](this.mapper(type, key) as string, replacement, cond[key][op]); + + break; + } } - getQueryOrder(type: QueryType, orderBy: QueryOrderMap, populate: Record): string[] { + getQueryOrder(type: QueryType, orderBy: QueryOrderMap, populate: Record): { column: string, order: string }[] { return Object.keys(orderBy).map(k => { let alias = this.alias; let field = k; @@ -276,35 +280,17 @@ export class QueryBuilderHelper { const direction = orderBy[k]; const order = Utils.isNumber(direction) ? QueryOrderNumeric[direction] : direction; - return this.mapper(type, `${alias}.${field}`) + ' ' + order; + return { column: this.mapper(type, `${alias}.${field}`) as string, order: order.toLowerCase() }; }); } - getClause(type: string, clause: string, data: any): string { - if (Utils.isEmpty(data)) { - return ''; - } - - return ` ${type} ${clause}`; - } - - finalize(type: QueryType, sql: string, meta?: EntityMetadata): string { - let append = ''; + finalize(type: QueryType, qb: KnexQueryBuilder, meta?: EntityMetadata): void { const useReturningStatement = type === QueryType.INSERT && this.platform.usesReturningStatement(); if (useReturningStatement && meta) { const returningProps = Object.values(meta.properties).filter(prop => prop.primary || prop.default); - append = ` RETURNING ${returningProps.map(prop => this.wrap(prop.fieldName)).join(', ')}`; - } - - if (this.platform.getParameterPlaceholder() === '?') { - return sql + append; + qb.returning(returningProps.map(prop => prop.fieldName)); } - - let index = 1; - return sql.replace(/(\?)/g, () => { - return this.platform.getParameterPlaceholder(index++); - }) + append; } splitField(field: string): [string, string] { @@ -337,23 +323,21 @@ export class QueryBuilderHelper { } } - getLockSQL(lockMode?: LockMode): string { + getLockSQL(qb: KnexQueryBuilder, lockMode?: LockMode): void { if (lockMode === LockMode.PESSIMISTIC_READ) { - return ' ' + this.platform.getReadLockSQL(); + return void qb.forShare(); } if (lockMode === LockMode.PESSIMISTIC_WRITE) { - return ' ' + this.platform.getWriteLockSQL(); + return void qb.forUpdate(); } if (lockMode === LockMode.OPTIMISTIC && this.metadata[this.entityName] && !this.metadata[this.entityName].versionProperty) { throw ValidationError.lockFailed(this.entityName); } - - return ''; } - updateVersionProperty(set: string[]): void { + updateVersionProperty(qb: KnexQueryBuilder): void { const meta = this.metadata[this.entityName]; if (!meta || !meta.versionProperty) { @@ -361,104 +345,42 @@ export class QueryBuilderHelper { } const versionProperty = meta.properties[meta.versionProperty]; - let sql = 
`${this.wrap(versionProperty.fieldName)} = `; + let sql = versionProperty.fieldName + ' + 1'; if (versionProperty.type.toLowerCase() === 'date') { - sql += this.platform.getCurrentTimestampSQL(versionProperty.length); - } else { - sql += this.wrap(versionProperty.fieldName) + ' + 1'; + sql = this.platform.getCurrentTimestampSQL(versionProperty.length); } - set.push(sql); + qb.update(versionProperty.fieldName, this.knex.raw(sql)); } - private processComplexParam(key: string, cond: any): any[] { - // unwind parameters when ? found in field name - const customExpression = key.match(/\(.*\)| |\?/) && Array.isArray(cond); - - if (customExpression) { - const count = key.concat('?').match(/\?/g)!.length - 1; - return cond.slice(0, count).map((c: any) => Utils.isObject(c) ? JSON.stringify(c) : c).concat(cond.slice(count)); - } - - const operator = Object.keys(QueryBuilderHelper.OPERATORS).find(op => op in cond)!; - - if (operator) { - return Utils.asArray(cond[operator]); - } - - return Utils.asArray(cond); + private isCustomExpression(field: string): boolean { + return !!field.match(/[ ?<>=()]|^\d/); } - private prefixAndWrap(field: string): string { + private prefix(field: string): string { if (!this.isPrefixed(field)) { - return this.wrap(this.fieldName(field, this.alias)); + return this.fieldName(field, this.alias); } const [a, f] = field.split('.'); - return this.wrap(a) + '.' + this.wrap(this.fieldName(f, a)); + return a + '.' + this.fieldName(f, a); } - private getGroupWhereParams(key: string, cond: Record): any[] { - if (key === '$and' || key === '$or') { - return Utils.flatten(cond.map((sub: any) => this.getWhereParams(sub))); - } else { - return this.getWhereParams(cond); - } - } - - private processValue(field: string, value: any): string { - if (value === null) { - return ' IS NULL'; - } - - if (value instanceof RegExp) { - return ' LIKE ?'; - } - - if (Utils.isObject(value) && !(value instanceof Date)) { - return this.processObjectValue(value); - } - - const wildcards = field.concat('?').match(/\?/g)!.length - 1; - - if (Array.isArray(value) && value.length === wildcards) { - return ''; - } - - return ' = ?'; - } - - private processObjectValue(value: any): string { - let ret = ''; - - for (const [op, replacement] of Object.entries(QueryBuilderHelper.OPERATORS)) { - if (!(op in value)) { - continue; + private appendGroupCondition(type: QueryType, qb: KnexQueryBuilder, operator: '$and' | '$or', method: 'where' | 'having', subCondition: any[]): void { + const m = operator === '$or' ? 'orWhere' : 'andWhere'; + qb[method](outer => subCondition.forEach((sub: any) => { + if (Object.keys(sub).length === 1) { + return this.appendQueryCondition(type, sub, outer, operator); } - const token = Array.isArray(value[op]) ? `(${value[op].map(() => '?').join(', ')})` : '?'; - ret = ` ${replacement} ${token}`; - - break; - } - - return ret; - } - - private getGroupQueryCondition(type: QueryType, operator: '$and' | '$or', subCondition: any[]): string { - const glue = QueryBuilderHelper.GROUP_OPERATORS[operator]; - const group = subCondition.map(sub => { - const cond = this.getQueryCondition(type, sub); - return cond.length > 1 ? 
'(' + cond.join(` AND `) + ')' : cond[0]; - }); - - return '(' + group.join(` ${glue} `) + ')'; + outer[m](inner => this.appendQueryCondition(type, sub, inner, '$and')); + })); } private isPrefixed(field: string): boolean { - return new RegExp(`${this.quoteChar}?\\w+${this.quoteChar}?\\.`).test(field); + return !!field.match(/\w+\./); } private fieldName(field: string, alias?: string): string { diff --git a/lib/query/enums.ts b/lib/query/enums.ts index e3d9b10b7092..14d6f18e82c7 100644 --- a/lib/query/enums.ts +++ b/lib/query/enums.ts @@ -1,13 +1,13 @@ export enum QueryType { TRUNCATE = 'TRUNCATE', SELECT = 'SELECT', + COUNT = 'COUNT', INSERT = 'INSERT', UPDATE = 'UPDATE', DELETE = 'DELETE', } export enum QueryFlag { - COUNT = 'SELECT', DISTINCT = 'DISTINCT', } diff --git a/lib/schema/MySqlSchemaHelper.ts b/lib/schema/MySqlSchemaHelper.ts index 485916db0a87..5ca89a997063 100644 --- a/lib/schema/MySqlSchemaHelper.ts +++ b/lib/schema/MySqlSchemaHelper.ts @@ -1,5 +1,6 @@ import { SchemaHelper } from './SchemaHelper'; import { EntityProperty } from '../decorators'; +import { MySqlTableBuilder } from 'knex'; export class MySqlSchemaHelper extends SchemaHelper { @@ -20,28 +21,21 @@ export class MySqlSchemaHelper extends SchemaHelper { date: 0, }; - getIdentifierQuoteCharacter(): string { - return '`'; - } - getSchemaBeginning(): string { - return 'SET NAMES utf8;\nSET FOREIGN_KEY_CHECKS=0;\n\n\n'; + return 'set names utf8;\nset foreign_key_checks = 0;\n\n'; } getSchemaEnd(): string { - return 'SET FOREIGN_KEY_CHECKS=1;\n'; + return 'set foreign_key_checks = 1;\n'; } - getSchemaTableEnd(): string { - return ' ENGINE=InnoDB DEFAULT CHARSET=utf8'; + finalizeTable(table: MySqlTableBuilder): void { + table.engine('InnoDB'); + table.charset('utf8'); } getTypeDefinition(prop: EntityProperty): string { return super.getTypeDefinition(prop, MySqlSchemaHelper.TYPES, MySqlSchemaHelper.DEFAULT_TYPE_LENGTHS); } - getUnsignedSuffix(prop: EntityProperty): string { - return ' unsigned'; - } - } diff --git a/lib/schema/PostgreSqlSchemaHelper.ts b/lib/schema/PostgreSqlSchemaHelper.ts index 0400822621af..cf71a718331a 100644 --- a/lib/schema/PostgreSqlSchemaHelper.ts +++ b/lib/schema/PostgreSqlSchemaHelper.ts @@ -1,5 +1,5 @@ import { SchemaHelper } from './SchemaHelper'; -import { EntityMetadata, EntityProperty } from '../decorators'; +import { EntityProperty } from '../decorators'; export class PostgreSqlSchemaHelper extends SchemaHelper { @@ -20,38 +20,19 @@ export class PostgreSqlSchemaHelper extends SchemaHelper { }; getSchemaBeginning(): string { - return `SET NAMES 'utf8';\nSET session_replication_role = 'replica';\n\n\n`; + return `set names 'utf8';\nset session_replication_role = 'replica';\n\n`; } getSchemaEnd(): string { - return `SET session_replication_role = 'origin';\n`; - } - - getAutoIncrementStatement(meta: EntityMetadata): string { - return `DEFAULT NEXTVAL('${meta.collection}_seq')`; + return `set session_replication_role = 'origin';\n`; } getTypeDefinition(prop: EntityProperty): string { return super.getTypeDefinition(prop, PostgreSqlSchemaHelper.TYPES, PostgreSqlSchemaHelper.DEFAULT_TYPE_LENGTHS); } - getUnsignedSuffix(prop: EntityProperty): string { - return ` check (${this.quoteIdentifier(prop.fieldName)} > 0)`; - } - - supportsSequences(): boolean { - return true; - } - indexForeignKeys() { return false; } - dropTable(meta: EntityMetadata): string { - let ret = `DROP TABLE IF EXISTS ${this.quoteIdentifier(meta.collection)} CASCADE;\n`; - ret += `DROP SEQUENCE IF EXISTS 
${this.quoteIdentifier(meta.collection + '_seq')};\n`; - - return ret; - } - } diff --git a/lib/schema/SchemaGenerator.ts b/lib/schema/SchemaGenerator.ts index a51a88eba318..36c94ae1d2d1 100644 --- a/lib/schema/SchemaGenerator.ts +++ b/lib/schema/SchemaGenerator.ts @@ -1,4 +1,5 @@ -import { Cascade, IDatabaseDriver, ReferenceType } from '..'; +import { ColumnBuilder, TableBuilder } from 'knex'; +import { AbstractSqlDriver, Cascade, ReferenceType, Utils } from '..'; import { EntityMetadata, EntityProperty } from '../decorators'; import { Platform } from '../platforms'; @@ -6,20 +7,20 @@ export class SchemaGenerator { private readonly platform: Platform = this.driver.getPlatform(); private readonly helper = this.platform.getSchemaHelper(); + private readonly knex = this.driver.getConnection().getKnex(); - constructor(private readonly driver: IDatabaseDriver, + constructor(private readonly driver: AbstractSqlDriver, private readonly metadata: Record) { } generate(): string { let ret = this.helper.getSchemaBeginning(); + Object.values(this.metadata).forEach(meta => ret += this.knex.schema.dropTableIfExists(meta.collection).toQuery() + ';\n'); + ret += '\n'; + Object.values(this.metadata).forEach(meta => ret += this.createTable(meta)); Object.values(this.metadata).forEach(meta => { - ret += this.helper.dropTable(meta) + '\n'; - ret += this.createTable(meta) + '\n'; - }); - - Object.values(this.metadata).forEach(meta => { - ret += this.createForeignKeys(meta); + const alter = this.knex.schema.alterTable(meta.collection, table => this.createForeignKeys(table, meta)).toQuery(); + ret += alter ? alter + ';\n\n' : ''; }); ret += this.helper.getSchemaEnd(); @@ -28,29 +29,13 @@ export class SchemaGenerator { } private createTable(meta: EntityMetadata): string { - const pkProp = meta.properties[meta.primaryKey]; - let ret = ''; - - if (this.helper.supportsSequences() && pkProp.type === 'number') { - ret += `CREATE SEQUENCE ${this.helper.quoteIdentifier(meta.collection + '_seq')};\n`; - } - - ret += `CREATE TABLE ${this.helper.quoteIdentifier(meta.collection)} (\n`; - - Object - .values(meta.properties) - .filter(prop => this.shouldHaveColumn(prop)) - .forEach(prop => ret += ' ' + this.createTableColumn(meta, prop) + ',\n'); - - if (this.helper.supportsSchemaConstraints()) { - ret += this.createIndexes(meta); - } else { - ret = ret.substr(0, ret.length - 2) + '\n'; - } - - ret += `)${this.helper.getSchemaTableEnd()};\n\n`; - - return ret; + return this.knex.schema.createTable(meta.collection, table => { + Object + .values(meta.properties) + .filter(prop => this.shouldHaveColumn(prop)) + .forEach(prop => this.createTableColumn(table, prop)); + this.helper.finalizeTable(table); + }).toQuery() + ';\n\n'; } private shouldHaveColumn(prop: EntityProperty): boolean { @@ -69,97 +54,79 @@ export class SchemaGenerator { return prop.reference === ReferenceType.MANY_TO_ONE || (prop.reference === ReferenceType.ONE_TO_ONE && prop.owner); } - private createTableColumn(meta: EntityMetadata, prop: EntityProperty, alter = false): string { - const fieldName = prop.fieldName; - const ret = this.helper.quoteIdentifier(fieldName) + ' ' + this.type(prop); - const nullable = (alter && this.platform.requiresNullableForAlteringColumn()) || prop.nullable!; - - if (prop.primary) { - return ret + this.helper.createPrimaryKeyColumn(meta, prop); + private createTableColumn(table: TableBuilder, prop: EntityProperty, alter = false): ColumnBuilder { + if (prop.primary && prop.type === 'number') { + return 
table.increments(prop.fieldName); } - return ret + this.helper.createColumn(meta, prop, nullable); - } - - private createIndexes(meta: EntityMetadata): string { - let ret = ` PRIMARY KEY (${this.helper.quoteIdentifier(meta.properties[meta.primaryKey].fieldName)})`; - - if (this.helper.indexForeignKeys()) { - Object - .values(meta.properties) - .filter(prop => prop.reference === ReferenceType.MANY_TO_ONE) - .forEach(prop => ret += `,\n KEY ${this.helper.quoteIdentifier(prop.fieldName)} (${this.helper.quoteIdentifier(prop.fieldName)})`); - } + const type = this.type(prop); + const col = table.specificType(prop.fieldName, type); + this.configureColumn(prop, col, alter); - return ret + '\n'; + return col; } - private createForeignKeys(meta: EntityMetadata): string { - const ret = `ALTER TABLE ${this.helper.quoteIdentifier(meta.collection)}`; - let i = 1; + private configureColumn(prop: EntityProperty, col: ColumnBuilder, alter: boolean) { + const nullable = (alter && this.platform.requiresNullableForAlteringColumn()) || prop.nullable!; + const indexed = prop.reference !== ReferenceType.SCALAR && this.helper.indexForeignKeys(); + const hasDefault = typeof prop.default !== 'undefined'; // support falsy default values like `0`, `false` or empty string + + Utils.runIfNotEmpty(() => col.unique(), prop.unique); + Utils.runIfNotEmpty(() => col.nullable(), nullable); + Utils.runIfNotEmpty(() => col.notNullable(), !nullable); + Utils.runIfNotEmpty(() => col.primary(), prop.primary); + Utils.runIfNotEmpty(() => col.unsigned(), this.isUnsigned(prop)); + Utils.runIfNotEmpty(() => col.index(), indexed); + Utils.runIfNotEmpty(() => col.defaultTo(this.knex.raw('' + prop.default)), hasDefault); + } - const constraints = Object - .values(meta.properties) - .filter(prop => prop.reference === ReferenceType.MANY_TO_ONE || (prop.reference === ReferenceType.ONE_TO_ONE && prop.owner)) - .map(prop => this.createForeignKey(meta, prop, i++)); + private isUnsigned(prop: EntityProperty): boolean { + if (prop.reference === ReferenceType.MANY_TO_ONE || prop.reference === ReferenceType.ONE_TO_ONE) { + const meta2 = this.metadata[prop.type]; + const pk = meta2.properties[meta2.primaryKey]; - if (constraints.length === 0) { - return ''; + return pk.type === 'number'; } - if (this.helper.supportsSchemaMultiAlter()) { - return ret + '\n ' + constraints.join(',\n ') + ';\n\n\n'; - } + return (prop.primary || prop.unsigned) && prop.type === 'number'; + } - return constraints.map(c => ret + c + ';').join('\n') + '\n\n'; + private createForeignKeys(table: TableBuilder, meta: EntityMetadata): void { + Object.values(meta.properties) + .filter(prop => prop.reference === ReferenceType.MANY_TO_ONE || (prop.reference === ReferenceType.ONE_TO_ONE && prop.owner)) + .forEach(prop => this.createForeignKey(table, prop)); } - private createForeignKey(meta: EntityMetadata, prop: EntityProperty, index: number): string { + private createForeignKey(table: TableBuilder, prop: EntityProperty): void { if (this.helper.supportsSchemaConstraints()) { - return this.createForeignConstraint(meta, prop, index); - } + this.createForeignKeyReference(table.foreign(prop.fieldName) as ColumnBuilder, prop); - let ret = ' ADD ' + this.createTableColumn(meta, prop, true) + ' '; - ret += this.createForeignKeyReference(prop); - - return ret; - } - - private createForeignConstraint(meta: EntityMetadata, prop: EntityProperty, index: number): string { - let ret = ' ADD CONSTRAINT ' + this.helper.quoteIdentifier(meta.collection + '_ibfk_' + index); - ret += ` FOREIGN KEY 
(${this.helper.quoteIdentifier(prop.fieldName)}) `; - ret += this.createForeignKeyReference(prop); + return; + } - return ret; + const col = this.createTableColumn(table, prop, true); + this.createForeignKeyReference(col, prop); } - private createForeignKeyReference(prop: EntityProperty): string { + private createForeignKeyReference(col: ColumnBuilder, prop: EntityProperty): void { const meta2 = this.metadata[prop.type]; - const pk2 = meta2.properties[meta2.primaryKey].fieldName; - let ret = `REFERENCES ${this.helper.quoteIdentifier(meta2.collection)} (${this.helper.quoteIdentifier(pk2)})`; + const pk2 = meta2.properties[meta2.primaryKey]; + col.references(pk2.fieldName).inTable(meta2.collection); const cascade = prop.cascade.includes(Cascade.REMOVE) || prop.cascade.includes(Cascade.ALL); - ret += ` ON DELETE ${cascade ? 'CASCADE' : 'SET NULL'}`; + col.onDelete(cascade ? 'cascade' : 'set null'); if (prop.cascade.includes(Cascade.PERSIST) || prop.cascade.includes(Cascade.ALL)) { - ret += ' ON UPDATE CASCADE'; + col.onUpdate('cascade'); } - - return ret; } - private type(prop: EntityProperty, foreignKey?: EntityProperty): string { - const type = this.helper.getTypeDefinition(prop); - - if (prop.reference !== ReferenceType.SCALAR) { - const meta = this.metadata[prop.type]; - return this.type(meta.properties[meta.primaryKey], prop); - } - - if (prop.type === 'number' && prop.primary) { - return type + this.helper.getUnsignedSuffix(foreignKey || prop); + private type(prop: EntityProperty): string { + if (prop.reference === ReferenceType.SCALAR) { + return this.helper.getTypeDefinition(prop); } - return type; + const meta = this.metadata[prop.type]; + return this.helper.getTypeDefinition(meta.properties[meta.primaryKey]); } } diff --git a/lib/schema/SchemaHelper.ts b/lib/schema/SchemaHelper.ts index d0b92f55728b..fce0ea23f0d5 100644 --- a/lib/schema/SchemaHelper.ts +++ b/lib/schema/SchemaHelper.ts @@ -1,11 +1,8 @@ -import { EntityMetadata, EntityProperty } from '../decorators'; +import { TableBuilder } from 'knex'; +import { EntityProperty } from '../decorators'; export abstract class SchemaHelper { - getIdentifierQuoteCharacter(): string { - return '"'; - } - getSchemaBeginning(): string { return ''; } @@ -14,16 +11,8 @@ export abstract class SchemaHelper { return ''; } - getSchemaTableEnd(): string { - return ''; - } - - getAutoIncrementStatement(meta: EntityMetadata): string { - return 'AUTO_INCREMENT'; - } - - getPrimaryKeySubtype(meta: EntityMetadata): string { - return 'NOT NULL'; + finalizeTable(table: TableBuilder): void { + // } getTypeDefinition(prop: EntityProperty, types: Record = {}, lengths: Record = {}): string { @@ -38,73 +27,12 @@ export abstract class SchemaHelper { return type; } - getUnsignedSuffix(prop: EntityProperty): string { - return ''; - } - supportsSchemaConstraints(): boolean { return true; } - supportsSchemaMultiAlter(): boolean { - return true; - } - - supportsSequences(): boolean { - return false; - } - - quoteIdentifier(field: string): string { - const quoteChar = this.getIdentifierQuoteCharacter(); - return quoteChar + field + quoteChar; - } - - dropTable(meta: EntityMetadata): string { - const pkProp = meta.properties[meta.primaryKey]; - let ret = `DROP TABLE IF EXISTS ${this.quoteIdentifier(meta.collection)};\n`; - - if (this.supportsSequences() && pkProp.type === 'number') { - ret += `DROP SEQUENCE IF EXISTS ${this.quoteIdentifier(meta.collection + '_seq')};\n`; - } - - return ret; - } - indexForeignKeys() { return true; } - createPrimaryKeyColumn(meta: 
EntityMetadata, prop: EntityProperty): string { - let ret = ' ' + this.getPrimaryKeySubtype(meta); - - if (prop.type === 'number') { - ret += ' ' + this.getAutoIncrementStatement(meta); - } - - return ret; - } - - createColumn(meta: EntityMetadata, prop: EntityProperty, nullable: boolean): string { - let ret = ''; - - if (prop.unique) { - ret += ' UNIQUE'; - } - - if (!nullable) { - ret += ' NOT NULL'; - } - - // support falsy default values like `0`, `false or empty string - if (typeof prop.default !== 'undefined') { - return ret + ` DEFAULT ${prop.default}`; - } - - if (nullable) { - return ret + ` DEFAULT NULL`; - } - - return ret; - } - } diff --git a/lib/schema/SqliteSchemaHelper.ts b/lib/schema/SqliteSchemaHelper.ts index 04a5535fc001..64acd253dda6 100644 --- a/lib/schema/SqliteSchemaHelper.ts +++ b/lib/schema/SqliteSchemaHelper.ts @@ -1,29 +1,21 @@ import { SchemaHelper } from './SchemaHelper'; -import { EntityMetadata, EntityProperty } from '../decorators'; +import { EntityProperty } from '../decorators'; export class SqliteSchemaHelper extends SchemaHelper { static readonly TYPES = { - number: 'INTEGER', - boolean: 'INTEGER', - date: 'TEXT', - string: 'TEXT', + number: 'integer', + boolean: 'integer', + date: 'text', + string: 'text', }; - getAutoIncrementStatement(meta: EntityMetadata): string { - return 'AUTOINCREMENT'; - } - getSchemaBeginning(): string { - return 'PRAGMA foreign_keys=OFF;\n\n\n'; + return 'pragma foreign_keys = off;\n\n'; } getSchemaEnd(): string { - return 'PRAGMA foreign_keys=ON;\n'; - } - - getPrimaryKeySubtype(meta: EntityMetadata): string { - return 'PRIMARY KEY'; + return 'pragma foreign_keys = on;\n'; } getTypeDefinition(prop: EntityProperty): string { @@ -35,8 +27,4 @@ export class SqliteSchemaHelper extends SchemaHelper { return false; } - supportsSchemaMultiAlter(): boolean { - return false; - } - } diff --git a/lib/unit-of-work/ChangeSetPersister.ts b/lib/unit-of-work/ChangeSetPersister.ts index b77238f5e120..ef2968ab0a70 100644 --- a/lib/unit-of-work/ChangeSetPersister.ts +++ b/lib/unit-of-work/ChangeSetPersister.ts @@ -2,7 +2,7 @@ import { MetadataStorage } from '../metadata'; import { EntityMetadata, EntityProperty, IEntityType } from '../decorators'; import { EntityIdentifier } from '../entity'; import { ChangeSet, ChangeSetType } from './ChangeSet'; -import { IDatabaseDriver } from '..'; +import { IDatabaseDriver, Transaction } from '..'; import { QueryResult } from '../connections'; import { ValidationError } from '../utils'; @@ -13,7 +13,7 @@ export class ChangeSetPersister { constructor(private readonly driver: IDatabaseDriver, private readonly identifierMap: Record) { } - async persistToDatabase>(changeSet: ChangeSet): Promise { + async persistToDatabase>(changeSet: ChangeSet, ctx?: Transaction): Promise { const meta = this.metadata[changeSet.name]; // process references first @@ -22,35 +22,35 @@ export class ChangeSetPersister { } // persist the entity itself - await this.persistEntity(changeSet, meta); + await this.persistEntity(changeSet, meta, ctx); } - private async persistEntity>(changeSet: ChangeSet, meta: EntityMetadata): Promise { + private async persistEntity>(changeSet: ChangeSet, meta: EntityMetadata, ctx?: Transaction): Promise { let res: QueryResult | undefined; if (changeSet.type === ChangeSetType.DELETE) { - await this.driver.nativeDelete(changeSet.name, changeSet.entity.__primaryKey); + await this.driver.nativeDelete(changeSet.name, changeSet.entity.__primaryKey, ctx); } else if (changeSet.type === ChangeSetType.UPDATE) { - 
res = await this.updateEntity(meta, changeSet); + res = await this.updateEntity(meta, changeSet, ctx); this.mapReturnedValues(changeSet.entity, res, meta); } else if (changeSet.entity.__primaryKey) { // ChangeSetType.CREATE with primary key - res = await this.driver.nativeInsert(changeSet.name, changeSet.payload); + res = await this.driver.nativeInsert(changeSet.name, changeSet.payload, ctx); this.mapReturnedValues(changeSet.entity, res, meta); delete changeSet.entity.__initialized; } else { // ChangeSetType.CREATE without primary key - res = await this.driver.nativeInsert(changeSet.name, changeSet.payload); + res = await this.driver.nativeInsert(changeSet.name, changeSet.payload, ctx); this.mapReturnedValues(changeSet.entity, res, meta); - changeSet.entity.__primaryKey = res.insertId; + changeSet.entity.__primaryKey = changeSet.entity.__primaryKey || res.insertId; this.identifierMap[changeSet.entity.__uuid].setValue(changeSet.entity.__primaryKey); delete changeSet.entity.__initialized; } - await this.processOptimisticLock(meta, changeSet, res); + await this.processOptimisticLock(meta, changeSet, res, ctx); } - private async updateEntity>(meta: EntityMetadata, changeSet: ChangeSet): Promise { + private async updateEntity>(meta: EntityMetadata, changeSet: ChangeSet, ctx?: Transaction): Promise { if (!meta.versionProperty || !changeSet.entity[meta.versionProperty]) { - return this.driver.nativeUpdate(changeSet.name, changeSet.entity.__primaryKey, changeSet.payload); + return this.driver.nativeUpdate(changeSet.name, changeSet.entity.__primaryKey, changeSet.payload, ctx); } const cond = { @@ -58,16 +58,16 @@ export class ChangeSetPersister { [meta.versionProperty]: changeSet.entity[meta.versionProperty], }; - return this.driver.nativeUpdate(changeSet.name, cond, changeSet.payload); + return this.driver.nativeUpdate(changeSet.name, cond, changeSet.payload, ctx); } - private async processOptimisticLock>(meta: EntityMetadata, changeSet: ChangeSet, res: QueryResult | undefined) { + private async processOptimisticLock>(meta: EntityMetadata, changeSet: ChangeSet, res: QueryResult | undefined, ctx?: Transaction) { if (meta.versionProperty && changeSet.type === ChangeSetType.UPDATE && res && !res.affectedRows) { throw ValidationError.lockFailed(changeSet.entity); } if (meta.versionProperty && [ChangeSetType.CREATE, ChangeSetType.UPDATE].includes(changeSet.type)) { - const e = await this.driver.findOne(meta.name, changeSet.entity.__primaryKey, [], {}, [meta.versionProperty]); + const e = await this.driver.findOne(meta.name, changeSet.entity.__primaryKey, [], {}, [meta.versionProperty], undefined, ctx); changeSet.entity[meta.versionProperty as keyof T] = e![meta.versionProperty] as T[keyof T]; } } diff --git a/lib/unit-of-work/UnitOfWork.ts b/lib/unit-of-work/UnitOfWork.ts index 0bf2c3ab6910..443a86267bcc 100644 --- a/lib/unit-of-work/UnitOfWork.ts +++ b/lib/unit-of-work/UnitOfWork.ts @@ -6,7 +6,7 @@ import { ChangeSetPersister } from './ChangeSetPersister'; import { ChangeSet, ChangeSetType } from './ChangeSet'; import { EntityManager } from '../EntityManager'; import { Utils, ValidationError } from '../utils'; -import { FilterQuery, LockMode } from '..'; +import { FilterQuery, LockMode, Transaction } from '..'; export class UnitOfWork { @@ -99,14 +99,13 @@ export class UnitOfWork { return this.postCommitCleanup(); // nothing to do, do not start transaction } - const driver = this.em.getDriver(); - const runInTransaction = !driver.isInTransaction() && driver.getPlatform().supportsTransactions(); - 
const promise = Utils.runSerial(this.changeSets, changeSet => this.commitChangeSet(changeSet)); + const runInTransaction = !this.em.isInTransaction() && this.em.getDriver().getPlatform().supportsTransactions(); + const promise = async (tx: Transaction) => await Utils.runSerial(this.changeSets, changeSet => this.commitChangeSet(changeSet, tx)); if (runInTransaction) { - await driver.transactional(() => promise); + await this.em.getConnection().transactional(trx => promise(trx)); } else { - await promise; + await promise(this.em.getTransactionContext()); } this.postCommitCleanup(); @@ -232,10 +231,10 @@ export class UnitOfWork { .forEach(item => this.findNewEntities(item, visited)); } - private async commitChangeSet>(changeSet: ChangeSet): Promise { + private async commitChangeSet>(changeSet: ChangeSet, ctx: Transaction): Promise { const type = changeSet.type.charAt(0).toUpperCase() + changeSet.type.slice(1); await this.runHooks(`before${type}`, changeSet.entity, changeSet.payload); - await this.changeSetPersister.persistToDatabase(changeSet); + await this.changeSetPersister.persistToDatabase(changeSet, ctx); switch (changeSet.type) { case ChangeSetType.CREATE: this.em.merge(changeSet.entity); break; @@ -341,7 +340,7 @@ export class UnitOfWork { } private async lockPessimistic>(entity: T, mode: LockMode): Promise { - if (!this.em.getDriver().isInTransaction()) { + if (!this.em.isInTransaction()) { throw ValidationError.transactionRequired(); } diff --git a/lib/utils/Configuration.ts b/lib/utils/Configuration.ts index 24b661c2dbae..a0cff267ddae 100644 --- a/lib/utils/Configuration.ts +++ b/lib/utils/Configuration.ts @@ -10,11 +10,13 @@ import { Logger, Utils } from '../utils'; import { EntityManager } from '../EntityManager'; import { IDatabaseDriver } from '..'; import { Platform } from '../platforms'; +import { PoolConfig } from 'knex'; export class Configuration { static readonly DEFAULTS = { type: 'mongo', + pool: {}, entities: [], entitiesDirs: [], entitiesDirsTs: [], @@ -167,6 +169,7 @@ export interface MikroORMOptions { user?: string; password?: string; multipleStatements?: boolean; // for mysql driver + pool: PoolConfig, strict: boolean; logger: (message: string) => void; debug: boolean; diff --git a/lib/utils/Utils.ts b/lib/utils/Utils.ts index c98de70be179..e8fba2de4397 100644 --- a/lib/utils/Utils.ts +++ b/lib/utils/Utils.ts @@ -31,10 +31,6 @@ export class Utils { return [...new Set(items)]; } - static flatten(arrays: T[][]): T[] { - return [].concat(...arrays as any[]); - } - static merge(target: any, ...sources: any[]): any { if (!sources.length) { return target; @@ -243,4 +239,10 @@ export class Utils { return parts.join('/').replace(/\\/g, '/'); } + static runIfNotEmpty(clause: () => any, data: any): void { + if (!Utils.isEmpty(data)) { + clause(); + } + } + } diff --git a/package.json b/package.json index 878747f1e501..60d9d68a9e29 100644 --- a/package.json +++ b/package.json @@ -83,6 +83,7 @@ "fast-deep-equal": "^2.0.0", "fs-extra": "^8.0.0", "globby": "^10.0.0", + "knex": "^0.19.0", "ts-morph": "^3.0.0", "typescript": "^3.5.0", "uuid": "^3.3.2" @@ -93,6 +94,20 @@ "pg": "^7.10.0", "sqlite": "^3.0.0" }, + "peerDependenciesMeta": { + "mongodb": { + "optional": true + }, + "mysql2": { + "optional": true + }, + "pg": { + "optional": true + }, + "sqlite": { + "optional": true + } + }, "devDependencies": { "@commitlint/cli": "^8.0.0", "@commitlint/config-conventional": "^8.0.0", diff --git a/tests/EntityManager.mongo.test.ts b/tests/EntityManager.mongo.test.ts index 
8d1dcf551a4f..389043229d78 100644 --- a/tests/EntityManager.mongo.test.ts +++ b/tests/EntityManager.mongo.test.ts @@ -1245,9 +1245,7 @@ describe('EntityManagerMongo', () => { }); test('EM do not support transactions', async () => { - await expect(orm.em.beginTransaction()).rejects.toThrowError('Transactions are not supported by current driver'); - await expect(orm.em.rollback()).rejects.toThrowError('Transactions are not supported by current driver'); - await expect(orm.em.commit()).rejects.toThrowError('Transactions are not supported by current driver'); + await expect(orm.em.transactional(async em => em)).rejects.toThrowError('Transactions are not supported by current driver'); }); test('loading connected entity will not update identity map for associations', async () => { diff --git a/tests/EntityManager.mysql.test.ts b/tests/EntityManager.mysql.test.ts index 72f9748e0e9d..ab6809bdf6fd 100644 --- a/tests/EntityManager.mysql.test.ts +++ b/tests/EntityManager.mysql.test.ts @@ -47,10 +47,23 @@ describe('EntityManagerMySql', () => { const driver = orm.em.getDriver(); expect(driver instanceof MySqlDriver).toBe(true); await expect(driver.findOne(Book2.name, { foo: 'bar' })).resolves.toBeNull(); - const tag = await driver.nativeInsert(BookTag2.name, { name: 'tag name '}); + const tag = await driver.nativeInsert(BookTag2.name, { name: 'tag name'}); expect((await driver.nativeInsert(Book2.name, { uuid: v4(), tags: [tag.insertId] })).insertId).not.toBeNull(); - const res = await driver.getConnection().execute('SELECT 1 as count'); - expect(res[0]).toEqual({ count: 1 }); + await expect(driver.getConnection().execute('select 1 as count')).resolves.toEqual([{ count: 1 }]); + await expect(driver.getConnection().execute('select 1 as count', [], 'get')).resolves.toEqual({ count: 1 }); + await expect(driver.getConnection().execute('select 1 as count', [], 'run')).resolves.toEqual([{ count: 1 }]); + await expect(driver.getConnection().execute('insert into test2 (name) values (?)', ['test'], 'run')).resolves.toEqual({ + affectedRows: 1, + insertId: 1, + }); + await expect(driver.getConnection().execute('update test2 set name = ? 
where name = ?', ['test 2', 'test'], 'run')).resolves.toEqual({ + affectedRows: 1, + insertId: 0, + }); + await expect(driver.getConnection().execute('delete from test2 where name = ?', ['test 2'], 'run')).resolves.toEqual({ + affectedRows: 1, + insertId: 0, + }); expect(driver.getPlatform().denormalizePrimaryKey(1)).toBe(1); expect(driver.getPlatform().denormalizePrimaryKey('1')).toBe('1'); await expect(driver.find(BookTag2.name, { books: { $in: [1] } })).resolves.not.toBeNull(); @@ -58,9 +71,9 @@ describe('EntityManagerMySql', () => { test('driver appends errored query', async () => { const driver = orm.em.getDriver(); - const err1 = `Table 'mikro_orm_test.not_existing' doesn't exist\n in query: INSERT INTO \`not_existing\` (\`foo\`) VALUES (?)\n with params: ["bar"]`; + const err1 = `insert into \`not_existing\` (\`foo\`) values ('bar') - Table 'mikro_orm_test.not_existing' doesn't exist`; await expect(driver.nativeInsert('not_existing', { foo: 'bar' })).rejects.toThrowError(err1); - const err2 = `Table 'mikro_orm_test.not_existing' doesn't exist\n in query: DELETE FROM \`not_existing\``; + const err2 = `delete from \`not_existing\` - Table 'mikro_orm_test.not_existing' doesn't exist`; await expect(driver.nativeDelete('not_existing', {})).rejects.toThrowError(err2); }); @@ -152,62 +165,64 @@ describe('EntityManagerMySql', () => { test('transactions', async () => { const god1 = new Author2('God1', 'hello@heaven1.god'); - await orm.em.beginTransaction(); - await orm.em.persistAndFlush(god1); - await orm.em.rollback(); + try { + await orm.em.transactional(async em => { + await em.persistAndFlush(god1); + throw new Error(); // rollback the transaction + }); + } catch { } + const res1 = await orm.em.findOne(Author2, { name: 'God1' }); expect(res1).toBeNull(); - await orm.em.beginTransaction(); - const god2 = new Author2('God2', 'hello@heaven2.god'); - await orm.em.persistAndFlush(god2); - await orm.em.commit(); - const res2 = await orm.em.findOne(Author2, { name: 'God2' }); - expect(res2).not.toBeNull(); - await orm.em.transactional(async em => { - const god3 = new Author2('God3', 'hello@heaven3.god'); - await em.persist(god3); + const god2 = new Author2('God2', 'hello@heaven2.god'); + await em.persist(god2); }); - const res3 = await orm.em.findOne(Author2, { name: 'God3' }); - expect(res3).not.toBeNull(); + + const res2 = await orm.em.findOne(Author2, { name: 'God2' }); + expect(res2).not.toBeNull(); const err = new Error('Test'); try { await orm.em.transactional(async em => { - const god4 = new Author2('God4', 'hello@heaven4.god'); - await em.persist(god4); + const god3 = new Author2('God4', 'hello@heaven4.god'); + await em.persist(god3); throw err; }); } catch (e) { expect(e).toBe(err); - const res4 = await orm.em.findOne(Author2, { name: 'God4' }); - expect(res4).toBeNull(); + const res3 = await orm.em.findOne(Author2, { name: 'God4' }); + expect(res3).toBeNull(); } }); - test('nested transactions', async () => { - const mock = jest.fn(); - const logger = new Logger(mock, true); - Object.assign(orm.em.getConnection(), { logger }); + test('nested transactions with save-points', async () => { + await orm.em.transactional(async em => { + const god1 = new Author2('God1', 'hello1@heaven.god'); + + try { + await em.transactional(async em2 => { + await em2.persist(god1); + throw new Error(); // rollback the transaction + }); + } catch { } + + const res1 = await em.findOne(Author2, { name: 'God1' }); + expect(res1).toBeNull(); - // start outer transaction - const transaction = 
orm.em.transactional(async em => { - // do stuff inside inner transaction await em.transactional(async em2 => { - await em2.persist(new Author2('God', 'hello@heaven.god')); + const god2 = new Author2('God2', 'hello2@heaven.god'); + em2.persist(god2); }); - }); - // try to commit the outer transaction - await expect(transaction).resolves.toBeUndefined(); - expect(mock.mock.calls.length).toBe(3); - expect(mock.mock.calls[0][0]).toMatch('[query-logger] START TRANSACTION'); - expect(mock.mock.calls[2][0]).toMatch('[query-logger] COMMIT'); + const res2 = await em.findOne(Author2, { name: 'God2' }); + expect(res2).not.toBeNull(); + }); }); - test('nested transaction rollback will rollback the outer one as well', async () => { + test('nested transaction rollback with save-points will commit the outer one', async () => { const mock = jest.fn(); const logger = new Logger(mock, true); Object.assign(orm.em.getConnection(), { logger }); @@ -215,16 +230,26 @@ describe('EntityManagerMySql', () => { // start outer transaction const transaction = orm.em.transactional(async em => { // do stuff inside inner transaction and rollback - await em.beginTransaction(); - await em.persist(new Author2('God', 'hello@heaven.god')); - await em.rollback(); + try { + await em.transactional(async em2 => { + await em2.persistAndFlush(new Author2('God', 'hello@heaven.god')); + throw new Error(); // rollback the transaction + }); + } catch { } + + await em.persist(new Author2('God Persisted!', 'hello-persisted@heaven.god')); }); // try to commit the outer transaction - await expect(transaction).rejects.toThrowError('Transaction commit failed because the transaction has been marked for rollback only'); - expect(mock.mock.calls.length).toBe(3); - expect(mock.mock.calls[0][0]).toMatch('[query-logger] START TRANSACTION'); - expect(mock.mock.calls[2][0]).toMatch('[query-logger] ROLLBACK'); + await expect(transaction).resolves.toBeUndefined(); + expect(mock.mock.calls.length).toBe(6); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('savepoint trx'); + expect(mock.mock.calls[2][0]).toMatch('insert into `author2` (`created_at`, `email`, `name`, `terms_accepted`, `updated_at`) values (?, ?, ?, ?, ?)'); + expect(mock.mock.calls[3][0]).toMatch('rollback to savepoint trx'); + expect(mock.mock.calls[4][0]).toMatch('insert into `author2` (`created_at`, `email`, `name`, `terms_accepted`, `updated_at`) values (?, ?, ?, ?, ?)'); + expect(mock.mock.calls[5][0]).toMatch('commit'); + await expect(orm.em.findOne(Author2, { name: 'God Persisted!' })).resolves.not.toBeNull(); }); test('should load entities', async () => { @@ -516,9 +541,9 @@ describe('EntityManagerMySql', () => { }); expect(mock.mock.calls.length).toBe(3); - expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION'); - expect(mock.mock.calls[1][0]).toMatch('SELECT 1 FROM `author2` AS `e0` WHERE `e0`.`id` = ? FOR UPDATE'); - expect(mock.mock.calls[2][0]).toMatch('COMMIT'); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('select 1 from `author2` as `e0` where `e0`.`id` = ? for update'); + expect(mock.mock.calls[2][0]).toMatch('commit'); }); test('lock supports pessimistic locking [pessimistic read]', async () => { @@ -534,9 +559,9 @@ describe('EntityManagerMySql', () => { }); expect(mock.mock.calls.length).toBe(3); - expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION'); - expect(mock.mock.calls[1][0]).toMatch('SELECT 1 FROM `author2` AS `e0` WHERE `e0`.`id` = ? 
LOCK IN SHARE MODE'); - expect(mock.mock.calls[2][0]).toMatch('COMMIT'); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('select 1 from `author2` as `e0` where `e0`.`id` = ? lock in share mode'); + expect(mock.mock.calls[2][0]).toMatch('commit'); }); test('custom query expressions via query builder', async () => { @@ -674,16 +699,16 @@ describe('EntityManagerMySql', () => { Object.assign(orm.em.getConnection(), { logger }); const b1 = (await orm.em.findOne(FooBaz2, { id: baz.id }, ['bar']))!; - expect(mock.mock.calls[0][0]).toMatch('SELECT `e0`.*, `e1`.`id` AS `bar_id` FROM `foo_baz2` AS `e0` LEFT JOIN `foo_bar2` AS `e1` ON `e0`.`id` = `e1`.`baz_id` WHERE `e0`.`id` = ? LIMIT ?'); - expect(mock.mock.calls[1][0]).toMatch('SELECT `e0`.* FROM `foo_bar2` AS `e0` WHERE `e0`.`id` IN (?) ORDER BY `e0`.`id` ASC'); + expect(mock.mock.calls[0][0]).toMatch('select `e0`.*, `e1`.`id` as `bar_id` from `foo_baz2` as `e0` left join `foo_bar2` as `e1` on `e0`.`id` = `e1`.`baz_id` where `e0`.`id` = ? limit ?'); + expect(mock.mock.calls[1][0]).toMatch('select `e0`.* from `foo_bar2` as `e0` where `e0`.`id` in (?) order by `e0`.`id` asc'); expect(b1.bar).toBeInstanceOf(FooBar2); expect(b1.bar.id).toBe(bar.id); expect(b1.toJSON()).toMatchObject({ bar: bar.toJSON() }); orm.em.clear(); const b2 = (await orm.em.findOne(FooBaz2, { bar: bar.id }, ['bar']))!; - expect(mock.mock.calls[2][0]).toMatch('SELECT `e0`.*, `e1`.`id` AS `bar_id` FROM `foo_baz2` AS `e0` LEFT JOIN `foo_bar2` AS `e1` ON `e0`.`id` = `e1`.`baz_id` WHERE `e1`.`id` = ? LIMIT ?'); - expect(mock.mock.calls[3][0]).toMatch('SELECT `e0`.* FROM `foo_bar2` AS `e0` WHERE `e0`.`id` IN (?) ORDER BY `e0`.`id` ASC'); + expect(mock.mock.calls[2][0]).toMatch('select `e0`.*, `e1`.`id` as `bar_id` from `foo_baz2` as `e0` left join `foo_bar2` as `e1` on `e0`.`id` = `e1`.`baz_id` where `e1`.`id` = ? limit ?'); + expect(mock.mock.calls[3][0]).toMatch('select `e0`.* from `foo_bar2` as `e0` where `e0`.`id` in (?) order by `e0`.`id` asc'); expect(b2.bar).toBeInstanceOf(FooBar2); expect(b2.bar.id).toBe(bar.id); expect(b2.toJSON()).toMatchObject({ bar: bar.toJSON() }); @@ -1082,14 +1107,14 @@ describe('EntityManagerMySql', () => { // check fired queries expect(mock.mock.calls.length).toBe(8); - expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION'); - expect(mock.mock.calls[1][0]).toMatch('INSERT INTO `author2` (`name`, `email`, `created_at`, `updated_at`, `terms_accepted`) VALUES (?, ?, ?, ?, ?)'); - expect(mock.mock.calls[2][0]).toMatch('INSERT INTO `book2` (`title`, `uuid_pk`, `created_at`, `author_id`) VALUES (?, ?, ?, ?)'); - expect(mock.mock.calls[3][0]).toMatch('INSERT INTO `book2` (`title`, `uuid_pk`, `created_at`, `author_id`) VALUES (?, ?, ?, ?)'); - expect(mock.mock.calls[4][0]).toMatch('INSERT INTO `book2` (`title`, `uuid_pk`, `created_at`, `author_id`) VALUES (?, ?, ?, ?)'); - expect(mock.mock.calls[5][0]).toMatch('UPDATE `author2` SET `favourite_author_id` = ?, `updated_at` = ? 
WHERE `id` = ?'); - expect(mock.mock.calls[6][0]).toMatch('COMMIT'); - expect(mock.mock.calls[7][0]).toMatch('SELECT `e0`.* FROM `author2` AS `e0` WHERE `e0`.`id` = ?'); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('insert into `author2` (`created_at`, `email`, `name`, `terms_accepted`, `updated_at`) values (?, ?, ?, ?, ?)'); + expect(mock.mock.calls[2][0]).toMatch('insert into `book2` (`author_id`, `created_at`, `title`, `uuid_pk`) values (?, ?, ?, ?)'); + expect(mock.mock.calls[3][0]).toMatch('insert into `book2` (`author_id`, `created_at`, `title`, `uuid_pk`) values (?, ?, ?, ?)'); + expect(mock.mock.calls[4][0]).toMatch('insert into `book2` (`author_id`, `created_at`, `title`, `uuid_pk`) values (?, ?, ?, ?)'); + expect(mock.mock.calls[5][0]).toMatch('update `author2` set `favourite_author_id` = ?, `updated_at` = ? where `id` = ?'); + expect(mock.mock.calls[6][0]).toMatch('commit'); + expect(mock.mock.calls[7][0]).toMatch('select `e0`.* from `author2` as `e0` where `e0`.`id` = ?'); }); test('self referencing 1:1 (1 step)', async () => { diff --git a/tests/EntityManager.postgre.test.ts b/tests/EntityManager.postgre.test.ts index f90990991382..12d12aaa5c0c 100644 --- a/tests/EntityManager.postgre.test.ts +++ b/tests/EntityManager.postgre.test.ts @@ -47,10 +47,31 @@ describe('EntityManagerPostgre', () => { const driver = orm.em.getDriver(); expect(driver).toBeInstanceOf(PostgreSqlDriver); await expect(driver.findOne(Book2.name, { foo: 'bar' })).resolves.toBeNull(); - const tag = await driver.nativeInsert(BookTag2.name, { name: 'tag name '}); + const tag = await driver.nativeInsert(BookTag2.name, { name: 'tag name'}); await expect(driver.nativeInsert(Book2.name, { uuid: v4(), tags: [tag.insertId] })).resolves.not.toBeNull(); - const res = await driver.getConnection().execute('SELECT 1 as count'); - expect(res[0]).toEqual({ count: 1 }); + await expect(driver.getConnection().execute('select 1 as count')).resolves.toEqual([{ count: 1 }]); + await expect(driver.getConnection().execute('select 1 as count', [], 'get')).resolves.toEqual({ count: 1 }); + await expect(driver.getConnection().execute('select 1 as count', [], 'run')).resolves.toEqual({ + affectedRows: 1, + row: { + count: 1, + }, + }); + await expect(driver.getConnection().execute('insert into test2 (name) values (?) returning id', ['test'], 'run')).resolves.toEqual({ + affectedRows: 1, + insertId: 1, + row: { + id: 1, + }, + }); + await expect(driver.getConnection().execute('update test2 set name = ? 
where name = ?', ['test 2', 'test'], 'run')).resolves.toEqual({ + affectedRows: 1, + insertId: 0, + }); + await expect(driver.getConnection().execute('delete from test2 where name = ?', ['test 2'], 'run')).resolves.toEqual({ + affectedRows: 1, + insertId: 0, + }); expect(driver.getPlatform().denormalizePrimaryKey(1)).toBe(1); expect(driver.getPlatform().denormalizePrimaryKey('1')).toBe('1'); await expect(driver.find(BookTag2.name, { books: { $in: [1] } })).resolves.not.toBeNull(); @@ -58,9 +79,9 @@ describe('EntityManagerPostgre', () => { test('driver appends errored query', async () => { const driver = orm.em.getDriver(); - const err1 = `relation "not_existing" does not exist\n in query: INSERT INTO "not_existing" ("foo") VALUES ($1)\n with params: ["bar"]`; + const err1 = `insert into "not_existing" ("foo") values ($1) - relation "not_existing" does not exist`; await expect(driver.nativeInsert('not_existing', { foo: 'bar' })).rejects.toThrowError(err1); - const err2 = `relation "not_existing" does not exist\n in query: DELETE FROM "not_existing"`; + const err2 = `delete from "not_existing" - relation "not_existing" does not exist`; await expect(driver.nativeDelete('not_existing', {})).rejects.toThrowError(err2); }); @@ -90,55 +111,58 @@ describe('EntityManagerPostgre', () => { test('transactions', async () => { const god1 = new Author2('God1', 'hello@heaven1.god'); - await orm.em.beginTransaction(); - await orm.em.persistAndFlush(god1); - await orm.em.rollback(); + try { + await orm.em.transactional(async em => { + await em.persistAndFlush(god1); + throw new Error(); // rollback the transaction + }); + } catch { } + const res1 = await orm.em.findOne(Author2, { name: 'God1' }); expect(res1).toBeNull(); - await orm.em.beginTransaction(); - const god2 = new Author2('God2', 'hello@heaven2.god'); - await orm.em.persistAndFlush(god2); - await orm.em.commit(); - const res2 = await orm.em.findOne(Author2, { name: 'God2' }); - expect(res2).not.toBeNull(); - await orm.em.transactional(async em => { - const god3 = new Author2('God3', 'hello@heaven3.god'); - await em.persist(god3); + const god2 = new Author2('God2', 'hello@heaven2.god'); + await em.persist(god2); }); - const res3 = await orm.em.findOne(Author2, { name: 'God3' }); - expect(res3).not.toBeNull(); + + const res2 = await orm.em.findOne(Author2, { name: 'God2' }); + expect(res2).not.toBeNull(); const err = new Error('Test'); try { await orm.em.transactional(async em => { - const god4 = new Author2('God4', 'hello@heaven4.god'); - await em.persist(god4); + const god3 = new Author2('God4', 'hello@heaven4.god'); + await em.persist(god3); throw err; }); } catch (e) { expect(e).toBe(err); - const res4 = await orm.em.findOne(Author2, { name: 'God4' }); - expect(res4).toBeNull(); + const res3 = await orm.em.findOne(Author2, { name: 'God4' }); + expect(res3).toBeNull(); } }); test('nested transactions with save-points', async () => { await orm.em.transactional(async em => { - const driver = em.getDriver(); const god1 = new Author2('God1', 'hello1@heaven.god'); - await driver.beginTransaction(); - await em.persistAndFlush(god1); - await driver.rollback(); + + try { + await em.transactional(async em2 => { + await em2.persistAndFlush(god1); + throw new Error(); // rollback the transaction + }); + } catch { } + const res1 = await em.findOne(Author2, { name: 'God1' }); expect(res1).toBeNull(); - await driver.beginTransaction(); - const god2 = new Author2('God2', 'hello2@heaven.god'); - await em.persistAndFlush(god2); - await driver.commit(); + await 
em.transactional(async em2 => { + const god2 = new Author2('God2', 'hello2@heaven.god'); + await em2.persistAndFlush(god2); + }); + const res2 = await em.findOne(Author2, { name: 'God2' }); expect(res2).not.toBeNull(); }); @@ -152,9 +176,12 @@ describe('EntityManagerPostgre', () => { // start outer transaction const transaction = orm.em.transactional(async em => { // do stuff inside inner transaction and rollback - await em.beginTransaction(); - await em.persistAndFlush(new Author2('God', 'hello@heaven.god')); - await em.rollback(); + try { + await em.transactional(async em2 => { + await em2.persistAndFlush(new Author2('God', 'hello@heaven.god')); + throw new Error(); // rollback the transaction + }); + } catch { } await em.persist(new Author2('God Persisted!', 'hello-persisted@heaven.god')); }); @@ -162,12 +189,12 @@ describe('EntityManagerPostgre', () => { // try to commit the outer transaction await expect(transaction).resolves.toBeUndefined(); expect(mock.mock.calls.length).toBe(6); - expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION'); - expect(mock.mock.calls[1][0]).toMatch('SAVEPOINT PostgreSqlDriver_2'); - expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5) RETURNING "id"'); - expect(mock.mock.calls[3][0]).toMatch('ROLLBACK TO SAVEPOINT PostgreSqlDriver_2'); - expect(mock.mock.calls[4][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5) RETURNING "id"'); - expect(mock.mock.calls[5][0]).toMatch('[query-logger] COMMIT'); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('savepoint trx'); + expect(mock.mock.calls[2][0]).toMatch('insert into "author2" ("created_at", "email", "name", "terms_accepted", "updated_at") values ($1, $2, $3, $4, $5) returning "id"'); + expect(mock.mock.calls[3][0]).toMatch('rollback to savepoint trx'); + expect(mock.mock.calls[4][0]).toMatch('insert into "author2" ("created_at", "email", "name", "terms_accepted", "updated_at") values ($1, $2, $3, $4, $5) returning "id"'); + expect(mock.mock.calls[5][0]).toMatch('commit'); await expect(orm.em.findOne(Author2, { name: 'God Persisted!' 
})).resolves.not.toBeNull(); }); @@ -452,9 +479,9 @@ describe('EntityManagerPostgre', () => { }); expect(mock.mock.calls.length).toBe(3); - expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION'); - expect(mock.mock.calls[1][0]).toMatch('SELECT 1 FROM "author2" AS "e0" WHERE "e0"."id" = $1 FOR UPDATE'); - expect(mock.mock.calls[2][0]).toMatch('COMMIT'); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('select 1 from "author2" as "e0" where "e0"."id" = $1 for update'); + expect(mock.mock.calls[2][0]).toMatch('commit'); }); test('findOne supports pessimistic locking [pessimistic read]', async () => { @@ -470,9 +497,9 @@ describe('EntityManagerPostgre', () => { }); expect(mock.mock.calls.length).toBe(3); - expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION'); - expect(mock.mock.calls[1][0]).toMatch('SELECT 1 FROM "author2" AS "e0" WHERE "e0"."id" = $1 FOR SHARE'); - expect(mock.mock.calls[2][0]).toMatch('COMMIT'); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('select 1 from "author2" as "e0" where "e0"."id" = $1 for share'); + expect(mock.mock.calls[2][0]).toMatch('commit'); }); test('stable results of serialization', async () => { @@ -933,14 +960,14 @@ describe('EntityManagerPostgre', () => { // check fired queries expect(mock.mock.calls.length).toBe(8); - expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION'); - expect(mock.mock.calls[1][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5)'); - expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "uuid_pk", "created_at", "author_id") VALUES ($1, $2, $3, $4)'); - expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "uuid_pk", "created_at", "author_id") VALUES ($1, $2, $3, $4)'); - expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "uuid_pk", "created_at", "author_id") VALUES ($1, $2, $3, $4)'); - expect(mock.mock.calls[5][0]).toMatch('UPDATE "author2" SET "favourite_author_id" = $1, "updated_at" = $2 WHERE "id" = $3'); - expect(mock.mock.calls[6][0]).toMatch('COMMIT'); - expect(mock.mock.calls[7][0]).toMatch('SELECT "e0".* FROM "author2" AS "e0" WHERE "e0"."id" = $1'); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('insert into "author2" ("created_at", "email", "name", "terms_accepted", "updated_at") values ($1, $2, $3, $4, $5)'); + expect(mock.mock.calls[2][0]).toMatch('insert into "book2" ("author_id", "created_at", "title", "uuid_pk") values ($1, $2, $3, $4)'); + expect(mock.mock.calls[2][0]).toMatch('insert into "book2" ("author_id", "created_at", "title", "uuid_pk") values ($1, $2, $3, $4)'); + expect(mock.mock.calls[2][0]).toMatch('insert into "book2" ("author_id", "created_at", "title", "uuid_pk") values ($1, $2, $3, $4)'); + expect(mock.mock.calls[5][0]).toMatch('update "author2" set "favourite_author_id" = $1, "updated_at" = $2 where "id" = $3'); + expect(mock.mock.calls[6][0]).toMatch('commit'); + expect(mock.mock.calls[7][0]).toMatch('select "e0".* from "author2" as "e0" where "e0"."id" = $1'); }); test('EM supports smart search conditions', async () => { diff --git a/tests/EntityManager.sqlite.test.ts b/tests/EntityManager.sqlite.test.ts index 174092e83cca..c10f91468557 100644 --- a/tests/EntityManager.sqlite.test.ts +++ b/tests/EntityManager.sqlite.test.ts @@ -44,15 +44,28 @@ describe('EntityManagerSqlite', () => { expect(driver instanceof SqliteDriver).toBe(true); 
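      // A minimal usage sketch of the APIs the assertions below exercise (illustrative only, not
      // part of this patch): Connection.execute() in 'run' mode resolves to { affectedRows, insertId },
      // and the removed beginTransaction/commit/rollback calls are replaced by em.transactional(),
      // where throwing from the callback rolls back and a nested transactional() call uses a savepoint.
      // It reuses the Author3/test3 fixtures of this suite and is wrapped in a function so it is not executed here.
      const usageSketch = async () => {
        // 'run' mode returns a QueryResult instead of rows
        const res = await driver.getConnection().execute('insert into test3 (name) values (?)', ['test'], 'run');
        // res.affectedRows === 1, res.insertId === 1 (see the expectations below)

        await orm.em.transactional(async em => {
          await em.persistAndFlush(new Author3('God2', 'hello2@heaven.god'));

          try {
            await em.transactional(async em2 => { // nested call -> savepoint
              await em2.persistAndFlush(new Author3('God1', 'hello1@heaven.god'));
              throw new Error(); // rolls back to the savepoint only
            });
          } catch { }
        }); // outer transaction commits here
      };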
expect(await driver.findOne(Book3.name, { foo: 'bar' })).toBeNull(); expect(await driver.nativeInsert(BookTag3.name, { name: 'tag', books: [1] })).not.toBeNull(); - expect(await driver.getConnection().execute('SELECT 1 as count')).toEqual([{ count: 1 }]); + await expect(driver.getConnection().execute('select 1 as count')).resolves.toEqual([{ count: 1 }]); + await expect(driver.getConnection().execute('select 1 as count', [], 'get')).resolves.toEqual({ count: 1 }); + await expect(driver.getConnection().execute('insert into test3 (name) values (?)', ['test'], 'run')).resolves.toEqual({ + affectedRows: 1, + insertId: 1, + }); + await expect(driver.getConnection().execute('update test3 set name = ? where name = ?', ['test 2', 'test'], 'run')).resolves.toEqual({ + affectedRows: 1, + insertId: 1, + }); + await expect(driver.getConnection().execute('delete from test3 where name = ?', ['test 2'], 'run')).resolves.toEqual({ + affectedRows: 1, + insertId: 1, + }); expect(await driver.find(BookTag3.name, { books: [1] })).not.toBeNull(); }); test('driver appends errored query', async () => { const driver = orm.em.getDriver(); - const err1 = `SQLITE_ERROR: no such table: not_existing\n in query: INSERT INTO "not_existing" ("foo") VALUES (?)\n with params: ["bar"]`; + const err1 = "insert into `not_existing` (`foo`) values ('bar') - SQLITE_ERROR: no such table: not_existing"; await expect(driver.nativeInsert('not_existing', { foo: 'bar' })).rejects.toThrowError(err1); - const err2 = `SQLITE_ERROR: no such table: not_existing\n in query: DELETE FROM "not_existing"`; + const err2 = 'delete from `not_existing` - SQLITE_ERROR: no such table: not_existing'; await expect(driver.nativeDelete('not_existing', {})).rejects.toThrowError(err2); }); @@ -68,55 +81,59 @@ describe('EntityManagerSqlite', () => { test('transactions', async () => { const god1 = new Author3('God1', 'hello@heaven1.god'); - await orm.em.beginTransaction(); - await orm.em.persist(god1, true); - await orm.em.rollback(); + + try { + await orm.em.transactional(async em => { + await em.persistAndFlush(god1); + throw new Error(); // rollback the transaction + }); + } catch { } + const res1 = await orm.em.findOne(Author3, { name: 'God1' }); expect(res1).toBeNull(); - await orm.em.beginTransaction(); - const god2 = new Author3('God2', 'hello@heaven2.god'); - await orm.em.persist(god2, true); - await orm.em.commit(); - const res2 = await orm.em.findOne(Author3, { name: 'God2' }); - expect(res2).not.toBeNull(); - await orm.em.transactional(async em => { - const god3 = new Author3('God3', 'hello@heaven3.god'); - await em.persist(god3); + const god2 = new Author3('God2', 'hello@heaven2.god'); + await em.persist(god2); }); - const res3 = await orm.em.findOne(Author3, { name: 'God3' }); - expect(res3).not.toBeNull(); + + const res2 = await orm.em.findOne(Author3, { name: 'God2' }); + expect(res2).not.toBeNull(); const err = new Error('Test'); try { await orm.em.transactional(async em => { - const god4 = new Author3('God4', 'hello@heaven4.god'); - await em.persist(god4); + const god3 = new Author3('God4', 'hello@heaven4.god'); + await em.persist(god3); throw err; }); } catch (e) { expect(e).toBe(err); - const res4 = await orm.em.findOne(Author3, { name: 'God4' }); - expect(res4).toBeNull(); + const res3 = await orm.em.findOne(Author3, { name: 'God4' }); + expect(res3).toBeNull(); } }); test('nested transactions with save-points', async () => { await orm.em.transactional(async em => { - const driver = em.getDriver(); - const god1 = new Author3('God1', 
'hello@heaven1.god'); - await driver.beginTransaction(); - await em.persistAndFlush(god1); - await driver.rollback(); + const god1 = new Author3('God1', 'hello1@heaven.god'); + + try { + await em.transactional(async em2 => { + await em2.persistAndFlush(god1); + throw new Error(); // rollback the transaction + }); + } catch { } + const res1 = await em.findOne(Author3, { name: 'God1' }); expect(res1).toBeNull(); - await driver.beginTransaction(); - const god2 = new Author3('God2', 'hello@heaven2.god'); - await em.persistAndFlush(god2); - await driver.commit(); + await em.transactional(async em2 => { + const god2 = new Author3('God2', 'hello2@heaven.god'); + await em2.persistAndFlush(god2); + }); + const res2 = await em.findOne(Author3, { name: 'God2' }); expect(res2).not.toBeNull(); }); @@ -130,19 +147,26 @@ describe('EntityManagerSqlite', () => { // start outer transaction const transaction = orm.em.transactional(async em => { // do stuff inside inner transaction and rollback - await em.beginTransaction(); - await em.persist(new Author3('God', 'hello@heaven.god')); - await em.rollback(); - - await em.persist(new Author3('God Persisted!', 'hello-persisted@heaven.god')); + try { + await em.transactional(async em2 => { + await em2.persistAndFlush(new Author3('God', 'hello@heaven.god')); + throw new Error(); // rollback the transaction + }); + } catch { } + + await em.persistAndFlush(new Author3('God Persisted!', 'hello-persisted@heaven.god')); }); // try to commit the outer transaction await expect(transaction).resolves.toBeUndefined(); expect(mock.mock.calls.length).toBe(6); - expect(mock.mock.calls[0][0]).toMatch('[query-logger] BEGIN'); - expect(mock.mock.calls[5][0]).toMatch('[query-logger] COMMIT'); - expect(await orm.em.findOne(Author3, { name: 'God Persisted!' })).not.toBeNull(); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('savepoint trx'); + expect(mock.mock.calls[2][0]).toMatch('insert into `author3` (`created_at`, `email`, `name`, `terms_accepted`, `updated_at`) values (?, ?, ?, ?, ?)'); + expect(mock.mock.calls[3][0]).toMatch('rollback to savepoint trx'); + expect(mock.mock.calls[4][0]).toMatch('insert into `author3` (`created_at`, `email`, `name`, `terms_accepted`, `updated_at`) values (?, ?, ?, ?, ?)'); + expect(mock.mock.calls[5][0]).toMatch('commit'); + await expect(orm.em.findOne(Author3, { name: 'God Persisted!' 
})).resolves.not.toBeNull(); }); test('should load entities', async () => { @@ -410,9 +434,9 @@ describe('EntityManagerSqlite', () => { }); expect(mock.mock.calls.length).toBe(3); - expect(mock.mock.calls[0][0]).toMatch('BEGIN'); - expect(mock.mock.calls[1][0]).toMatch('SELECT 1 FROM "author3" AS "e0" WHERE "e0"."id" = ?'); - expect(mock.mock.calls[2][0]).toMatch('COMMIT'); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('select 1 from `author3` as `e0` where `e0`.`id` = ?'); + expect(mock.mock.calls[2][0]).toMatch('commit'); }); test('findOne does not support pessimistic locking [pessimistic read]', async () => { @@ -428,9 +452,9 @@ describe('EntityManagerSqlite', () => { }); expect(mock.mock.calls.length).toBe(3); - expect(mock.mock.calls[0][0]).toMatch('BEGIN'); - expect(mock.mock.calls[1][0]).toMatch('SELECT 1 FROM "author3" AS "e0" WHERE "e0"."id" = ?'); - expect(mock.mock.calls[2][0]).toMatch('COMMIT'); + expect(mock.mock.calls[0][0]).toMatch('begin'); + expect(mock.mock.calls[1][0]).toMatch('select 1 from `author3` as `e0` where `e0`.`id` = ?'); + expect(mock.mock.calls[2][0]).toMatch('commit'); }); test('stable results of serialization', async () => { diff --git a/tests/QueryBuilder.test.ts b/tests/QueryBuilder.test.ts index a00bb3d7f495..45e6e320c04a 100644 --- a/tests/QueryBuilder.test.ts +++ b/tests/QueryBuilder.test.ts @@ -14,35 +14,35 @@ describe('QueryBuilder', () => { test('select query', async () => { const qb = orm.em.createQueryBuilder(Publisher2); qb.select('*').where({ name: 'test 123', type: PublisherType.GLOBAL }).orderBy({ name: QueryOrder.DESC, type: QueryOrder.ASC }).limit(2, 1); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE `e0`.`name` = ? AND `e0`.`type` = ? ORDER BY `e0`.`name` DESC, `e0`.`type` ASC LIMIT ? OFFSET ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where `e0`.`name` = ? and `e0`.`type` = ? order by `e0`.`name` desc, `e0`.`type` asc limit ? offset ?'); expect(qb.getParams()).toEqual(['test 123', PublisherType.GLOBAL, 2, 1]); }); test('select where is null', async () => { const qb = orm.em.createQueryBuilder(Publisher2); qb.select('*').where({ type: null }).limit(2, 1); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE `e0`.`type` IS NULL LIMIT ? OFFSET ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where `e0`.`type` is null limit ? offset ?'); expect(qb.getParams()).toEqual([2, 1]); }); test('select query with order by variants', async () => { const qb = orm.em.createQueryBuilder(Publisher2); qb.select('*').where({ name: 'test 123' }).orderBy({ a: QueryOrder.DESC, b: 'ASC', c: 'desc', d: -1 }).limit(2, 1); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE `e0`.`name` = ? ORDER BY `e0`.`a` DESC, `e0`.`b` ASC, `e0`.`c` desc, `e0`.`d` DESC LIMIT ? OFFSET ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where `e0`.`name` = ? order by `e0`.`a` desc, `e0`.`b` asc, `e0`.`c` desc, `e0`.`d` desc limit ? 
offset ?'); expect(qb.getParams()).toEqual(['test 123', 2, 1]); }); test('select constant expression', async () => { const qb = orm.em.createQueryBuilder(Publisher2); qb.select('1').where({ id: 123 }); - expect(qb.getQuery()).toEqual('SELECT 1 FROM `publisher2` AS `e0` WHERE `e0`.`id` = ?'); + expect(qb.getQuery()).toEqual('select 1 from `publisher2` as `e0` where `e0`.`id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select in query', async () => { const qb = orm.em.createQueryBuilder(Publisher2); qb.select(['id', 'name', 'type']).where({ name: { $in: ['test 123', 'lol 321'] }, type: PublisherType.GLOBAL }).limit(2, 1); - expect(qb.getQuery()).toEqual('SELECT `e0`.`id`, `e0`.`name`, `e0`.`type` FROM `publisher2` AS `e0` WHERE `e0`.`name` IN (?, ?) AND `e0`.`type` = ? LIMIT ? OFFSET ?'); + expect(qb.getQuery()).toEqual('select `e0`.`id`, `e0`.`name`, `e0`.`type` from `publisher2` as `e0` where `e0`.`name` in (?, ?) and `e0`.`type` = ? limit ? offset ?'); expect(qb.getParams()).toEqual(['test 123', 'lol 321', PublisherType.GLOBAL, 2, 1]); }); @@ -53,7 +53,7 @@ describe('QueryBuilder', () => { .andWhere({ type: PublisherType.GLOBAL }) .orWhere({ name: 'lol 321' }) .limit(2, 1); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE ((`e0`.`name` = ? AND `e0`.`type` = ?) OR `e0`.`name` = ?) LIMIT ? OFFSET ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where ((`e0`.`name` = ? and `e0`.`type` = ?) or `e0`.`name` = ?) limit ? offset ?'); expect(qb.getParams()).toEqual(['test 123', PublisherType.GLOBAL, 'lol 321', 2, 1]); }); @@ -63,7 +63,7 @@ describe('QueryBuilder', () => { .andWhere({ type: PublisherType.GLOBAL }) .orWhere({ name: 'lol 321' }) .limit(2, 1); - expect(qb1.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE (`e0`.`type` = ? OR `e0`.`name` = ?) LIMIT ? OFFSET ?'); + expect(qb1.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where (`e0`.`type` = ? or `e0`.`name` = ?) limit ? offset ?'); expect(qb1.getParams()).toEqual([PublisherType.GLOBAL, 'lol 321', 2, 1]); const qb2 = orm.em.createQueryBuilder(Publisher2) @@ -71,7 +71,7 @@ describe('QueryBuilder', () => { .orWhere({ name: 'lol 321' }) .andWhere({ type: PublisherType.GLOBAL }) .limit(2, 1); - expect(qb2.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE (`e0`.`name` = ? AND `e0`.`type` = ?) LIMIT ? OFFSET ?'); + expect(qb2.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where (`e0`.`name` = ? and `e0`.`type` = ?) limit ? offset ?'); expect(qb2.getParams()).toEqual(['lol 321', PublisherType.GLOBAL, 2, 1]); }); @@ -83,7 +83,7 @@ describe('QueryBuilder', () => { .andWhere({ name: 'test 321' }) .andWhere({ name: 'lol 321' }) .limit(2, 1); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE (`e0`.`name` = ? AND `e0`.`type` = ? AND `e0`.`name` = ? AND `e0`.`name` = ?) LIMIT ? OFFSET ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where (`e0`.`name` = ? and `e0`.`type` = ? and `e0`.`name` = ? and `e0`.`name` = ?) limit ? offset ?'); expect(qb.getParams()).toEqual(['test 123', PublisherType.GLOBAL, 'test 321', 'lol 321', 2, 1]); }); @@ -95,7 +95,7 @@ describe('QueryBuilder', () => { .orWhere({ name: 'test 321' }) .orWhere({ name: 'lol 321' }) .limit(2, 1); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE (`e0`.`name` = ? OR `e0`.`type` = ? OR `e0`.`name` = ? OR `e0`.`name` = ?) LIMIT ? 
OFFSET ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where (`e0`.`name` = ? or `e0`.`type` = ? or `e0`.`name` = ? or `e0`.`name` = ?) limit ? offset ?'); expect(qb.getParams()).toEqual(['test 123', PublisherType.GLOBAL, 'test 321', 'lol 321', 2, 1]); }); @@ -105,10 +105,10 @@ describe('QueryBuilder', () => { .leftJoin('fb.baz', 'fz') .where({ 'fz.name': 'test 123' }) .limit(2, 1); - const sql = 'SELECT `fb`.*, `fz`.* FROM `foo_bar2` AS `fb` ' + - 'LEFT JOIN `foo_baz2` AS `fz` ON `fb`.`baz_id` = `fz`.`id` ' + - 'WHERE `fz`.`name` = ? ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select `fb`.*, `fz`.* from `foo_bar2` as `fb` ' + + 'left join `foo_baz2` as `fz` on `fb`.`baz_id` = `fz`.`id` ' + + 'where `fz`.`name` = ? ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 2, 1]); }); @@ -119,10 +119,10 @@ describe('QueryBuilder', () => { .leftJoin('fz.bar', 'fb') .where({ 'fb.name': 'test 123' }) .limit(2, 1); - const sql = 'SELECT `fb`.*, `fz`.* FROM `foo_baz2` AS `fz` ' + - 'LEFT JOIN `foo_bar2` AS `fb` ON `fz`.`id` = `fb`.`baz_id` ' + - 'WHERE `fb`.`name` = ? ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select `fb`.*, `fz`.* from `foo_baz2` as `fz` ' + + 'left join `foo_bar2` as `fb` on `fz`.`id` = `fb`.`baz_id` ' + + 'where `fb`.`name` = ? ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 2, 1]); }); @@ -133,10 +133,10 @@ describe('QueryBuilder', () => { .leftJoin('b.author', 'a') .where({ 'a.name': 'test 123' }) .limit(2, 1); - const sql = 'SELECT `a`.*, `b`.* FROM `book2` AS `b` ' + - 'LEFT JOIN `author2` AS `a` ON `b`.`author_id` = `a`.`id` ' + - 'WHERE `a`.`name` = ? ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select `a`.*, `b`.* from `book2` as `b` ' + + 'left join `author2` as `a` on `b`.`author_id` = `a`.`id` ' + + 'where `a`.`name` = ? ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 2, 1]); }); @@ -147,10 +147,10 @@ describe('QueryBuilder', () => { .leftJoin('a.books', 'b') .where({ 'b.title': 'test 123' }) .limit(2, 1); - const sql = 'SELECT `a`.*, `b`.* FROM `author2` AS `a` ' + - 'LEFT JOIN `book2` AS `b` ON `a`.`id` = `b`.`author_id` ' + - 'WHERE `b`.`title` = ? ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select `a`.*, `b`.* from `author2` as `a` ' + + 'left join `book2` as `b` on `a`.`id` = `b`.`author_id` ' + + 'where `b`.`title` = ? ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 2, 1]); }); @@ -161,11 +161,11 @@ describe('QueryBuilder', () => { .leftJoin('b.tags', 't') .where({ 't.name': 'test 123' }) .limit(2, 1); - const sql = 'SELECT `b`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` FROM `book2` AS `b` ' + - 'LEFT JOIN `book2_to_book_tag2` AS `e1` ON `b`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + - 'LEFT JOIN `book_tag2` AS `t` ON `e1`.`book_tag2_id` = `t`.`id` ' + - 'WHERE `t`.`name` = ? ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select `b`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` from `book2` as `b` ' + + 'left join `book2_to_book_tag2` as `e1` on `b`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + + 'left join `book_tag2` as `t` on `e1`.`book_tag2_id` = `t`.`id` ' + + 'where `t`.`name` = ? ' + + 'limit ? 
offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 2, 1]); }); @@ -176,11 +176,11 @@ describe('QueryBuilder', () => { .leftJoin('t.books', 'b') .where({ 'b.title': 'test 123' }) .limit(2, 1); - const sql = 'SELECT `b`.*, `t`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` FROM `book_tag2` AS `t` ' + - 'LEFT JOIN `book2_to_book_tag2` AS `e1` ON `t`.`id` = `e1`.`book_tag2_id` ' + - 'LEFT JOIN `book2` AS `b` ON `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + - 'WHERE `b`.`title` = ? ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select `b`.*, `t`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` from `book_tag2` as `t` ' + + 'left join `book2_to_book_tag2` as `e1` on `t`.`id` = `e1`.`book_tag2_id` ' + + 'left join `book2` as `b` on `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + + 'where `b`.`title` = ? ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 2, 1]); }); @@ -193,13 +193,13 @@ describe('QueryBuilder', () => { .join('b.tags', 't') .where({ 'p.name': 'test 123', 'b.title': /3$/ }) .limit(2, 1); - const sql = 'SELECT `p`.*, `b`.*, `a`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` FROM `publisher2` AS `p` ' + - 'LEFT JOIN `book2` AS `b` ON `p`.`id` = `b`.`publisher_id` ' + - 'JOIN `author2` AS `a` ON `b`.`author_id` = `a`.`id` ' + - 'JOIN `book2_to_book_tag2` AS `e1` ON `b`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + - 'JOIN `book_tag2` AS `t` ON `e1`.`book_tag2_id` = `t`.`id` ' + - 'WHERE `p`.`name` = ? AND `b`.`title` LIKE ? ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select `p`.*, `b`.*, `a`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` from `publisher2` as `p` ' + + 'left join `book2` as `b` on `p`.`id` = `b`.`publisher_id` ' + + 'inner join `author2` as `a` on `b`.`author_id` = `a`.`id` ' + + 'inner join `book2_to_book_tag2` as `e1` on `b`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + + 'inner join `book_tag2` as `t` on `e1`.`book_tag2_id` = `t`.`id` ' + + 'where `p`.`name` = ? and `b`.`title` like ? ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', '%3', 2, 1]); }); @@ -207,121 +207,121 @@ describe('QueryBuilder', () => { test('select with boolean', async () => { const qb = orm.em.createQueryBuilder(Author2); qb.select('*').where({ termsAccepted: false }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `author2` AS `e0` WHERE `e0`.`terms_accepted` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `author2` as `e0` where `e0`.`terms_accepted` = ?'); expect(qb.getParams()).toEqual([false]); }); test('select with custom expression', async () => { const qb1 = orm.em.createQueryBuilder(Book2); - qb1.select('*').where({ 'JSON_CONTAINS(`e0`.`meta`, ?)': [{ foo: 'bar' }, true] }); - expect(qb1.getQuery()).toEqual('SELECT `e0`.* FROM `book2` AS `e0` WHERE JSON_CONTAINS(`e0`.`meta`, ?) = ?'); - expect(qb1.getParams()).toEqual(['{"foo":"bar"}', true]); + qb1.select('*').where({ 'JSON_CONTAINS(`e0`.`meta`, ?)': [{ foo: 'bar' }] }); + expect(qb1.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where JSON_CONTAINS(`e0`.`meta`, ?)'); + expect(qb1.getParams()).toEqual(['{"foo":"bar"}']); const qb2 = orm.em.createQueryBuilder(Book2); - qb2.select('*').where({ 'JSON_CONTAINS(`e0`.`meta`, ?)': [{ foo: 'baz' }, false] }); - expect(qb2.getQuery()).toEqual('SELECT `e0`.* FROM `book2` AS `e0` WHERE JSON_CONTAINS(`e0`.`meta`, ?) = ?'); + qb2.select('*').where({ 'JSON_CONTAINS(`e0`.`meta`, ?) 
= ?': [{ foo: 'baz' }, false] }); + expect(qb2.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where JSON_CONTAINS(`e0`.`meta`, ?) = ?'); expect(qb2.getParams()).toEqual(['{"foo":"baz"}', false]); }); test('select by regexp', async () => { let qb = orm.em.createQueryBuilder(Publisher2); qb.select('*').where({ name: /test/ }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE `e0`.`name` LIKE ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where `e0`.`name` like ?'); expect(qb.getParams()).toEqual(['%test%']); qb = orm.em.createQueryBuilder(Publisher2); qb.select('*').where({ name: /^test/ }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE `e0`.`name` LIKE ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where `e0`.`name` like ?'); expect(qb.getParams()).toEqual(['test%']); qb = orm.em.createQueryBuilder(Publisher2); qb.select('*').where({ name: /t.st$/ }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE `e0`.`name` LIKE ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where `e0`.`name` like ?'); expect(qb.getParams()).toEqual(['%t_st']); qb = orm.em.createQueryBuilder(Publisher2); qb.select('*').where({ name: /^c.o.*l-te.*st\.c.m$/ }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `publisher2` AS `e0` WHERE `e0`.`name` LIKE ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `publisher2` as `e0` where `e0`.`name` like ?'); expect(qb.getParams()).toEqual(['c_o%l-te%st.c_m']); }); test('select by m:1', async () => { const qb = orm.em.createQueryBuilder(Author2); qb.select('*').where({ favouriteBook: 123 }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `author2` AS `e0` WHERE `e0`.`favourite_book_uuid_pk` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `author2` as `e0` where `e0`.`favourite_book_uuid_pk` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by 1:m', async () => { const qb = orm.em.createQueryBuilder(Author2); qb.select('*').where({ books: { $in: [123, 321] } }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `author2` AS `e0` LEFT JOIN `book2` AS `e1` ON `e0`.`id` = `e1`.`author_id` WHERE `e1`.`id` IN (?, ?)'); + expect(qb.getQuery()).toEqual('select `e0`.* from `author2` as `e0` left join `book2` as `e1` on `e0`.`id` = `e1`.`author_id` where `e1`.`id` in (?, ?)'); expect(qb.getParams()).toEqual([123, 321]); }); test('select by 1:1', async () => { const qb = orm.em.createQueryBuilder(FooBar2); qb.select('*').where({ baz: 123 }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `foo_bar2` AS `e0` WHERE `e0`.`baz_id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.* from `foo_bar2` as `e0` where `e0`.`baz_id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by 1:1 inversed', async () => { const qb = orm.em.createQueryBuilder(FooBaz2); qb.select('*').where({ bar: 123 }); - expect(qb.getQuery()).toEqual('SELECT `e0`.*, `e1`.`id` AS `bar_id` FROM `foo_baz2` AS `e0` LEFT JOIN `foo_bar2` AS `e1` ON `e0`.`id` = `e1`.`baz_id` WHERE `e1`.`id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `bar_id` from `foo_baz2` as `e0` left join `foo_bar2` as `e1` on `e0`.`id` = `e1`.`baz_id` where `e1`.`id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by 1:1 inversed with populate', async () => { const qb = orm.em.createQueryBuilder(FooBaz2); qb.select('*').where({ id: 123 }).populate(['bar']); - 
expect(qb.getQuery()).toEqual('SELECT `e0`.*, `e1`.`id` AS `bar_id` FROM `foo_baz2` AS `e0` LEFT JOIN `foo_bar2` AS `e1` ON `e0`.`id` = `e1`.`baz_id` WHERE `e0`.`id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `bar_id` from `foo_baz2` as `e0` left join `foo_bar2` as `e1` on `e0`.`id` = `e1`.`baz_id` where `e0`.`id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by 1:1 inversed (uuid pk)', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.select('*').where({ test: 123 }); - expect(qb.getQuery()).toEqual('SELECT `e0`.*, `e1`.`id` AS `test_id` FROM `book2` AS `e0` LEFT JOIN `test2` AS `e1` ON `e0`.`uuid_pk` = `e1`.`book_uuid_pk` WHERE `e1`.`id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `test_id` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e1`.`id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by 1:1 inversed with populate (uuid pk)', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.select('*').where({ test: 123 }).populate(['test']); - expect(qb.getQuery()).toEqual('SELECT `e0`.*, `e1`.`id` AS `test_id` FROM `book2` AS `e0` LEFT JOIN `test2` AS `e1` ON `e0`.`uuid_pk` = `e1`.`book_uuid_pk` WHERE `e1`.`id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `test_id` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e1`.`id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by 1:1 inversed with populate() before where() (uuid pk)', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.select('*').populate(['test']).where({ test: 123 }); - expect(qb.getQuery()).toEqual('SELECT `e0`.*, `e1`.`id` AS `test_id` FROM `book2` AS `e0` LEFT JOIN `test2` AS `e1` ON `e0`.`uuid_pk` = `e1`.`book_uuid_pk` WHERE `e1`.`id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`id` as `test_id` from `book2` as `e0` left join `test2` as `e1` on `e0`.`uuid_pk` = `e1`.`book_uuid_pk` where `e1`.`id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by m:n', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.select('*').where({ tags: 123 }); - expect(qb.getQuery()).toEqual('SELECT `e0`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` FROM `book2` AS `e0` LEFT JOIN `book2_to_book_tag2` AS `e1` ON `e0`.`uuid_pk` = `e1`.`book2_uuid_pk` WHERE `e1`.`book_tag2_id` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` from `book2` as `e0` left join `book2_to_book_tag2` as `e1` on `e0`.`uuid_pk` = `e1`.`book2_uuid_pk` where `e1`.`book_tag2_id` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by m:n inversed', async () => { const qb = orm.em.createQueryBuilder(BookTag2); qb.select('*').where({ books: 123 }); - expect(qb.getQuery()).toEqual('SELECT `e0`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` FROM `book_tag2` AS `e0` LEFT JOIN `book2_to_book_tag2` AS `e1` ON `e0`.`id` = `e1`.`book_tag2_id` WHERE `e1`.`book2_uuid_pk` = ?'); + expect(qb.getQuery()).toEqual('select `e0`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` from `book_tag2` as `e0` left join `book2_to_book_tag2` as `e1` on `e0`.`id` = `e1`.`book_tag2_id` where `e1`.`book2_uuid_pk` = ?'); expect(qb.getParams()).toEqual([123]); }); test('select by m:n with populate', async () => { const qb = orm.em.createQueryBuilder(Test2); qb.select('*').populate(['publisher2_to_test2']).where({ publisher2_id: { $in: [ 1, 2 ] } }).orderBy({ 'publisher2_to_test2.id': 
QueryOrder.ASC }); - let sql = 'SELECT `e0`.*, `e1`.`test2_id`, `e1`.`publisher2_id` FROM `test2` AS `e0`'; - sql += ' LEFT JOIN `publisher2_to_test2` AS `e1` ON `e0`.`id` = `e1`.`test2_id`'; - sql += ' WHERE `e1`.`publisher2_id` IN (?, ?)'; - sql += ' ORDER BY `e1`.`id` ASC'; + let sql = 'select `e0`.*, `e1`.`test2_id`, `e1`.`publisher2_id` from `test2` as `e0`'; + sql += ' left join `publisher2_to_test2` as `e1` on `e0`.`id` = `e1`.`test2_id`'; + sql += ' where `e1`.`publisher2_id` in (?, ?)'; + sql += ' order by `e1`.`id` asc'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual([1, 2]); }); @@ -329,21 +329,21 @@ describe('QueryBuilder', () => { test('select by m:n with unknown populate ignored', async () => { const qb = orm.em.createQueryBuilder(Test2); qb.select('*').populate(['not_existing']); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `test2` AS `e0`'); + expect(qb.getQuery()).toEqual('select `e0`.* from `test2` as `e0`'); expect(qb.getParams()).toEqual([]); }); test('select with operator (simple)', async () => { const qb = orm.em.createQueryBuilder(Test2); qb.select('*').where({ id: { $nin: [3, 4] } }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `test2` AS `e0` WHERE `e0`.`id` NOT IN (?, ?)'); + expect(qb.getQuery()).toEqual('select `e0`.* from `test2` as `e0` where `e0`.`id` not in (?, ?)'); expect(qb.getParams()).toEqual([3, 4]); }); test('select with operator (wrapped)', async () => { const qb1 = orm.em.createQueryBuilder(Test2); qb1.select('*').where({ $and: [{ id: { $nin: [3, 4] } }, { id: { $gt: 2 } }] }); - expect(qb1.getQuery()).toEqual('SELECT `e0`.* FROM `test2` AS `e0` WHERE (`e0`.`id` NOT IN (?, ?) AND `e0`.`id` > ?)'); + expect(qb1.getQuery()).toEqual('select `e0`.* from `test2` as `e0` where (`e0`.`id` not in (?, ?) and `e0`.`id` > ?)'); expect(qb1.getParams()).toEqual([3, 4, 2]); const qb2 = orm.em.createQueryBuilder(Test2); @@ -352,30 +352,31 @@ describe('QueryBuilder', () => { expect(qb2.getParams()).toEqual(qb1.getParams()); }); - test('select with operator (NOT)', async () => { + test('select with operator (not)', async () => { const qb = orm.em.createQueryBuilder(Test2); qb.select('*').where({ $not: { id: { $in: [3, 4] } } }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `test2` AS `e0` WHERE NOT (`e0`.`id` IN (?, ?))'); + expect(qb.getQuery()).toEqual('select `e0`.* from `test2` as `e0` where not (`e0`.`id` in (?, ?))'); expect(qb.getParams()).toEqual([3, 4]); }); test('select with unsupported operator', async () => { const qb = orm.em.createQueryBuilder(Test2); qb.select('*').where({ $test: { foo: 'bar'} }); - expect(qb.getParams()).toEqual([{ foo: 'bar'}]); + expect(qb.getQuery()).toEqual('select `e0`.* from `test2` as `e0`'); + expect(qb.getParams()).toEqual([]); }); test('select distinct id with left join', async () => { const qb = orm.em.createQueryBuilder(BookTag2, 't'); - qb.select(['DISTINCT b.uuid_pk', 'b.*', 't.*']) + qb.select(['distinct b.uuid_pk', 'b.*', 't.*']) .leftJoin('t.books', 'b') .where({ 'b.title': 'test 123' }) .limit(2, 1); - const sql = 'SELECT DISTINCT b.uuid_pk, `b`.*, `t`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` FROM `book_tag2` AS `t` ' + - 'LEFT JOIN `book2_to_book_tag2` AS `e1` ON `t`.`id` = `e1`.`book_tag2_id` ' + - 'LEFT JOIN `book2` AS `b` ON `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + - 'WHERE `b`.`title` = ? ' + - 'LIMIT ? 
OFFSET ?'; + const sql = 'select distinct b.uuid_pk, `b`.*, `t`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` from `book_tag2` as `t` ' + + 'left join `book2_to_book_tag2` as `e1` on `t`.`id` = `e1`.`book_tag2_id` ' + + 'left join `book2` as `b` on `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + + 'where `b`.`title` = ? ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 2, 1]); }); @@ -386,11 +387,11 @@ describe('QueryBuilder', () => { .leftJoin('t.books', 'b') .where({ 'b.title': 'test 123' }) .limit(2, 1); - const sql = 'SELECT DISTINCT `b`.`uuid_pk`, `b`.*, `t`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` FROM `book_tag2` AS `t` ' + - 'LEFT JOIN `book2_to_book_tag2` AS `e1` ON `t`.`id` = `e1`.`book_tag2_id` ' + - 'LEFT JOIN `book2` AS `b` ON `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + - 'WHERE `b`.`title` = ? ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select distinct `b`.`uuid_pk`, `b`.*, `t`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` from `book_tag2` as `t` ' + + 'left join `book2_to_book_tag2` as `e1` on `t`.`id` = `e1`.`book_tag2_id` ' + + 'left join `book2` as `b` on `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + + 'where `b`.`title` = ? ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 2, 1]); }); @@ -399,58 +400,58 @@ describe('QueryBuilder', () => { const qb = orm.em.createQueryBuilder(BookTag2, 't'); qb.select(['b.*', 't.*']) .leftJoin('t.books', 'b') - .where('b.title = ? OR b.title = ?', ['test 123', 'lol 321']) + .where('b.title = ? or b.title = ?', ['test 123', 'lol 321']) .andWhere('1 = 1') .orWhere('1 = 2') .limit(2, 1); - const sql = 'SELECT `b`.*, `t`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` FROM `book_tag2` AS `t` ' + - 'LEFT JOIN `book2_to_book_tag2` AS `e1` ON `t`.`id` = `e1`.`book_tag2_id` ' + - 'LEFT JOIN `book2` AS `b` ON `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + - 'WHERE (((b.title = ? OR b.title = ?) AND (1 = 1)) OR (1 = 2)) ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select `b`.*, `t`.*, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` from `book_tag2` as `t` ' + + 'left join `book2_to_book_tag2` as `e1` on `t`.`id` = `e1`.`book_tag2_id` ' + + 'left join `book2` as `b` on `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + + 'where (((b.title = ? or b.title = ?) and (1 = 1)) or (1 = 2)) ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 'lol 321', 2, 1]); }); test('select with group by and having', async () => { const qb = orm.em.createQueryBuilder(BookTag2, 't'); - qb.select(['b.*', 't.*', 'COUNT(t.id) as tags']) + qb.select(['b.*', 't.*', 'count(t.id) as tags']) .leftJoin('t.books', 'b') - .where('b.title = ? OR b.title = ?', ['test 123', 'lol 321']) + .where('b.title = ? or b.title = ?', ['test 123', 'lol 321']) .groupBy('b.uuid') .having('tags > ?', [0]) .limit(2, 1); - const sql = 'SELECT `b`.*, `t`.*, COUNT(t.id) as tags, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` FROM `book_tag2` AS `t` ' + - 'LEFT JOIN `book2_to_book_tag2` AS `e1` ON `t`.`id` = `e1`.`book_tag2_id` ' + - 'LEFT JOIN `book2` AS `b` ON `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + - 'WHERE (b.title = ? OR b.title = ?) ' + - 'GROUP BY `b`.`uuid_pk` ' + - 'HAVING (tags > ?) ' + - 'LIMIT ? 
OFFSET ?'; + const sql = 'select `b`.*, `t`.*, count(t.id) as tags, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` from `book_tag2` as `t` ' + + 'left join `book2_to_book_tag2` as `e1` on `t`.`id` = `e1`.`book_tag2_id` ' + + 'left join `book2` as `b` on `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + + 'where (b.title = ? or b.title = ?) ' + + 'group by `b`.`uuid_pk` ' + + 'having (tags > ?) ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', 'lol 321', 0, 2, 1]); }); test('select with group by and having with object', async () => { const qb = orm.em.createQueryBuilder(BookTag2, 't'); - qb.select(['b.*', 't.*', 'COUNT(t.id) as tags']) + qb.select(['b.*', 't.*', 'count(t.id) as tags']) .leftJoin('t.books', 'b') - .where('b.title = ? OR b.title = ?', ['test 123', 'lol 321']) + .where('b.title = ? or b.title = ?', ['test 123', 'lol 321']) .groupBy('b.uuid') - .having({ 'b.uuid': '...', 'COUNT(t.id)': { $gt: 0 } }) + .having({ $or: [{ 'b.uuid': '...', 'count(t.id)': { $gt: 0 } }, { 'b.title': 'my title' }] }) .limit(2, 1); - const sql = 'SELECT `b`.*, `t`.*, COUNT(t.id) as tags, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` FROM `book_tag2` AS `t` ' + - 'LEFT JOIN `book2_to_book_tag2` AS `e1` ON `t`.`id` = `e1`.`book_tag2_id` ' + - 'LEFT JOIN `book2` AS `b` ON `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + - 'WHERE (b.title = ? OR b.title = ?) ' + - 'GROUP BY `b`.`uuid_pk` ' + - 'HAVING `b`.`uuid_pk` = ? AND COUNT(t.id) > ? ' + - 'LIMIT ? OFFSET ?'; + const sql = 'select `b`.*, `t`.*, count(t.id) as tags, `e1`.`book_tag2_id`, `e1`.`book2_uuid_pk` from `book_tag2` as `t` ' + + 'left join `book2_to_book_tag2` as `e1` on `t`.`id` = `e1`.`book_tag2_id` ' + + 'left join `book2` as `b` on `e1`.`book2_uuid_pk` = `b`.`uuid_pk` ' + + 'where (b.title = ? or b.title = ?) ' + + 'group by `b`.`uuid_pk` ' + + 'having ((`b`.`uuid_pk` = ? and count(t.id) > ?) or `b`.`title` = ?) ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); - expect(qb.getParams()).toEqual(['test 123', 'lol 321', '...', 0, 2, 1]); + expect(qb.getParams()).toEqual(['test 123', 'lol 321', '...', 0, 'my title', 2, 1]); }); - test('select with operator (AND)', async () => { + test('select with operator (and)', async () => { const qb = orm.em.createQueryBuilder(Test2); qb.select('*').where({ $and: [ { id: { $in: [1, 2, 7] }, }, @@ -462,19 +463,19 @@ describe('QueryBuilder', () => { { id: { $ne: 9 }, }, { $not: { id: { $eq: 10 } } }, ] }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `test2` AS `e0` ' + - 'WHERE (`e0`.`id` IN (?, ?, ?) ' + - 'AND `e0`.`id` NOT IN (?, ?) ' + - 'AND `e0`.`id` > ? ' + - 'AND `e0`.`id` < ? ' + - 'AND `e0`.`id` >= ? ' + - 'AND `e0`.`id` <= ? ' + - 'AND `e0`.`id` != ? ' + - 'AND NOT (`e0`.`id` = ?))'); + expect(qb.getQuery()).toEqual('select `e0`.* from `test2` as `e0` ' + + 'where (`e0`.`id` in (?, ?, ?) ' + + 'and `e0`.`id` not in (?, ?) ' + + 'and `e0`.`id` > ? ' + + 'and `e0`.`id` < ? ' + + 'and `e0`.`id` >= ? ' + + 'and `e0`.`id` <= ? ' + + 'and `e0`.`id` != ? 
' + + 'and not (`e0`.`id` = ?))'); expect(qb.getParams()).toEqual([1, 2, 7, 3, 4, 5, 10, 7, 8, 9, 10]); }); - test('select with operator (OR)', async () => { + test('select with operator (or)', async () => { const qb = orm.em.createQueryBuilder(Test2); qb.select('*').where({ $or: [ { id: { $in: [1, 2, 7] }, }, @@ -486,15 +487,15 @@ describe('QueryBuilder', () => { { id: { $ne: 9 }, }, { $not: { id: { $eq: 10 } } }, ] }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `test2` AS `e0` ' + - 'WHERE (`e0`.`id` IN (?, ?, ?) ' + - 'OR `e0`.`id` NOT IN (?, ?) ' + - 'OR `e0`.`id` > ? ' + - 'OR `e0`.`id` < ? ' + - 'OR `e0`.`id` >= ? ' + - 'OR `e0`.`id` <= ? ' + - 'OR `e0`.`id` != ? ' + - 'OR NOT (`e0`.`id` = ?))'); + expect(qb.getQuery()).toEqual('select `e0`.* from `test2` as `e0` ' + + 'where (`e0`.`id` in (?, ?, ?) ' + + 'or `e0`.`id` not in (?, ?) ' + + 'or `e0`.`id` > ? ' + + 'or `e0`.`id` < ? ' + + 'or `e0`.`id` >= ? ' + + 'or `e0`.`id` <= ? ' + + 'or `e0`.`id` != ? ' + + 'or not (`e0`.`id` = ?))'); expect(qb.getParams()).toEqual([1, 2, 7, 3, 4, 5, 10, 7, 8, 9, 10]); }); @@ -509,56 +510,56 @@ describe('QueryBuilder', () => { 'key6:in': [6, 7], 'key7:nin': [8, 9], }); - expect(qb.getQuery()).toEqual('SELECT `e0`.* FROM `test2` AS `e0` ' + - 'WHERE `e0`.`key1` > ? ' + - 'AND `e0`.`key2` < ? ' + - 'AND `e0`.`key3` >= ? ' + - 'AND `e0`.`key4` <= ? ' + - 'AND `e0`.`key5` != ? ' + - 'AND `e0`.`key6` IN (?, ?) ' + - 'AND `e0`.`key7` NOT IN (?, ?)'); + expect(qb.getQuery()).toEqual('select `e0`.* from `test2` as `e0` ' + + 'where `e0`.`key1` > ? ' + + 'and `e0`.`key2` < ? ' + + 'and `e0`.`key3` >= ? ' + + 'and `e0`.`key4` <= ? ' + + 'and `e0`.`key5` != ? ' + + 'and `e0`.`key6` in (?, ?) ' + + 'and `e0`.`key7` not in (?, ?)'); expect(qb.getParams()).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9]); }); test('select count query', async () => { const qb = orm.em.createQueryBuilder(Publisher2); qb.count().where({ name: 'test 123', type: PublisherType.GLOBAL }); - expect(qb.getQuery()).toEqual('SELECT COUNT(`e0`.`id`) AS `count` FROM `publisher2` AS `e0` WHERE `e0`.`name` = ? AND `e0`.`type` = ?'); + expect(qb.getQuery()).toEqual('select count(`e0`.`id`) as `count` from `publisher2` as `e0` where `e0`.`name` = ? and `e0`.`type` = ?'); expect(qb.getParams()).toEqual(['test 123', PublisherType.GLOBAL]); }); test('select count distinct query', async () => { const qb = orm.em.createQueryBuilder(Publisher2); qb.count('id', true).where({ name: 'test 123', type: PublisherType.GLOBAL }); - expect(qb.getQuery()).toEqual('SELECT COUNT(DISTINCT `e0`.`id`) AS `count` FROM `publisher2` AS `e0` WHERE `e0`.`name` = ? AND `e0`.`type` = ?'); + expect(qb.getQuery()).toEqual('select count(distinct `e0`.`id`) as `count` from `publisher2` as `e0` where `e0`.`name` = ? 
and `e0`.`type` = ?'); expect(qb.getParams()).toEqual(['test 123', PublisherType.GLOBAL]); }); test('select count with non-standard PK field name (uuid_pk)', async () => { const qb = orm.em.createQueryBuilder(Book2); qb.count().where({ title: 'test 123' }); - expect(qb.getQuery()).toEqual('SELECT COUNT(`e0`.`uuid_pk`) AS `count` FROM `book2` AS `e0` WHERE `e0`.`title` = ?'); + expect(qb.getQuery()).toEqual('select count(`e0`.`uuid_pk`) as `count` from `book2` as `e0` where `e0`.`title` = ?'); expect(qb.getParams()).toEqual(['test 123']); }); test('select with locking', async () => { const qb1 = orm.em.createQueryBuilder(Test2); qb1.select('*').where({ title: 'test 123' }).setLockMode(LockMode.OPTIMISTIC); - expect(qb1.getQuery()).toEqual('SELECT `e0`.* FROM `test2` AS `e0` WHERE `e0`.`title` = ?'); + expect(qb1.getQuery()).toEqual('select `e0`.* from `test2` as `e0` where `e0`.`title` = ?'); - await orm.em.beginTransaction(); - const qb2 = orm.em.createQueryBuilder(Book2); - qb2.select('*').where({ title: 'test 123' }).setLockMode(LockMode.NONE); - expect(qb2.getQuery()).toEqual('SELECT `e0`.* FROM `book2` AS `e0` WHERE `e0`.`title` = ?'); + await orm.em.transactional(async em => { + const qb2 = em.createQueryBuilder(Book2); + qb2.select('*').where({ title: 'test 123' }).setLockMode(LockMode.NONE); + expect(qb2.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where `e0`.`title` = ?'); - const qb3 = orm.em.createQueryBuilder(Book2); - qb3.select('*').where({ title: 'test 123' }).setLockMode(LockMode.PESSIMISTIC_READ); - expect(qb3.getQuery()).toEqual('SELECT `e0`.* FROM `book2` AS `e0` WHERE `e0`.`title` = ? LOCK IN SHARE MODE'); + const qb3 = em.createQueryBuilder(Book2); + qb3.select('*').where({ title: 'test 123' }).setLockMode(LockMode.PESSIMISTIC_READ); + expect(qb3.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where `e0`.`title` = ? lock in share mode'); - const qb4 = orm.em.createQueryBuilder(Book2); - qb4.select('*').where({ title: 'test 123' }).setLockMode(LockMode.PESSIMISTIC_WRITE); - expect(qb4.getQuery()).toEqual('SELECT `e0`.* FROM `book2` AS `e0` WHERE `e0`.`title` = ? FOR UPDATE'); - await orm.em.commit(); + const qb4 = em.createQueryBuilder(Book2); + qb4.select('*').where({ title: 'test 123' }).setLockMode(LockMode.PESSIMISTIC_WRITE); + expect(qb4.getQuery()).toEqual('select `e0`.* from `book2` as `e0` where `e0`.`title` = ? 
for update'); + }); }); test('pessimistic locking requires active transaction', async () => { @@ -573,24 +574,24 @@ describe('QueryBuilder', () => { test('insert query', async () => { const qb1 = orm.em.createQueryBuilder(Publisher2); qb1.insert({ name: 'test 123', type: PublisherType.GLOBAL }); - expect(qb1.getQuery()).toEqual('INSERT INTO `publisher2` (`name`, `type`) VALUES (?, ?)'); + expect(qb1.getQuery()).toEqual('insert into `publisher2` (`name`, `type`) values (?, ?)'); expect(qb1.getParams()).toEqual(['test 123', PublisherType.GLOBAL]); const qb2 = orm.em.createQueryBuilder(Author2); qb2.insert({ name: 'test 123', favouriteBook: 2359, termsAccepted: true }); - expect(qb2.getQuery()).toEqual('INSERT INTO `author2` (`name`, `favourite_book_uuid_pk`, `terms_accepted`) VALUES (?, ?, ?)'); - expect(qb2.getParams()).toEqual(['test 123', 2359, true]); + expect(qb2.getQuery()).toEqual('insert into `author2` (`favourite_book_uuid_pk`, `name`, `terms_accepted`) values (?, ?, ?)'); + expect(qb2.getParams()).toEqual([2359, 'test 123', true]); const qb3 = orm.em.createQueryBuilder(BookTag2); qb3.insert({ books: 123 }); - expect(qb3.getQuery()).toEqual('INSERT INTO `book_tag2` (`books`) VALUES (?)'); + expect(qb3.getQuery()).toEqual('insert into `book_tag2` (`books`) values (?)'); expect(qb3.getParams()).toEqual([123]); }); test('update query', async () => { const qb = orm.em.createQueryBuilder(Publisher2); qb.update({ name: 'test 123', type: PublisherType.GLOBAL }).where({ id: 123, type: PublisherType.LOCAL }); - expect(qb.getQuery()).toEqual('UPDATE `publisher2` SET `name` = ?, `type` = ? WHERE `id` = ? AND `type` = ?'); + expect(qb.getQuery()).toEqual('update `publisher2` set `name` = ?, `type` = ? where `id` = ? and `type` = ?'); expect(qb.getParams()).toEqual(['test 123', PublisherType.GLOBAL, 123, PublisherType.LOCAL]); }); @@ -599,14 +600,14 @@ describe('QueryBuilder', () => { const test = Test2.create('test'); test.id = 321; qb.update({ name: 'test 123', test }).where({ id: 123, type: PublisherType.LOCAL }); - expect(qb.getQuery()).toEqual('UPDATE `publisher2` SET `name` = ?, `test` = ? WHERE `id` = ? AND `type` = ?'); + expect(qb.getQuery()).toEqual('update `publisher2` set `name` = ?, `test` = ? where `id` = ? and `type` = ?'); expect(qb.getParams()).toEqual(['test 123', 321, 123, PublisherType.LOCAL]); }); test('delete query', async () => { const qb = orm.em.createQueryBuilder(Publisher2); qb.delete({ name: 'test 123', type: PublisherType.GLOBAL }); - expect(qb.getQuery()).toEqual('DELETE FROM `publisher2` WHERE `name` = ? AND `type` = ?'); + expect(qb.getQuery()).toEqual('delete from `publisher2` where `name` = ? and `type` = ?'); expect(qb.getParams()).toEqual(['test 123', PublisherType.GLOBAL]); }); @@ -636,27 +637,26 @@ describe('QueryBuilder', () => { expect(clone['_offset']).toBe(qb['_offset']); clone.orWhere({ 'p.name': 'or this name' }).orderBy({ 'p.name': QueryOrder.ASC }).limit(10, 5); - clone.limit(10, 5); - - const sql = 'SELECT `p`.*, `b`.*, `a`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` FROM `publisher2` AS `p` ' + - 'LEFT JOIN `book2` AS `b` ON `p`.`id` = `b`.`publisher_id` ' + - 'JOIN `author2` AS `a` ON `b`.`author_id` = `a`.`id` ' + - 'JOIN `book2_to_book_tag2` AS `e1` ON `b`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + - 'JOIN `book_tag2` AS `t` ON `e1`.`book_tag2_id` = `t`.`id` ' + - 'WHERE `p`.`name` = ? AND `b`.`title` LIKE ? ' + - 'ORDER BY `b`.`title` DESC ' + - 'LIMIT ? 
OFFSET ?'; + + const sql = 'select `p`.*, `b`.*, `a`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` from `publisher2` as `p` ' + + 'left join `book2` as `b` on `p`.`id` = `b`.`publisher_id` ' + + 'inner join `author2` as `a` on `b`.`author_id` = `a`.`id` ' + + 'inner join `book2_to_book_tag2` as `e1` on `b`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + + 'inner join `book_tag2` as `t` on `e1`.`book_tag2_id` = `t`.`id` ' + + 'where `p`.`name` = ? and `b`.`title` like ? ' + + 'order by `b`.`title` desc ' + + 'limit ? offset ?'; expect(qb.getQuery()).toEqual(sql); expect(qb.getParams()).toEqual(['test 123', '%3', 2, 1]); - const sql2 = 'SELECT `p`.*, `b`.*, `a`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` FROM `publisher2` AS `p` ' + - 'LEFT JOIN `book2` AS `b` ON `p`.`id` = `b`.`publisher_id` ' + - 'JOIN `author2` AS `a` ON `b`.`author_id` = `a`.`id` ' + - 'JOIN `book2_to_book_tag2` AS `e1` ON `b`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + - 'JOIN `book_tag2` AS `t` ON `e1`.`book_tag2_id` = `t`.`id` ' + - 'WHERE ((`p`.`name` = ? AND `b`.`title` LIKE ?) OR `p`.`name` = ?) ' + - 'ORDER BY `p`.`name` ASC ' + - 'LIMIT ? OFFSET ?'; + const sql2 = 'select `p`.*, `b`.*, `a`.*, `t`.*, `e1`.`book2_uuid_pk`, `e1`.`book_tag2_id` from `publisher2` as `p` ' + + 'left join `book2` as `b` on `p`.`id` = `b`.`publisher_id` ' + + 'inner join `author2` as `a` on `b`.`author_id` = `a`.`id` ' + + 'inner join `book2_to_book_tag2` as `e1` on `b`.`uuid_pk` = `e1`.`book2_uuid_pk` ' + + 'inner join `book_tag2` as `t` on `e1`.`book_tag2_id` = `t`.`id` ' + + 'where ((`p`.`name` = ? and `b`.`title` like ?) or `p`.`name` = ?) ' + + 'order by `p`.`name` asc ' + + 'limit ? offset ?'; expect(clone.getQuery()).toEqual(sql2); expect(clone.getParams()).toEqual(['test 123', '%3', 'or this name', 10, 5]); }); diff --git a/tests/SchemaHelper.test.ts b/tests/SchemaHelper.test.ts index 4dfd322b21a9..c1d84653a43f 100644 --- a/tests/SchemaHelper.test.ts +++ b/tests/SchemaHelper.test.ts @@ -1,12 +1,6 @@ import { SchemaHelper } from '../lib/schema'; -class SchemaHelperTest extends SchemaHelper { - - supportsSequences(): boolean { - return true; - } - -} +class SchemaHelperTest extends SchemaHelper { } /** * @class SchemaHelperTest @@ -17,17 +11,7 @@ describe('SchemaHelper', () => { const helper = new SchemaHelperTest(); expect(helper.getSchemaBeginning()).toBe(''); expect(helper.getSchemaEnd()).toBe(''); - expect(helper.supportsSequences()).toBe(true); expect(helper.getTypeDefinition({ type: 'test' } as any)).toBe('test'); - - const meta = { - collection: 'test', - primaryKey: 'pk', - properties: { - pk: { name: 'pk', type: 'number' }, - }, - }; - expect(helper.dropTable(meta as any)).toBe('DROP TABLE IF EXISTS "test";\nDROP SEQUENCE IF EXISTS "test_seq";\n'); }); }); diff --git a/tests/__snapshots__/SchemaGenerator.test.ts.snap b/tests/__snapshots__/SchemaGenerator.test.ts.snap index 1e33985b0c1d..a3a96987601a 100644 --- a/tests/__snapshots__/SchemaGenerator.test.ts.snap +++ b/tests/__snapshots__/SchemaGenerator.test.ts.snap @@ -1,395 +1,176 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP exports[`SchemaGenerator generate schema from metadata [mysql]: mysql-schema-dump 1`] = ` -"SET NAMES utf8; -SET FOREIGN_KEY_CHECKS=0; - - -DROP TABLE IF EXISTS \`author2\`; +"set names utf8; +set foreign_key_checks = 0; -CREATE TABLE \`author2\` ( - \`id\` int(11) unsigned NOT NULL AUTO_INCREMENT, - \`created_at\` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3), - \`updated_at\` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3), - \`name\` varchar(255) 
NOT NULL,
- \`email\` varchar(255) UNIQUE NOT NULL,
- \`age\` int(11) DEFAULT NULL,
- \`terms_accepted\` tinyint(1) NOT NULL DEFAULT 0,
- \`identities\` json DEFAULT NULL,
- \`born\` datetime DEFAULT NULL,
- \`favourite_book_uuid_pk\` varchar(36) DEFAULT NULL,
- \`favourite_author_id\` int(11) unsigned DEFAULT NULL,
- PRIMARY KEY (\`id\`),
- KEY \`favourite_book_uuid_pk\` (\`favourite_book_uuid_pk\`),
- KEY \`favourite_author_id\` (\`favourite_author_id\`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+drop table if exists \`author2\`;
+drop table if exists \`book2\`;
+drop table if exists \`book_tag2\`;
+drop table if exists \`publisher2\`;
+drop table if exists \`test2\`;
+drop table if exists \`foo_bar2\`;
+drop table if exists \`foo_baz2\`;
+drop table if exists \`book2_to_book_tag2\`;
+drop table if exists \`publisher2_to_test2\`;
+create table \`author2\` (\`id\` int unsigned not null auto_increment primary key, \`created_at\` datetime(3) not null default current_timestamp(3), \`updated_at\` datetime(3) not null default current_timestamp(3), \`name\` varchar(255) not null, \`email\` varchar(255) not null, \`age\` int(11) null, \`terms_accepted\` tinyint(1) not null default 0, \`identities\` json null, \`born\` datetime null, \`favourite_book_uuid_pk\` varchar(36) null, \`favourite_author_id\` int(11) unsigned null) default character set utf8 engine = InnoDB;
+alter table \`author2\` add unique \`author2_email_unique\`(\`email\`);
+alter table \`author2\` add index \`author2_favourite_book_uuid_pk_index\`(\`favourite_book_uuid_pk\`);
+alter table \`author2\` add index \`author2_favourite_author_id_index\`(\`favourite_author_id\`);
-DROP TABLE IF EXISTS \`book2\`;
+create table \`book2\` (\`uuid_pk\` varchar(36) not null, \`created_at\` datetime(3) not null default current_timestamp(3), \`title\` varchar(255) null, \`perex\` text null, \`price\` float null, \`double\` double null, \`meta\` json null, \`author_id\` int(11) unsigned null, \`publisher_id\` int(11) unsigned null) default character set utf8 engine = InnoDB;
+alter table \`book2\` add primary key \`book2_pkey\`(\`uuid_pk\`);
+alter table \`book2\` add index \`book2_author_id_index\`(\`author_id\`);
+alter table \`book2\` add index \`book2_publisher_id_index\`(\`publisher_id\`);
-CREATE TABLE \`book2\` (
- \`uuid_pk\` varchar(36) NOT NULL,
- \`created_at\` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
- \`title\` varchar(255) DEFAULT NULL,
- \`perex\` text DEFAULT NULL,
- \`price\` float DEFAULT NULL,
- \`double\` double DEFAULT NULL,
- \`meta\` json DEFAULT NULL,
- \`author_id\` int(11) unsigned DEFAULT NULL,
- \`publisher_id\` int(11) unsigned DEFAULT NULL,
- PRIMARY KEY (\`uuid_pk\`),
- KEY \`author_id\` (\`author_id\`),
- KEY \`publisher_id\` (\`publisher_id\`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+create table \`book_tag2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(50) not null) default character set utf8 engine = InnoDB;
+create table \`publisher2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null, \`type\` varchar(10) not null) default character set utf8 engine = InnoDB;
-DROP TABLE IF EXISTS \`book_tag2\`;
+create table \`test2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) null, \`book_uuid_pk\` varchar(36) null, \`version\` int(11) not null default 1) default character set utf8 engine = InnoDB;
+alter table \`test2\` add unique \`test2_book_uuid_pk_unique\`(\`book_uuid_pk\`);
+alter table \`test2\` add index \`test2_book_uuid_pk_index\`(\`book_uuid_pk\`);
-CREATE TABLE \`book_tag2\` (
- \`id\` int(11) unsigned NOT NULL AUTO_INCREMENT,
- \`name\` varchar(50) NOT NULL,
- PRIMARY KEY (\`id\`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+create table \`foo_bar2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null, \`baz_id\` int(11) unsigned null, \`foo_bar_id\` int(11) unsigned null, \`version\` datetime(3) not null default current_timestamp(3)) default character set utf8 engine = InnoDB;
+alter table \`foo_bar2\` add unique \`foo_bar2_baz_id_unique\`(\`baz_id\`);
+alter table \`foo_bar2\` add index \`foo_bar2_baz_id_index\`(\`baz_id\`);
+alter table \`foo_bar2\` add unique \`foo_bar2_foo_bar_id_unique\`(\`foo_bar_id\`);
+alter table \`foo_bar2\` add index \`foo_bar2_foo_bar_id_index\`(\`foo_bar_id\`);
+create table \`foo_baz2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null) default character set utf8 engine = InnoDB;
-DROP TABLE IF EXISTS \`publisher2\`;
+create table \`book2_to_book_tag2\` (\`id\` int unsigned not null auto_increment primary key, \`book2_uuid_pk\` varchar(36) not null, \`book_tag2_id\` int(11) unsigned not null) default character set utf8 engine = InnoDB;
+alter table \`book2_to_book_tag2\` add index \`book2_to_book_tag2_book2_uuid_pk_index\`(\`book2_uuid_pk\`);
+alter table \`book2_to_book_tag2\` add index \`book2_to_book_tag2_book_tag2_id_index\`(\`book_tag2_id\`);
-CREATE TABLE \`publisher2\` (
- \`id\` int(11) unsigned NOT NULL AUTO_INCREMENT,
- \`name\` varchar(255) NOT NULL,
- \`type\` varchar(10) NOT NULL,
- PRIMARY KEY (\`id\`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+create table \`publisher2_to_test2\` (\`id\` int unsigned not null auto_increment primary key, \`publisher2_id\` int(11) unsigned not null, \`test2_id\` int(11) unsigned not null) default character set utf8 engine = InnoDB;
+alter table \`publisher2_to_test2\` add index \`publisher2_to_test2_publisher2_id_index\`(\`publisher2_id\`);
+alter table \`publisher2_to_test2\` add index \`publisher2_to_test2_test2_id_index\`(\`test2_id\`);
+alter table \`author2\` add constraint \`author2_favourite_book_uuid_pk_foreign\` foreign key (\`favourite_book_uuid_pk\`) references \`book2\` (\`uuid_pk\`) on update cascade on delete set null;
+alter table \`author2\` add constraint \`author2_favourite_author_id_foreign\` foreign key (\`favourite_author_id\`) references \`author2\` (\`id\`) on update cascade on delete set null;
-DROP TABLE IF EXISTS \`test2\`;
+alter table \`book2\` add constraint \`book2_author_id_foreign\` foreign key (\`author_id\`) references \`author2\` (\`id\`) on delete set null;
+alter table \`book2\` add constraint \`book2_publisher_id_foreign\` foreign key (\`publisher_id\`) references \`publisher2\` (\`id\`) on delete set null;
-CREATE TABLE \`test2\` (
- \`id\` int(11) unsigned NOT NULL AUTO_INCREMENT,
- \`name\` varchar(255) DEFAULT NULL,
- \`book_uuid_pk\` varchar(36) UNIQUE DEFAULT NULL,
- \`version\` int(11) NOT NULL DEFAULT 1,
- PRIMARY KEY (\`id\`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+alter table \`test2\` add constraint \`test2_book_uuid_pk_foreign\` foreign key (\`book_uuid_pk\`) references \`book2\` (\`uuid_pk\`) on delete set null;
+alter table \`foo_bar2\` add constraint \`foo_bar2_baz_id_foreign\` foreign key (\`baz_id\`) references \`foo_baz2\` (\`id\`) on update cascade on delete set null;
+alter table \`foo_bar2\` add constraint \`foo_bar2_foo_bar_id_foreign\` foreign key (\`foo_bar_id\`) references \`foo_bar2\` (\`id\`) on update cascade on delete set null;
-DROP TABLE IF EXISTS \`foo_bar2\`;
+alter table \`book2_to_book_tag2\` add constraint \`book2_to_book_tag2_book2_uuid_pk_foreign\` foreign key (\`book2_uuid_pk\`) references \`book2\` (\`uuid_pk\`) on update cascade on delete cascade;
+alter table \`book2_to_book_tag2\` add constraint \`book2_to_book_tag2_book_tag2_id_foreign\` foreign key (\`book_tag2_id\`) references \`book_tag2\` (\`id\`) on update cascade on delete cascade;
-CREATE TABLE \`foo_bar2\` (
- \`id\` int(11) unsigned NOT NULL AUTO_INCREMENT,
- \`name\` varchar(255) NOT NULL,
- \`baz_id\` int(11) unsigned UNIQUE DEFAULT NULL,
- \`foo_bar_id\` int(11) unsigned UNIQUE DEFAULT NULL,
- \`version\` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
- PRIMARY KEY (\`id\`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+alter table \`publisher2_to_test2\` add constraint \`publisher2_to_test2_publisher2_id_foreign\` foreign key (\`publisher2_id\`) references \`publisher2\` (\`id\`) on update cascade on delete cascade;
+alter table \`publisher2_to_test2\` add constraint \`publisher2_to_test2_test2_id_foreign\` foreign key (\`test2_id\`) references \`test2\` (\`id\`) on update cascade on delete cascade;
-
-DROP TABLE IF EXISTS \`foo_baz2\`;
-
-CREATE TABLE \`foo_baz2\` (
- \`id\` int(11) unsigned NOT NULL AUTO_INCREMENT,
- \`name\` varchar(255) NOT NULL,
- PRIMARY KEY (\`id\`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-
-DROP TABLE IF EXISTS \`book2_to_book_tag2\`;
-
-CREATE TABLE \`book2_to_book_tag2\` (
- \`id\` int(11) unsigned NOT NULL AUTO_INCREMENT,
- \`book2_uuid_pk\` varchar(36) NOT NULL,
- \`book_tag2_id\` int(11) unsigned NOT NULL,
- PRIMARY KEY (\`id\`),
- KEY \`book2_uuid_pk\` (\`book2_uuid_pk\`),
- KEY \`book_tag2_id\` (\`book_tag2_id\`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-
-DROP TABLE IF EXISTS \`publisher2_to_test2\`;
-
-CREATE TABLE \`publisher2_to_test2\` (
- \`id\` int(11) unsigned NOT NULL AUTO_INCREMENT,
- \`publisher2_id\` int(11) unsigned NOT NULL,
- \`test2_id\` int(11) unsigned NOT NULL,
- PRIMARY KEY (\`id\`),
- KEY \`publisher2_id\` (\`publisher2_id\`),
- KEY \`test2_id\` (\`test2_id\`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-
-
-ALTER TABLE \`author2\`
- ADD CONSTRAINT \`author2_ibfk_1\` FOREIGN KEY (\`favourite_book_uuid_pk\`) REFERENCES \`book2\` (\`uuid_pk\`) ON DELETE SET NULL ON UPDATE CASCADE,
- ADD CONSTRAINT \`author2_ibfk_2\` FOREIGN KEY (\`favourite_author_id\`) REFERENCES \`author2\` (\`id\`) ON DELETE SET NULL ON UPDATE CASCADE;
-
-
-ALTER TABLE \`book2\`
- ADD CONSTRAINT \`book2_ibfk_1\` FOREIGN KEY (\`author_id\`) REFERENCES \`author2\` (\`id\`) ON DELETE SET NULL,
- ADD CONSTRAINT \`book2_ibfk_2\` FOREIGN KEY (\`publisher_id\`) REFERENCES \`publisher2\` (\`id\`) ON DELETE SET NULL;
-
-
-ALTER TABLE \`test2\`
- ADD CONSTRAINT \`test2_ibfk_1\` FOREIGN KEY (\`book_uuid_pk\`) REFERENCES \`book2\` (\`uuid_pk\`) ON DELETE SET NULL;
-
-
-ALTER TABLE \`foo_bar2\`
- ADD CONSTRAINT \`foo_bar2_ibfk_1\` FOREIGN KEY (\`baz_id\`) REFERENCES \`foo_baz2\` (\`id\`) ON DELETE SET NULL ON UPDATE CASCADE,
- ADD CONSTRAINT \`foo_bar2_ibfk_2\` FOREIGN KEY (\`foo_bar_id\`) REFERENCES \`foo_bar2\` (\`id\`) ON DELETE SET NULL ON UPDATE CASCADE;
-
-
-ALTER TABLE \`book2_to_book_tag2\`
- ADD CONSTRAINT \`book2_to_book_tag2_ibfk_1\` FOREIGN KEY (\`book2_uuid_pk\`) REFERENCES \`book2\` (\`uuid_pk\`) ON DELETE CASCADE ON UPDATE CASCADE,
- ADD CONSTRAINT \`book2_to_book_tag2_ibfk_2\` FOREIGN KEY (\`book_tag2_id\`) REFERENCES \`book_tag2\` (\`id\`) ON DELETE CASCADE ON UPDATE CASCADE;
-
-
-ALTER TABLE \`publisher2_to_test2\`
- ADD CONSTRAINT \`publisher2_to_test2_ibfk_1\` FOREIGN KEY (\`publisher2_id\`) REFERENCES \`publisher2\` (\`id\`) ON DELETE CASCADE ON UPDATE CASCADE,
- ADD CONSTRAINT \`publisher2_to_test2_ibfk_2\` FOREIGN KEY (\`test2_id\`) REFERENCES \`test2\` (\`id\`) ON DELETE CASCADE ON UPDATE CASCADE;
-
-
-SET FOREIGN_KEY_CHECKS=1;
+set foreign_key_checks = 1;
 "
 `;
 exports[`SchemaGenerator generate schema from metadata [postgres]: postgres-schema-dump 1`] = `
-"SET NAMES 'utf8';
-SET session_replication_role = 'replica';
-
-
-DROP TABLE IF EXISTS \\"author2\\" CASCADE;
-DROP SEQUENCE IF EXISTS \\"author2_seq\\";
-
-CREATE SEQUENCE \\"author2_seq\\";
-CREATE TABLE \\"author2\\" (
- \\"id\\" int check (\\"id\\" > 0) NOT NULL DEFAULT NEXTVAL('author2_seq'),
- \\"created_at\\" timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
- \\"updated_at\\" timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
- \\"name\\" varchar(255) NOT NULL,
- \\"email\\" varchar(255) UNIQUE NOT NULL,
- \\"age\\" int DEFAULT NULL,
- \\"terms_accepted\\" boolean NOT NULL DEFAULT 0,
- \\"identities\\" json DEFAULT NULL,
- \\"born\\" timestamp DEFAULT NULL,
- \\"favourite_book_uuid_pk\\" varchar(36) DEFAULT NULL,
- \\"favourite_author_id\\" int check (\\"favourite_author_id\\" > 0) DEFAULT NULL,
- PRIMARY KEY (\\"id\\")
-);
-
-
-DROP TABLE IF EXISTS \\"book2\\" CASCADE;
-DROP SEQUENCE IF EXISTS \\"book2_seq\\";
-
-CREATE TABLE \\"book2\\" (
- \\"uuid_pk\\" varchar(36) NOT NULL,
- \\"created_at\\" timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
- \\"title\\" varchar(255) DEFAULT NULL,
- \\"perex\\" text DEFAULT NULL,
- \\"price\\" float DEFAULT NULL,
- \\"double\\" double precision DEFAULT NULL,
- \\"meta\\" json DEFAULT NULL,
- \\"author_id\\" int check (\\"author_id\\" > 0) DEFAULT NULL,
- \\"publisher_id\\" int check (\\"publisher_id\\" > 0) DEFAULT NULL,
- PRIMARY KEY (\\"uuid_pk\\")
-);
-
-
-DROP TABLE IF EXISTS \\"book_tag2\\" CASCADE;
-DROP SEQUENCE IF EXISTS \\"book_tag2_seq\\";
-
-CREATE SEQUENCE \\"book_tag2_seq\\";
-CREATE TABLE \\"book_tag2\\" (
- \\"id\\" int check (\\"id\\" > 0) NOT NULL DEFAULT NEXTVAL('book_tag2_seq'),
- \\"name\\" varchar(50) NOT NULL,
- PRIMARY KEY (\\"id\\")
-);
-
-
-DROP TABLE IF EXISTS \\"publisher2\\" CASCADE;
-DROP SEQUENCE IF EXISTS \\"publisher2_seq\\";
+"set names 'utf8';
+set session_replication_role = 'replica';
-CREATE SEQUENCE \\"publisher2_seq\\";
-CREATE TABLE \\"publisher2\\" (
- \\"id\\" int check (\\"id\\" > 0) NOT NULL DEFAULT NEXTVAL('publisher2_seq'),
- \\"name\\" varchar(255) NOT NULL,
- \\"type\\" varchar(10) NOT NULL,
- PRIMARY KEY (\\"id\\")
-);
+drop table if exists \\"author2\\";
+drop table if exists \\"book2\\";
+drop table if exists \\"book_tag2\\";
+drop table if exists \\"publisher2\\";
+drop table if exists \\"test2\\";
+drop table if exists \\"foo_bar2\\";
+drop table if exists \\"foo_baz2\\";
+drop table if exists \\"book2_to_book_tag2\\";
+drop table if exists \\"publisher2_to_test2\\";
+create table \\"author2\\" (\\"id\\" serial primary key, \\"created_at\\" timestamp(3) not null default current_timestamp(3), \\"updated_at\\" timestamp(3) not null default current_timestamp(3), \\"name\\" varchar(255) not null, \\"email\\" varchar(255) not null, \\"age\\" int null, \\"terms_accepted\\" boolean not null default 0, \\"identities\\" json null, \\"born\\" timestamp null, \\"favourite_book_uuid_pk\\" varchar(36) null, \\"favourite_author_id\\" int null);
+alter table \\"author2\\" add constraint \\"author2_email_unique\\" unique (\\"email\\");
-DROP TABLE IF EXISTS \\"test2\\" CASCADE;
-DROP SEQUENCE IF EXISTS \\"test2_seq\\";
+create table \\"book2\\" (\\"uuid_pk\\" varchar(36) not null, \\"created_at\\" timestamp(3) not null default current_timestamp(3), \\"title\\" varchar(255) null, \\"perex\\" text null, \\"price\\" float null, \\"double\\" double precision null, \\"meta\\" json null, \\"author_id\\" int null, \\"publisher_id\\" int null);
+alter table \\"book2\\" add constraint \\"book2_pkey\\" primary key (\\"uuid_pk\\");
-CREATE SEQUENCE \\"test2_seq\\";
-CREATE TABLE \\"test2\\" (
- \\"id\\" int check (\\"id\\" > 0) NOT NULL DEFAULT NEXTVAL('test2_seq'),
- \\"name\\" varchar(255) DEFAULT NULL,
- \\"book_uuid_pk\\" varchar(36) UNIQUE DEFAULT NULL,
- \\"version\\" int NOT NULL DEFAULT 1,
- PRIMARY KEY (\\"id\\")
-);
+create table \\"book_tag2\\" (\\"id\\" serial primary key, \\"name\\" varchar(50) not null);
+create table \\"publisher2\\" (\\"id\\" serial primary key, \\"name\\" varchar(255) not null, \\"type\\" varchar(10) not null);
-DROP TABLE IF EXISTS \\"foo_bar2\\" CASCADE;
-DROP SEQUENCE IF EXISTS \\"foo_bar2_seq\\";
+create table \\"test2\\" (\\"id\\" serial primary key, \\"name\\" varchar(255) null, \\"book_uuid_pk\\" varchar(36) null, \\"version\\" int not null default 1);
+alter table \\"test2\\" add constraint \\"test2_book_uuid_pk_unique\\" unique (\\"book_uuid_pk\\");
-CREATE SEQUENCE \\"foo_bar2_seq\\";
-CREATE TABLE \\"foo_bar2\\" (
- \\"id\\" int check (\\"id\\" > 0) NOT NULL DEFAULT NEXTVAL('foo_bar2_seq'),
- \\"name\\" varchar(255) NOT NULL,
- \\"baz_id\\" int check (\\"baz_id\\" > 0) UNIQUE DEFAULT NULL,
- \\"foo_bar_id\\" int check (\\"foo_bar_id\\" > 0) UNIQUE DEFAULT NULL,
- \\"version\\" timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
- PRIMARY KEY (\\"id\\")
-);
+create table \\"foo_bar2\\" (\\"id\\" serial primary key, \\"name\\" varchar(255) not null, \\"baz_id\\" int null, \\"foo_bar_id\\" int null, \\"version\\" timestamp(3) not null default current_timestamp(3));
+alter table \\"foo_bar2\\" add constraint \\"foo_bar2_baz_id_unique\\" unique (\\"baz_id\\");
+alter table \\"foo_bar2\\" add constraint \\"foo_bar2_foo_bar_id_unique\\" unique (\\"foo_bar_id\\");
+create table \\"foo_baz2\\" (\\"id\\" serial primary key, \\"name\\" varchar(255) not null);
-DROP TABLE IF EXISTS \\"foo_baz2\\" CASCADE;
-DROP SEQUENCE IF EXISTS \\"foo_baz2_seq\\";
+create table \\"book2_to_book_tag2\\" (\\"id\\" serial primary key, \\"book2_uuid_pk\\" varchar(36) not null, \\"book_tag2_id\\" int not null);
-CREATE SEQUENCE \\"foo_baz2_seq\\";
-CREATE TABLE \\"foo_baz2\\" (
- \\"id\\" int check (\\"id\\" > 0) NOT NULL DEFAULT NEXTVAL('foo_baz2_seq'),
- \\"name\\" varchar(255) NOT NULL,
- PRIMARY KEY (\\"id\\")
-);
+create table \\"publisher2_to_test2\\" (\\"id\\" serial primary key, \\"publisher2_id\\" int not null, \\"test2_id\\" int not null);
+alter table \\"author2\\" add constraint \\"author2_favourite_book_uuid_pk_foreign\\" foreign key (\\"favourite_book_uuid_pk\\") references \\"book2\\" (\\"uuid_pk\\") on update cascade on delete set null;
+alter table \\"author2\\" add constraint \\"author2_favourite_author_id_foreign\\" foreign key (\\"favourite_author_id\\") references \\"author2\\" (\\"id\\") on update cascade on delete set null;
-DROP TABLE IF EXISTS \\"book2_to_book_tag2\\" CASCADE;
-DROP SEQUENCE IF EXISTS \\"book2_to_book_tag2_seq\\";
+alter table \\"book2\\" add constraint \\"book2_author_id_foreign\\" foreign key (\\"author_id\\") references \\"author2\\" (\\"id\\") on delete set null;
+alter table \\"book2\\" add constraint \\"book2_publisher_id_foreign\\" foreign key (\\"publisher_id\\") references \\"publisher2\\" (\\"id\\") on delete set null;
-CREATE SEQUENCE \\"book2_to_book_tag2_seq\\";
-CREATE TABLE \\"book2_to_book_tag2\\" (
- \\"id\\" int check (\\"id\\" > 0) NOT NULL DEFAULT NEXTVAL('book2_to_book_tag2_seq'),
- \\"book2_uuid_pk\\" varchar(36) NOT NULL,
- \\"book_tag2_id\\" int check (\\"book_tag2_id\\" > 0) NOT NULL,
- PRIMARY KEY (\\"id\\")
-);
+alter table \\"test2\\" add constraint \\"test2_book_uuid_pk_foreign\\" foreign key (\\"book_uuid_pk\\") references \\"book2\\" (\\"uuid_pk\\") on delete set null;
+alter table \\"foo_bar2\\" add constraint \\"foo_bar2_baz_id_foreign\\" foreign key (\\"baz_id\\") references \\"foo_baz2\\" (\\"id\\") on update cascade on delete set null;
+alter table \\"foo_bar2\\" add constraint \\"foo_bar2_foo_bar_id_foreign\\" foreign key (\\"foo_bar_id\\") references \\"foo_bar2\\" (\\"id\\") on update cascade on delete set null;
-DROP TABLE IF EXISTS \\"publisher2_to_test2\\" CASCADE;
-DROP SEQUENCE IF EXISTS \\"publisher2_to_test2_seq\\";
+alter table \\"book2_to_book_tag2\\" add constraint \\"book2_to_book_tag2_book2_uuid_pk_foreign\\" foreign key (\\"book2_uuid_pk\\") references \\"book2\\" (\\"uuid_pk\\") on update cascade on delete cascade;
+alter table \\"book2_to_book_tag2\\" add constraint \\"book2_to_book_tag2_book_tag2_id_foreign\\" foreign key (\\"book_tag2_id\\") references \\"book_tag2\\" (\\"id\\") on update cascade on delete cascade;
-CREATE SEQUENCE \\"publisher2_to_test2_seq\\";
-CREATE TABLE \\"publisher2_to_test2\\" (
- \\"id\\" int check (\\"id\\" > 0) NOT NULL DEFAULT NEXTVAL('publisher2_to_test2_seq'),
- \\"publisher2_id\\" int check (\\"publisher2_id\\" > 0) NOT NULL,
- \\"test2_id\\" int check (\\"test2_id\\" > 0) NOT NULL,
- PRIMARY KEY (\\"id\\")
-);
+alter table \\"publisher2_to_test2\\" add constraint \\"publisher2_to_test2_publisher2_id_foreign\\" foreign key (\\"publisher2_id\\") references \\"publisher2\\" (\\"id\\") on update cascade on delete cascade;
+alter table \\"publisher2_to_test2\\" add constraint \\"publisher2_to_test2_test2_id_foreign\\" foreign key (\\"test2_id\\") references \\"test2\\" (\\"id\\") on update cascade on delete cascade;
-
-ALTER TABLE \\"author2\\"
- ADD CONSTRAINT \\"author2_ibfk_1\\" FOREIGN KEY (\\"favourite_book_uuid_pk\\") REFERENCES \\"book2\\" (\\"uuid_pk\\") ON DELETE SET NULL ON UPDATE CASCADE,
- ADD CONSTRAINT \\"author2_ibfk_2\\" FOREIGN KEY (\\"favourite_author_id\\") REFERENCES \\"author2\\" (\\"id\\") ON DELETE SET NULL ON UPDATE CASCADE;
-
-
-ALTER TABLE \\"book2\\"
- ADD CONSTRAINT \\"book2_ibfk_1\\" FOREIGN KEY (\\"author_id\\") REFERENCES \\"author2\\" (\\"id\\") ON DELETE SET NULL,
- ADD CONSTRAINT \\"book2_ibfk_2\\" FOREIGN KEY (\\"publisher_id\\") REFERENCES \\"publisher2\\" (\\"id\\") ON DELETE SET NULL;
-
-
-ALTER TABLE \\"test2\\"
- ADD CONSTRAINT \\"test2_ibfk_1\\" FOREIGN KEY (\\"book_uuid_pk\\") REFERENCES \\"book2\\" (\\"uuid_pk\\") ON DELETE SET NULL;
-
-
-ALTER TABLE \\"foo_bar2\\"
- ADD CONSTRAINT \\"foo_bar2_ibfk_1\\" FOREIGN KEY (\\"baz_id\\") REFERENCES \\"foo_baz2\\" (\\"id\\") ON DELETE SET NULL ON UPDATE CASCADE,
- ADD CONSTRAINT \\"foo_bar2_ibfk_2\\" FOREIGN KEY (\\"foo_bar_id\\") REFERENCES \\"foo_bar2\\" (\\"id\\") ON DELETE SET NULL ON UPDATE CASCADE;
-
-
-ALTER TABLE \\"book2_to_book_tag2\\"
- ADD CONSTRAINT \\"book2_to_book_tag2_ibfk_1\\" FOREIGN KEY (\\"book2_uuid_pk\\") REFERENCES \\"book2\\" (\\"uuid_pk\\") ON DELETE CASCADE ON UPDATE CASCADE,
- ADD CONSTRAINT \\"book2_to_book_tag2_ibfk_2\\" FOREIGN KEY (\\"book_tag2_id\\") REFERENCES \\"book_tag2\\" (\\"id\\") ON DELETE CASCADE ON UPDATE CASCADE;
-
-
-ALTER TABLE \\"publisher2_to_test2\\"
- ADD CONSTRAINT \\"publisher2_to_test2_ibfk_1\\" FOREIGN KEY (\\"publisher2_id\\") REFERENCES \\"publisher2\\" (\\"id\\") ON DELETE CASCADE ON UPDATE CASCADE,
- ADD CONSTRAINT \\"publisher2_to_test2_ibfk_2\\" FOREIGN KEY (\\"test2_id\\") REFERENCES \\"test2\\" (\\"id\\") ON DELETE CASCADE ON UPDATE CASCADE;
-
-
-SET session_replication_role = 'origin';
+set session_replication_role = 'origin';
 "
 `;
 exports[`SchemaGenerator generate schema from metadata [sqlite]: sqlite-schema-dump 1`] = `
-"PRAGMA foreign_keys=OFF;
-
-
-DROP TABLE IF EXISTS \\"author3\\";
-
-CREATE TABLE \\"author3\\" (
- \\"id\\" INTEGER PRIMARY KEY AUTOINCREMENT,
- \\"created_at\\" TEXT DEFAULT NULL,
- \\"updated_at\\" TEXT DEFAULT NULL,
- \\"name\\" TEXT NOT NULL,
- \\"email\\" TEXT UNIQUE NOT NULL,
- \\"age\\" INTEGER DEFAULT NULL,
- \\"terms_accepted\\" INTEGER NOT NULL DEFAULT 0,
- \\"identities\\" TEXT DEFAULT NULL,
- \\"born\\" TEXT DEFAULT NULL
-);
-
-
-DROP TABLE IF EXISTS \\"book3\\";
-
-CREATE TABLE \\"book3\\" (
- \\"id\\" INTEGER PRIMARY KEY AUTOINCREMENT,
- \\"title\\" TEXT NOT NULL
-);
-
-
-DROP TABLE IF EXISTS \\"book_tag3\\";
-
-CREATE TABLE \\"book_tag3\\" (
- \\"id\\" INTEGER PRIMARY KEY AUTOINCREMENT,
- \\"name\\" TEXT NOT NULL,
- \\"version\\" TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
-);
-
-
-DROP TABLE IF EXISTS \\"publisher3\\";
-
-CREATE TABLE \\"publisher3\\" (
- \\"id\\" INTEGER PRIMARY KEY AUTOINCREMENT,
- \\"name\\" TEXT NOT NULL,
- \\"type\\" TEXT NOT NULL
-);
-
-
-DROP TABLE IF EXISTS \\"test3\\";
+"pragma foreign_keys = off;
-CREATE TABLE \\"test3\\" (
- \\"id\\" INTEGER PRIMARY KEY AUTOINCREMENT,
- \\"name\\" TEXT DEFAULT NULL,
- \\"version\\" INTEGER NOT NULL DEFAULT 1
-);
+drop table if exists \`author3\`;
+drop table if exists \`book3\`;
+drop table if exists \`book_tag3\`;
+drop table if exists \`publisher3\`;
+drop table if exists \`test3\`;
+drop table if exists \`book3_to_book_tag3\`;
+drop table if exists \`publisher3_to_test3\`;
+create table \`author3\` (\`id\` integer not null primary key autoincrement, \`created_at\` text null, \`updated_at\` text null, \`name\` text not null, \`email\` text not null, \`age\` integer null, \`terms_accepted\` integer not null default 0, \`identities\` text null, \`born\` text null);
+create unique index \`author3_email_unique\` on \`author3\` (\`email\`);
-DROP TABLE IF EXISTS \\"book3_to_book_tag3\\";
+create table \`book3\` (\`id\` integer not null primary key autoincrement, \`title\` text not null);
-CREATE TABLE \\"book3_to_book_tag3\\" (
- \\"id\\" INTEGER PRIMARY KEY AUTOINCREMENT
-);
+create table \`book_tag3\` (\`id\` integer not null primary key autoincrement, \`name\` text not null, \`version\` text not null default current_timestamp);
+create table \`publisher3\` (\`id\` integer not null primary key autoincrement, \`name\` text not null, \`type\` text not null);
-DROP TABLE IF EXISTS \\"publisher3_to_test3\\";
+create table \`test3\` (\`id\` integer not null primary key autoincrement, \`name\` text null, \`version\` integer not null default 1);
-CREATE TABLE \\"publisher3_to_test3\\" (
- \\"id\\" INTEGER PRIMARY KEY AUTOINCREMENT
-);
+create table \`book3_to_book_tag3\` (\`id\` integer not null primary key autoincrement);
+create table \`publisher3_to_test3\` (\`id\` integer not null primary key autoincrement);
-ALTER TABLE \\"author3\\" ADD \\"favourite_book_id\\" INTEGER DEFAULT NULL REFERENCES \\"book3\\" (\\"id\\") ON DELETE SET NULL ON UPDATE CASCADE;
+alter table \`author3\` add column \`favourite_book_id\` integer null;
+create index \`author3_favourite_book_id_index\` on \`author3\` (\`favourite_book_id\`);
-ALTER TABLE \\"book3\\" ADD \\"author_id\\" INTEGER DEFAULT NULL REFERENCES \\"author3\\" (\\"id\\") ON DELETE SET NULL ON UPDATE CASCADE;
-ALTER TABLE \\"book3\\" ADD \\"publisher_id\\" INTEGER DEFAULT NULL REFERENCES \\"publisher3\\" (\\"id\\") ON DELETE SET NULL ON UPDATE CASCADE;
+alter table \`book3\` add column \`author_id\` integer null;
+alter table \`book3\` add column \`publisher_id\` integer null;
+create index \`book3_author_id_index\` on \`book3\` (\`author_id\`);
+create index \`book3_publisher_id_index\` on \`book3\` (\`publisher_id\`);
-ALTER TABLE \\"book3_to_book_tag3\\" ADD \\"book3_id\\" INTEGER DEFAULT NULL REFERENCES \\"book3\\" (\\"id\\") ON DELETE CASCADE ON UPDATE CASCADE;
-ALTER TABLE \\"book3_to_book_tag3\\" ADD \\"book_tag3_id\\" INTEGER DEFAULT NULL REFERENCES \\"book_tag3\\" (\\"id\\") ON DELETE CASCADE ON UPDATE CASCADE;
+alter table \`book3_to_book_tag3\` add column \`book3_id\` integer null;
+alter table \`book3_to_book_tag3\` add column \`book_tag3_id\` integer null;
+create index \`book3_to_book_tag3_book3_id_index\` on \`book3_to_book_tag3\` (\`book3_id\`);
+create index \`book3_to_book_tag3_book_tag3_id_index\` on \`book3_to_book_tag3\` (\`book_tag3_id\`);
-ALTER TABLE \\"publisher3_to_test3\\" ADD \\"publisher3_id\\" INTEGER DEFAULT NULL REFERENCES \\"publisher3\\" (\\"id\\") ON DELETE CASCADE ON UPDATE CASCADE;
-ALTER TABLE \\"publisher3_to_test3\\" ADD \\"test3_id\\" INTEGER DEFAULT NULL REFERENCES \\"test3\\" (\\"id\\") ON DELETE CASCADE ON UPDATE CASCADE;
+alter table \`publisher3_to_test3\` add column \`publisher3_id\` integer null;
+alter table \`publisher3_to_test3\` add column \`test3_id\` integer null;
+create index \`publisher3_to_test3_publisher3_id_index\` on \`publisher3_to_test3\` (\`publisher3_id\`);
+create index \`publisher3_to_test3_test3_id_index\` on \`publisher3_to_test3\` (\`test3_id\`);
-PRAGMA foreign_keys=ON;
+pragma foreign_keys = on;
 "
 `;
diff --git a/tests/bootstrap.ts b/tests/bootstrap.ts
index 56fc49cdf9ca..513857294311 100644
--- a/tests/bootstrap.ts
+++ b/tests/bootstrap.ts
@@ -1,6 +1,6 @@
 import { EntityManager, JavaScriptMetadataProvider, MikroORM } from '../lib';
 import { Author, Book, BookTag, Publisher, Test } from './entities';
-import { Author2, Book2, BookTag2, FooBaz2, Publisher2, Test2, FooBar2 } from './entities-sql';
+import { Author2, Book2, BookTag2, FooBar2, FooBaz2, Publisher2, Test2 } from './entities-sql';
 import { SqliteDriver } from '../lib/drivers/SqliteDriver';
 import { MySqlConnection } from '../lib/connections/MySqlConnection';
 import { SqliteConnection } from '../lib/connections/SqliteConnection';
diff --git a/tests/entities-sql/Author2.ts b/tests/entities-sql/Author2.ts
index 6bf9cb5ba125..f15552c3af3d 100644
--- a/tests/entities-sql/Author2.ts
+++ b/tests/entities-sql/Author2.ts
@@ -12,10 +12,10 @@ export class Author2 extends BaseEntity2 {
   static beforeDestroyCalled = 0;
   static afterDestroyCalled = 0;
-  @Property({ length: 3, default: 'CURRENT_TIMESTAMP(3)' })
+  @Property({ length: 3, default: 'current_timestamp(3)' })
   createdAt = new Date();
-  @Property({ onUpdate: () => new Date(), length: 3, default: 'CURRENT_TIMESTAMP(3)' })
+  @Property({ onUpdate: () => new Date(), length: 3, default: 'current_timestamp(3)' })
   updatedAt = new Date();
   @Property()
diff --git a/tests/entities-sql/Book2.ts b/tests/entities-sql/Book2.ts
index 85afb354d6eb..4d82bb879188 100644
--- a/tests/entities-sql/Book2.ts
+++ b/tests/entities-sql/Book2.ts
@@ -11,7 +11,7 @@ export class Book2 {
   @PrimaryKey({ fieldName: 'uuid_pk', length: 36 })
   uuid = v4();
-  @Property({ default: 'CURRENT_TIMESTAMP(3)', length: 3 })
+  @Property({ default: 'current_timestamp(3)', length: 3 })
   createdAt = new Date();
   @Property({ nullable: true })
diff --git a/yarn.lock b/yarn.lock
index 55a3342546eb..cbb9498c5704 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -958,6 +958,11 @@ array-differ@^3.0.0:
   resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-3.0.0.tgz#3cbb3d0f316810eafcc47624734237d6aee4ae6b"
   integrity sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg==
+array-each@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/array-each/-/array-each-1.0.1.tgz#a794af0c05ab1752846ee753a1f211a05ba0c44f"
+  integrity sha1-p5SvDAWrF1KEbudTofIRoFugxE8=
+
 array-equal@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93"
@@ -973,6 +978,11 @@ array-ify@^1.0.0:
   resolved "https://registry.yarnpkg.com/array-ify/-/array-ify-1.0.0.tgz#9e528762b4a9066ad163a6962a364418e9626ece"
   integrity sha1-nlKHYrSpBmrRY6aWKjZEGOlibs4=
+array-slice@^1.0.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-1.1.0.tgz#e368ea15f89bc7069f7ffb89aec3a6c7d4ac22d4"
+  integrity sha512-B1qMD3RBP7O8o0H2KbrXDyB0IccejMF15+87Lvlor12ONPRHP6gTjXMNkt/d3ZuOGbAe66hFmaCfECI24Ufp6w==
+
 array-union@^1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39"
@@ -1596,6 +1606,11 @@ color-name@1.1.3:
   resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
   integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
+colorette@1.0.8:
+  version "1.0.8"
+  resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.0.8.tgz#421ff11c80b7414027ebed922396bc1833d1903c"
+  integrity sha512-X6Ck90ReaF+EfKdVGB7vdIQ3dr651BbIrBwY5YBKg13fjH+940sTtp7/Pkx33C6ntYfQcRumOs/aUQhaRPpbTQ==
+
 colors@1.0.3:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/colors/-/colors-1.0.3.tgz#0433f44d809680fdeb60ed260f1b0c262e82a40b"
@@ -1897,6 +1912,13 @@ debug@3.1.0:
   dependencies:
     ms "2.0.0"
+debug@4.1.1, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1:
+  version "4.1.1"
+  resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791"
+  integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==
+  dependencies:
+    ms "^2.1.1"
+
 debug@^2.2.0, debug@^2.3.3:
   version "2.6.9"
   resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
@@ -1911,13 +1933,6 @@ debug@^3.1.0, debug@^3.2.6:
   dependencies:
     ms "^2.1.1"
-debug@^4.0.0, debug@^4.1.0, debug@^4.1.1:
-  version "4.1.1"
-  resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791"
-  integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==
-  dependencies:
-    ms "^2.1.1"
-
 debuglog@*, debuglog@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492"
@@ -2037,6 +2052,11 @@ deprecation@^2.0.0:
   resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919"
   integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==
+detect-file@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7"
+  integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc=
+
 detect-indent@~5.0.0:
   version "5.0.0"
   resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-5.0.0.tgz#3871cc0a6a002e8c3e5b3cf7f336264675f06b9d"
@@ -2333,6 +2353,13 @@ expand-brackets@^2.1.4:
     snapdragon "^0.8.1"
     to-regex "^3.0.1"
+expand-tilde@^2.0.0, expand-tilde@^2.0.2:
+  version "2.0.2"
+  resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502"
+  integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI=
+  dependencies:
+    homedir-polyfill "^1.0.1"
+
 expect@^24.8.0:
   version "24.8.0"
   resolved "https://registry.yarnpkg.com/expect/-/expect-24.8.0.tgz#471f8ec256b7b6129ca2524b2a62f030df38718d"
@@ -2360,7 +2387,7 @@ extend-shallow@^3.0.0, extend-shallow@^3.0.2:
     assign-symbols "^1.0.0"
     is-extendable "^1.0.1"
-extend@~3.0.2:
+extend@^3.0.0, extend@~3.0.2:
   version "3.0.2"
   resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
   integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
@@ -2509,6 +2536,32 @@ find-versions@^3.0.0:
    array-uniq "^2.1.0"
    semver-regex "^2.0.0"
+findup-sync@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1"
+  integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg==
+  dependencies:
+    detect-file "^1.0.0"
+    is-glob "^4.0.0"
+    micromatch "^3.0.4"
+    resolve-dir "^1.0.1"
+
+fined@^1.0.1:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/fined/-/fined-1.2.0.tgz#d00beccf1aa2b475d16d423b0238b713a2c4a37b"
+  integrity sha512-ZYDqPLGxDkDhDZBjZBb+oD1+j0rA4E0pXY50eplAAOPg2N/gUBSSk5IM1/QhPfyVo19lJ+CvXpqfvk+b2p/8Ng==
+  dependencies:
+    expand-tilde "^2.0.2"
+    is-plain-object "^2.0.3"
+    object.defaults "^1.1.0"
+    object.pick "^1.2.0"
+    parse-filepath "^1.0.1"
+
+flagged-respawn@^1.0.0:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/flagged-respawn/-/flagged-respawn-1.0.1.tgz#e7de6f1279ddd9ca9aac8a5971d618606b3aab41"
+  integrity sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q==
+
 flush-write-stream@^1.0.0:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8"
@@ -2517,11 +2570,18 @@ flush-write-stream@^1.0.0:
     inherits "^2.0.3"
     readable-stream "^2.3.6"
-for-in@^1.0.2:
+for-in@^1.0.1, for-in@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
   integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=
+for-own@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b"
+  integrity sha1-xjMy9BXO3EsE2/5wz4NklMU8tEs=
+  dependencies:
+    for-in "^1.0.1"
+
 forever-agent@~0.6.1:
   version "0.6.1"
   resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
@@ -2701,6 +2761,11 @@ get-value@^2.0.3, get-value@^2.0.6:
   resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28"
   integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=
+getopts@2.2.4:
+  version "2.2.4"
+  resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.2.4.tgz#3137fe8a5fddf304904059a851bdc1c22f0f54fb"
+  integrity sha512-Rz7DGyomZjrenu9Jx4qmzdlvJgvrEFHXHvjK0FcZtcTC1U5FmES7OdZHUwMuSnEE6QvBvwse1JODKj7TgbSEjQ==
+
 getpass@^0.1.1:
   version "0.1.7"
   resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa"
@@ -2757,6 +2822,26 @@ global-dirs@^0.1.0, global-dirs@^0.1.1:
   dependencies:
    ini "^1.3.4"
+global-modules@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea"
+  integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==
+  dependencies:
+    global-prefix "^1.0.1"
+    is-windows "^1.0.1"
+    resolve-dir "^1.0.0"
+
+global-prefix@^1.0.1:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe"
+  integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=
+  dependencies:
+    expand-tilde "^2.0.2"
+    homedir-polyfill "^1.0.1"
+    ini "^1.3.4"
+    is-windows "^1.0.1"
+    which "^1.2.14"
+
 globals@^11.1.0:
   version "11.12.0"
   resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e"
@@ -2908,6 +2993,13 @@ has@^1.0.1, has@^1.0.3:
   dependencies:
    function-bind "^1.1.1"
+homedir-polyfill@^1.0.1:
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8"
+  integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==
+  dependencies:
+    parse-passwd "^1.0.0"
+
 hook-std@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/hook-std/-/hook-std-2.0.0.tgz#ff9aafdebb6a989a354f729bb6445cf4a3a7077c"
@@ -3061,7 +3153,7 @@ inflight@^1.0.4, inflight@~1.0.6:
    once "^1.3.0"
    wrappy "1"
-inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3:
+inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3, inherits@~2.0.4:
   version "2.0.4"
   resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
   integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
@@ -3085,6 +3177,11 @@ init-package-json@^1.10.3:
    validate-npm-package-license "^3.0.1"
    validate-npm-package-name "^3.0.0"
+interpret@^1.2.0:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.2.0.tgz#d5061a6224be58e8083985f5014d844359576296"
+  integrity sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw==
+
 into-stream@^5.0.0:
   version "5.1.0"
   resolved "https://registry.yarnpkg.com/into-stream/-/into-stream-5.1.0.tgz#b05f37d8fed05c06a0b43b556d74e53e5af23878"
@@ -3254,7 +3351,7 @@ is-generator-fn@^2.0.0:
   resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118"
   integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==
-is-glob@^4.0.1:
+is-glob@^4.0.0, is-glob@^4.0.1:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
   integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
@@ -4021,6 +4118,27 @@ kleur@^3.0.2:
   resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e"
   integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==
+knex@^0.19.0:
+  version "0.19.0"
+  resolved "https://registry.yarnpkg.com/knex/-/knex-0.19.0.tgz#3c0383e70f01b35836079b8ef837fc6566c1aab3"
+  integrity sha512-4acpjPAugogM5KmffJ6wG2e9nrmDq6Xg4tWk0pEHJKfAnqBefMPD0Si9oA2aYzg3Fsfey5FvZYXEDNBsNHeVkw==
+  dependencies:
+    bluebird "^3.5.5"
+    colorette "1.0.8"
+    commander "^2.20.0"
+    debug "4.1.1"
+    getopts "2.2.4"
+    inherits "~2.0.4"
+    interpret "^1.2.0"
+    liftoff "3.1.0"
+    lodash "^4.17.14"
+    mkdirp "^0.5.1"
+    pg-connection-string "2.0.0"
+    tarn "^2.0.0"
+    tildify "2.0.0"
+    uuid "^3.3.2"
+    v8flags "^3.1.3"
+
 latest-version@^3.0.0:
   version "3.1.0"
   resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-3.1.0.tgz#a205383fea322b33b5ae3b18abee0dc2f356ee15"
@@ -4204,6 +4322,20 @@ libnpx@^10.2.0:
    y18n "^4.0.0"
    yargs "^11.0.0"
+liftoff@3.1.0:
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/liftoff/-/liftoff-3.1.0.tgz#c9ba6081f908670607ee79062d700df062c52ed3"
+  integrity sha512-DlIPlJUkCV0Ips2zf2pJP0unEoT1kwYhiiPUGF3s/jtxTCjziNLoiVVh+jqWOWeFi6mmwQ5fNxvAUyPad4Dfog==
+  dependencies:
+    extend "^3.0.0"
+    findup-sync "^3.0.0"
+    fined "^1.0.1"
+    flagged-respawn "^1.0.0"
+    is-plain-object "^2.0.4"
+    object.map "^1.0.0"
+    rechoir "^0.6.2"
+    resolve "^1.1.7"
+
 lines-and-columns@^1.1.6:
   version "1.1.6"
   resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00"
@@ -4460,7 +4592,7 @@ lodash@4.17.11:
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
   integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==
-lodash@^4.17.11, lodash@^4.17.4, lodash@^4.2.1:
+lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.4, lodash@^4.2.1:
   version "4.17.14"
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.14.tgz#9ce487ae66c96254fe20b599f21b6816028078ba"
   integrity sha512-mmKYbW3GLuJeX+iGP+Y7Gp1AiGHGbXHCOh/jZmrawMmsE7MS4znI3RL2FsjbqOyMayHInjOeykW7PEajUk1/xw==
@@ -4575,6 +4707,13 @@ make-fetch-happen@^4.0.1, make-fetch-happen@^4.0.2:
    socks-proxy-agent "^4.0.0"
    ssri "^6.0.0"
+make-iterator@^1.0.0:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/make-iterator/-/make-iterator-1.0.1.tgz#29b33f312aa8f547c4a5e490f56afcec99133ad6"
+  integrity sha512-pxiuXh0iVEq7VM7KMIhs5gxsfxCux2URptUQaXo4iZZJxBAzTPOLE2BumO5dbfVYq/hBJFBR/a1mFDmOx5AGmw==
+  dependencies:
+    kind-of "^6.0.2"
+
 makeerror@1.0.x:
   version "1.0.11"
   resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c"
@@ -4589,7 +4728,7 @@ map-age-cleaner@^0.1.1:
   dependencies:
    p-defer "^1.0.0"
-map-cache@^0.2.2:
+map-cache@^0.2.0, map-cache@^0.2.2:
   version "0.2.2"
   resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf"
   integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=
@@ -4701,7 +4840,7 @@ merge2@^1.2.3:
   resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.2.3.tgz#7ee99dbd69bb6481689253f018488a1b902b0ed5"
   integrity sha512-gdUU1Fwj5ep4kplwcmftruWofEFt6lfpkkr3h860CXbAB9c3hGb55EOL2ali0Td5oebvW0E1+3Sr+Ur7XfKpRA==
-micromatch@^3.1.10, micromatch@^3.1.4:
+micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4:
   version "3.1.10"
   resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23"
   integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==
@@ -5376,6 +5515,16 @@ object-visit@^1.0.0:
   dependencies:
    isobject "^3.0.0"
+object.defaults@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/object.defaults/-/object.defaults-1.1.0.tgz#3a7f868334b407dea06da16d88d5cd29e435fecf"
+  integrity sha1-On+GgzS0B96gbaFtiNXNKeQ1/s8=
+  dependencies:
+    array-each "^1.0.1"
+    array-slice "^1.0.0"
+    for-own "^1.0.0"
+    isobject "^3.0.0"
+
 object.getownpropertydescriptors@^2.0.3:
   version "2.0.3"
   resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16"
@@ -5384,7 +5533,15 @@ object.getownpropertydescriptors@^2.0.3:
    define-properties "^1.1.2"
    es-abstract "^1.5.1"
-object.pick@^1.3.0:
+object.map@^1.0.0:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/object.map/-/object.map-1.0.1.tgz#cf83e59dc8fcc0ad5f4250e1f78b3b81bd801d37"
+  integrity sha1-z4Plncj8wK1fQlDh94s7gb2AHTc=
+  dependencies:
+    for-own "^1.0.0"
+    make-iterator "^1.0.0"
+
+object.pick@^1.2.0, object.pick@^1.3.0:
   version "1.3.0"
   resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747"
   integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=
@@ -5657,6 +5814,15 @@ parent-module@^1.0.0:
   dependencies:
    callsites "^3.0.0"
+parse-filepath@^1.0.1:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/parse-filepath/-/parse-filepath-1.0.2.tgz#a632127f53aaf3d15876f5872f3ffac763d6c891"
+  integrity sha1-pjISf1Oq89FYdvWHLz/6x2PWyJE=
+  dependencies:
+    is-absolute "^1.0.0"
+    map-cache "^0.2.0"
+    path-root "^0.1.1"
+
 parse-github-url@^1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/parse-github-url/-/parse-github-url-1.0.2.tgz#242d3b65cbcdda14bb50439e3242acf6971db395"
@@ -5680,6 +5846,11 @@ parse-json@^5.0.0:
    json-parse-better-errors "^1.0.1"
    lines-and-columns "^1.1.6"
+parse-passwd@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6"
+  integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY=
+
 parse5@4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/parse5/-/parse5-4.0.0.tgz#6d78656e3da8d78b4ec0b906f7c08ef1dfe3f608"
@@ -5725,6 +5896,18 @@ path-parse@^1.0.6:
   resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c"
   integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==
+path-root-regex@^0.1.0:
+  version "0.1.2"
+  resolved "https://registry.yarnpkg.com/path-root-regex/-/path-root-regex-0.1.2.tgz#bfccdc8df5b12dc52c8b43ec38d18d72c04ba96d"
+  integrity sha1-v8zcjfWxLcUsi0PsONGNcsBLqW0=
+
+path-root@^0.1.1:
+  version "0.1.1"
+  resolved "https://registry.yarnpkg.com/path-root/-/path-root-0.1.1.tgz#9a4a6814cac1c0cd73360a95f32083c8ea4745b7"
+  integrity sha1-mkpoFMrBwM1zNgqV8yCDyOpHRbc=
+  dependencies:
+    path-root-regex "^0.1.0"
+
 path-type@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f"
@@ -5752,6 +5935,11 @@ pg-connection-string@0.1.3:
   resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-0.1.3.tgz#da1847b20940e42ee1492beaf65d49d91b245df7"
   integrity sha1-2hhHsglA5C7hSSvq9l1J2RskXfc=
+pg-connection-string@2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.0.0.tgz#3eefe5997e06d94821e4d502e42b6a1c73f8df82"
+  integrity sha1-Pu/lmX4G2Ugh5NUC5CtqHHP434I=
+
 pg-int8@1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c"
@@ -6199,6 +6387,13 @@ realpath-native@^1.1.0:
   dependencies:
    util.promisify "^1.0.0"
+rechoir@^0.6.2:
+  version "0.6.2"
+  resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384"
+  integrity sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=
+  dependencies:
+    resolve "^1.1.6"
+
 redent@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/redent/-/redent-2.0.0.tgz#c1b2007b42d57eb1389079b3c8333639d5e1ccaa"
@@ -6342,6 +6537,14 @@ resolve-cwd@^2.0.0:
   dependencies:
    resolve-from "^3.0.0"
+resolve-dir@^1.0.0, resolve-dir@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43"
+  integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M=
+  dependencies:
+    expand-tilde "^2.0.0"
+    global-modules "^1.0.0"
+
 resolve-from@5.0.0, resolve-from@^5.0.0:
   version "5.0.0"
   resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
@@ -6379,7 +6582,7 @@ resolve@1.1.7:
   resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b"
   integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs=
-resolve@1.x, resolve@^1.10.0, resolve@^1.3.2:
+resolve@1.x, resolve@^1.1.6, resolve@^1.1.7, resolve@^1.10.0, resolve@^1.3.2:
   version "1.11.1"
   resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.11.1.tgz#ea10d8110376982fef578df8fc30b9ac30a07a3e"
   integrity sha512-vIpgF6wfuJOZI7KKKSP+HmiKggadPQAdsp5HiC1mvqnfp0gF1vdwgBWZIdrVft9pgqoMFQN+R7BSWZiBxx+BBw==
@@ -7105,6 +7308,11 @@ tar@^4, tar@^4.4.10, tar@^4.4.8:
    safe-buffer "^5.1.2"
    yallist "^3.0.3"
+tarn@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/tarn/-/tarn-2.0.0.tgz#c68499f69881f99ae955b4317ca7d212d942fdee"
+  integrity sha512-7rNMCZd3s9bhQh47ksAQd92ADFcJUjjbyOvyFjNLwTPpGieFHMC84S+LOzw0fx1uh6hnDz/19r8CPMnIjJlMMA==
+
 term-size@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/term-size/-/term-size-1.2.0.tgz#458b83887f288fc56d6fffbfad262e26638efa69"
@@ -7150,6 +7358,11 @@ through@2, "through@>=2.2.7 <3":
   resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
   integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=
+tildify@2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/tildify/-/tildify-2.0.0.tgz#f205f3674d677ce698b7067a99e949ce03b4754a"
+  integrity sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==
+
 timed-out@^4.0.0:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f"
@@ -7524,6 +7737,13 @@ uuid@^3.3.2:
   resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
   integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==
+v8flags@^3.1.3:
+  version "3.1.3"
+  resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.1.3.tgz#fc9dc23521ca20c5433f81cc4eb9b3033bb105d8"
+  integrity sha512-amh9CCg3ZxkzQ48Mhcb8iX7xpAfYJgePHxWMQCBWECpOSqJUXgY26ncA61UTV0BkPqfhcy6mzwCIoP4ygxpW8w==
+  dependencies:
+    homedir-polyfill "^1.0.1"
+
 validate-npm-package-license@^3.0.1, validate-npm-package-license@^3.0.4:
   version "3.0.4"
   resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a"
@@ -7609,7 +7829,7 @@ which-module@^2.0.0:
   resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
   integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
-which@1, which@^1.2.9, which@^1.3.0, which@^1.3.1:
+which@1, which@^1.2.14, which@^1.2.9, which@^1.3.0, which@^1.3.1:
   version "1.3.1"
   resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
   integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==