From 63634201e2e2522f25172c8bc3707745e2a83d3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Ad=C3=A1mek?= Date: Fri, 15 Mar 2019 02:21:09 +0100 Subject: [PATCH 1/2] feat(drivers): add support for PostgreSQL This adds PostgreSqlDriver and its dependencies. Under the hood it uses pg driver. Closes #17 --- .travis.yml | 2 + README.md | 4 +- TODO.md | 1 - docs/installation.md | 2 + docs/usage-with-sql.md | 3 +- lib/connections/Connection.ts | 42 +- lib/connections/MySqlConnection.ts | 41 +- lib/connections/PostgreSqlConnection.ts | 69 +++ lib/connections/SqliteConnection.ts | 33 +- lib/drivers/PostgreSqlDriver.ts | 28 + lib/platforms/Platform.ts | 8 + lib/platforms/PostgreSqlPlatform.ts | 26 + lib/query/QueryBuilder.ts | 4 +- lib/query/QueryBuilderHelper.ts | 21 +- package.json | 3 + tests/EntityManager.postgre.test.ts | 781 ++++++++++++++++++++++++ tests/bootstrap.ts | 29 + tests/postgre-schema.sql | 101 +++ 18 files changed, 1141 insertions(+), 57 deletions(-) create mode 100644 lib/connections/PostgreSqlConnection.ts create mode 100644 lib/drivers/PostgreSqlDriver.ts create mode 100644 lib/platforms/PostgreSqlPlatform.ts create mode 100644 tests/EntityManager.postgre.test.ts create mode 100644 tests/postgre-schema.sql diff --git a/.travis.yml b/.travis.yml index 50573869a1ff..005d1eeffb79 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,6 +9,7 @@ node_js: services: - mongodb - mysql + - postgresql cache: directories: @@ -20,6 +21,7 @@ script: before_install: - mysql -u root -e 'CREATE DATABASE mikro_orm_test;' + - psql -c 'create database mikro_orm_test;' -U postgres - curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter - chmod +x ./cc-test-reporter - ./cc-test-reporter before-build diff --git a/README.md b/README.md index 6ee05a6895ef..a8fc69543a7e 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # MikroORM Simple typescript ORM for node.js based on data-mapper, unit-of-work and identity-map 
patterns. Supports MongoDB, -MySQL and SQLite databases. +MySQL, PostgreSQL and SQLite databases. Heavily inspired by [Doctrine](https://www.doctrine-project.org/) and [Nextras Orm](https://nextras.org/orm/). @@ -38,6 +38,7 @@ First install the module via `yarn` or `npm` and do not forget to install the da ``` $ yarn add mikro-orm mongodb # for mongo $ yarn add mikro-orm mysql2 # for mysql +$ yarn add mikro-orm pg # for postgresql $ yarn add mikro-orm sqlite # for sqlite ``` @@ -46,6 +47,7 @@ or ``` $ npm i -s mikro-orm mongodb # for mongo $ npm i -s mikro-orm mysql2 # for mysql +$ npm i -s mikro-orm pg # for postgresql $ npm i -s mikro-orm sqlite # for sqlite ``` diff --git a/TODO.md b/TODO.md index 7eadeb45c01f..72a4c27b0080 100644 --- a/TODO.md +++ b/TODO.md @@ -1,6 +1,5 @@ # TODO list -- postgres driver - schema generator for SQL drivers - single table inheritance - implement transactions in mongo driver diff --git a/docs/installation.md b/docs/installation.md index 54386887163c..c9ad88f3a274 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -8,6 +8,7 @@ First install the module via `yarn` or `npm` and do not forget to install the da ``` $ yarn add mikro-orm mongodb # for mongo $ yarn add mikro-orm mysql2 # for mysql +$ yarn add mikro-orm pg # for postgresql $ yarn add mikro-orm sqlite # for sqlite ``` @@ -16,6 +17,7 @@ or ``` $ npm i -s mikro-orm mongodb # for mongo $ npm i -s mikro-orm mysql2 # for mysql +$ npm i -s mikro-orm pg # for postgresql $ npm i -s mikro-orm sqlite # for sqlite ``` diff --git a/docs/usage-with-sql.md b/docs/usage-with-sql.md index b2c8d37ddd2a..01c3806d671d 100644 --- a/docs/usage-with-sql.md +++ b/docs/usage-with-sql.md @@ -6,7 +6,8 @@ To use `mikro-orm` with MySQL database, do not forget to install `mysql2` dependency and provide `MySqlDriver` class when initializing ORM. -Similarly for SQLite install `sqlite` dependency and provide `SqliteDriver`. 
+Similarly for SQLite install `sqlite` dependency and provide `SqliteDriver`. For PostgreSQL +install `pg` and provide `PostgreSqlDriver`. Then call `MikroORM.init` as part of bootstrapping your app: diff --git a/lib/connections/Connection.ts b/lib/connections/Connection.ts index 21e3fc6cfea6..d9dd86c0ff27 100644 --- a/lib/connections/Connection.ts +++ b/lib/connections/Connection.ts @@ -1,8 +1,10 @@ +import { URL } from 'url'; import { Configuration } from '../utils'; export abstract class Connection { protected readonly logger = this.config.getLogger(); + protected abstract client: any; constructor(protected readonly config: Configuration) { } @@ -47,7 +49,37 @@ export abstract class Connection { throw new Error(`Transactions are not supported by current driver`); } - abstract async execute(query: string, params?: any[], method?: string): Promise; + abstract async execute(query: string, params: any[], method?: 'all' | 'get' | 'run'): Promise; + + getConnectionOptions(): ConnectionConfig { + const ret: ConnectionConfig = {}; + const url = new URL(this.config.getClientUrl()); + ret.host = this.config.get('host', url.hostname); + ret.port = this.config.get('port', +url.port); + ret.user = this.config.get('user', url.username); + ret.password = this.config.get('password', url.password); + ret.database = this.config.get('dbName', url.pathname.replace(/^\//, '')); + + return ret; + } + + protected async executeQuery(query: string, params: any[], cb: () => Promise): Promise { + try { + const now = Date.now(); + const res = await cb(); + this.logQuery(query + ` [took ${Date.now() - now} ms]`); + + return res; + } catch (e) { + e.message += `\n in query: ${query}`; + + if (params && params.length) { + e.message += `\n with params: ${JSON.stringify(params)}`; + } + + throw e; + } + } protected logQuery(query: string): void { this.logger.debug(`[query-logger] ${query}`); @@ -59,3 +91,11 @@ export interface QueryResult { affectedRows: number; insertId: number; } + +export 
interface ConnectionConfig { + host?: string; + port?: number; + user?: string; + password?: string; + database?: string; +} diff --git a/lib/connections/MySqlConnection.ts b/lib/connections/MySqlConnection.ts index 183a5568c493..4ce41f676468 100644 --- a/lib/connections/MySqlConnection.ts +++ b/lib/connections/MySqlConnection.ts @@ -1,23 +1,22 @@ import { Connection as MySql2Connection, ConnectionOptions, createConnection } from 'mysql2/promise'; import { readFileSync } from 'fs'; -import { URL } from 'url'; import { Connection, QueryResult } from './Connection'; export class MySqlConnection extends Connection { - private connection: MySql2Connection; + protected client: MySql2Connection; async connect(): Promise { - this.connection = await createConnection(this.getConnectionOptions()); + this.client = await createConnection(this.getConnectionOptions()); } async close(force?: boolean): Promise { - await this.connection.end({ force }); + await this.client.end({ force }); } async isConnected(): Promise { try { - await this.connection.query('SELECT 1'); + await this.client.query('SELECT 1'); return true; } catch { return false; @@ -41,35 +40,17 @@ export class MySqlConnection extends Connection { } async execute(query: string, params: any[] = [], method: 'all' | 'get' | 'run' = 'all'): Promise { - try { - const now = Date.now(); - const res = await this.connection.execute(query, params); - this.logQuery(query + ` [took ${Date.now() - now} ms]`); - - if (method === 'get') { - return (res as QueryResult[][])[0][0]; - } - - return res[0]; - } catch (e) { - e.message += `\n in query: ${query}`; + const res = await this.executeQuery(query, params, () => this.client.execute(query, params)); - if (params && params.length) { - e.message += `\n with params: ${JSON.stringify(params)}`; - } - - throw e; + if (method === 'get') { + return (res as QueryResult[][])[0][0]; } + + return res[0]; } getConnectionOptions(): ConnectionOptions { - const ret = {} as ConnectionOptions; - 
const url = new URL(this.config.getClientUrl()); - ret.host = this.config.get('host', url.hostname); - ret.port = this.config.get('port', +url.port); - ret.user = this.config.get('user', url.username); - ret.password = this.config.get('password', url.password); - ret.database = this.config.get('dbName', url.pathname.replace(/^\//, '')); + const ret: ConnectionOptions = super.getConnectionOptions(); if (this.config.get('multipleStatements')) { ret.multipleStatements = this.config.get('multipleStatements'); @@ -85,7 +66,7 @@ export class MySqlConnection extends Connection { private async query(sql: string): Promise { const now = Date.now(); - await this.connection.query(sql); + await this.client.query(sql); this.logQuery(`${sql} [took ${Date.now() - now} ms]`); } diff --git a/lib/connections/PostgreSqlConnection.ts b/lib/connections/PostgreSqlConnection.ts new file mode 100644 index 000000000000..97b65d3f285f --- /dev/null +++ b/lib/connections/PostgreSqlConnection.ts @@ -0,0 +1,69 @@ +import { Client } from 'pg'; +import { readFileSync } from 'fs'; +import { Connection, QueryResult } from './Connection'; +import { EntityData, IEntity } from '../decorators'; + +export class PostgreSqlConnection extends Connection { + + protected client: Client; + + async connect(): Promise { + this.client = new Client(this.getConnectionOptions()); + await this.client.connect(); + } + + async close(force?: boolean): Promise { + await this.client.end(); + } + + async isConnected(): Promise { + try { + await this.client.query('SELECT 1'); + return true; + } catch { + return false; + } + } + + async beginTransaction(savepoint?: string): Promise { + await this.execute(savepoint ? `SAVEPOINT ${savepoint}` : 'START TRANSACTION', [], 'run'); + } + + async commit(savepoint?: string): Promise { + await this.execute(savepoint ? `RELEASE SAVEPOINT ${savepoint}` : 'COMMIT', [], 'run'); + } + + async rollback(savepoint?: string): Promise { + await this.execute(savepoint ? 
`ROLLBACK TO SAVEPOINT ${savepoint}` : 'ROLLBACK', [], 'run'); + } + + getDefaultClientUrl(): string { + return 'postgre://postgres@127.0.0.1:5432'; + } + + async execute(query: string, params: any[] = [], method: 'all' | 'get' | 'run' = 'all'): Promise { + const res = await this.executeQuery(query, params, () => this.client.query(query, params)); + return this.transformResult(res, method); + } + + async loadFile(path: string): Promise { + const file = readFileSync(path); + await this.execute(file.toString()); + } + + private transformResult(res: any, method: 'all' | 'get' | 'run'): QueryResult | EntityData | EntityData[] { + if (method === 'get') { + return res.rows[0]; + } + + if (method === 'run') { + return { + affectedRows: res.rowCount, + insertId: res.rows[0] ? res.rows[0].id : 0, + }; + } + + return res.rows; + } + +} diff --git a/lib/connections/SqliteConnection.ts b/lib/connections/SqliteConnection.ts index b4019eb6e891..eec2206222c0 100644 --- a/lib/connections/SqliteConnection.ts +++ b/lib/connections/SqliteConnection.ts @@ -7,19 +7,19 @@ import { EntityData, IEntity } from '../decorators'; export class SqliteConnection extends Connection { - private connection: SqliteDatabase; + protected client: SqliteDatabase; async connect(): Promise { - this.connection = await sqlite.open(this.config.get('dbName')) as SqliteDatabase; - await this.connection.exec('PRAGMA foreign_keys = ON'); + this.client = await sqlite.open(this.config.get('dbName')) as SqliteDatabase; + await this.client.exec('PRAGMA foreign_keys = ON'); } async close(force?: boolean): Promise { - await this.connection.close(); + await this.client.close(); } async isConnected(): Promise { - return this.connection['driver']['open']; + return this.client['driver']['open']; } async beginTransaction(savepoint?: string): Promise { @@ -51,28 +51,20 @@ export class SqliteConnection extends Connection { return p; }); - try { - const now = Date.now(); - const statement = await 
this.connection.prepare(query); - const res = await statement[method](...params); + const res = await this.executeQuery(query, params, async () => { + const statement = await this.client.prepare(query); + const result = await statement[method](...params); await statement.finalize(); - this.logQuery(query + ` [took ${Date.now() - now} ms]`); - return this.transformResult(res, method); - } catch (e) { - e.message += `\n in query: ${query}`; - - if (params.length) { - e.message += `\n with params: ${JSON.stringify(params)}`; - } + return result; + }); - throw e; - } + return this.transformResult(res, method); } async loadFile(path: string): Promise { const file = readFileSync(path); - await this.connection.exec(file.toString()); + await this.client.exec(file.toString()); } private transformResult(res: any, method: 'all' | 'get' | 'run'): QueryResult | EntityData | EntityData[] { @@ -85,6 +77,7 @@ export class SqliteConnection extends Connection { return res; } + } export type SqliteDatabase = Database & { driver: { open: boolean } }; diff --git a/lib/drivers/PostgreSqlDriver.ts b/lib/drivers/PostgreSqlDriver.ts new file mode 100644 index 000000000000..2b300609be64 --- /dev/null +++ b/lib/drivers/PostgreSqlDriver.ts @@ -0,0 +1,28 @@ +import { PostgreSqlConnection } from '../connections/PostgreSqlConnection'; +import { AbstractSqlDriver } from './AbstractSqlDriver'; +import { EntityData, IEntityType } from '../decorators'; +import { QueryType } from '../query'; +import { PostgreSqlPlatform } from '../platforms/PostgreSqlPlatform'; + +export class PostgreSqlDriver extends AbstractSqlDriver { + + protected readonly connection = new PostgreSqlConnection(this.config); + protected readonly platform = new PostgreSqlPlatform(); + + async nativeInsert>(entityName: string, data: EntityData): Promise { + const collections = this.extractManyToMany(entityName, data); + const qb = this.createQueryBuilder(entityName).insert(data); + const params = qb.getParams(); + let sql = 
qb.getQuery(); + + if (qb.type === QueryType.INSERT && Object.keys(params).length === 0) { + sql = sql.replace('() VALUES ()', '("id") VALUES (DEFAULT)'); + } + + const res = await this.connection.execute(sql, params, 'run'); + await this.processManyToMany(entityName, res.insertId, collections); + + return res.insertId; + } + +} diff --git a/lib/platforms/Platform.ts b/lib/platforms/Platform.ts index 33a5ba4a3346..f6cc261d1a73 100644 --- a/lib/platforms/Platform.ts +++ b/lib/platforms/Platform.ts @@ -22,4 +22,12 @@ export abstract class Platform { return '"'; } + getParameterPlaceholder(index?: number): string { + return '?'; + } + + usesReturningStatement(): boolean { + return false; + } + } diff --git a/lib/platforms/PostgreSqlPlatform.ts b/lib/platforms/PostgreSqlPlatform.ts new file mode 100644 index 000000000000..72a1c2d03842 --- /dev/null +++ b/lib/platforms/PostgreSqlPlatform.ts @@ -0,0 +1,26 @@ +import { NamingStrategy, UnderscoreNamingStrategy } from '../naming-strategy'; +import { Platform } from './Platform'; + +export class PostgreSqlPlatform extends Platform { + + supportsSavePoints(): boolean { + return true; + } + + getNamingStrategy(): { new(): NamingStrategy} { + return UnderscoreNamingStrategy; + } + + getIdentifierQuoteCharacter(): string { + return '"'; + } + + getParameterPlaceholder(index?: number): string { + return '$' + index; + } + + usesReturningStatement(): boolean { + return true; + } + +} diff --git a/lib/query/QueryBuilder.ts b/lib/query/QueryBuilder.ts index c26476ce3445..63241691dd7e 100644 --- a/lib/query/QueryBuilder.ts +++ b/lib/query/QueryBuilder.ts @@ -113,7 +113,7 @@ export class QueryBuilder { sql += this.helper.getQueryPagination(this._limit, this._offset); - return sql; + return this.helper.finalize(this.type, sql, this.metadata[this.entityName]); } getParams(): any[] { @@ -140,7 +140,7 @@ export class QueryBuilder { return ret; } - async execute(method?: string): Promise { + async execute(method: 'all' | 'get' | 'run' = 
'all'): Promise { return this.connection.execute(this.getQuery(), this.getParams(), method); } diff --git a/lib/query/QueryBuilderHelper.ts b/lib/query/QueryBuilderHelper.ts index e0affc893632..d2cbd6d86ae3 100644 --- a/lib/query/QueryBuilderHelper.ts +++ b/lib/query/QueryBuilderHelper.ts @@ -112,7 +112,7 @@ export class QueryBuilderHelper { } processJoins(leftJoins: Record): string { - return Object.values(leftJoins).map(([table, alias, column, joinColumn, pk]) => { + return Object.values(leftJoins).map(([table, alias, column, , pk]) => { return ` LEFT JOIN ${this.wrap(table)} AS ${this.wrap(alias)} ON ${this.wrap(this.alias)}.${this.wrap(pk)} = ${this.wrap(alias)}.${this.wrap(column)}`; }).join(''); } @@ -206,6 +206,25 @@ export class QueryBuilderHelper { return pagination; } + finalize(type: QueryType, sql: string, meta?: EntityMetadata): string { + let append = ''; + const useReturningStatement = type === QueryType.INSERT && this.platform.usesReturningStatement(); + + if (useReturningStatement && meta && meta.properties[meta.primaryKey]) { + const prop = meta.properties[meta.primaryKey]; + append = ` RETURNING ${this.wrap(prop.fieldName)}`; + } + + if (this.platform.getParameterPlaceholder() === '?') { + return sql + append; + } + + let index = 1; + return sql.replace(/(\?)/g, () => { + return this.platform.getParameterPlaceholder(index++); + }) + append; + } + private processValue(value: any): string | undefined { if (value instanceof RegExp) { return ' LIKE ?'; diff --git a/package.json b/package.json index 45e184143bef..2e5537f119a7 100644 --- a/package.json +++ b/package.json @@ -87,6 +87,7 @@ "peerDependencies": { "mongodb": "^3.1.13", "mysql2": "^1.6.5", + "pg": "^7.8.2", "sqlite": "^3.0.2" }, "devDependencies": { @@ -98,6 +99,7 @@ "@types/mongodb": "^3.1.21", "@types/mysql2": "types/mysql2", "@types/node": "^11.10.5", + "@types/pg": "^7.4.13", "@types/uuid": "^3.4.4", "coveralls": "^3.0.3", "husky": "^1.3.1", @@ -105,6 +107,7 @@ "lint-staged": "^8.1.5", 
"mongodb": "^3.1.13", "mysql2": "^1.6.5", + "pg": "^7.8.2", "rimraf": "^2.6.3", "semantic-release": "^15.13.3", "sqlite": "^3.0.2", diff --git a/tests/EntityManager.postgre.test.ts b/tests/EntityManager.postgre.test.ts new file mode 100644 index 000000000000..741b525ec6f7 --- /dev/null +++ b/tests/EntityManager.postgre.test.ts @@ -0,0 +1,781 @@ +import { Collection, Configuration, EntityManager, MikroORM, Utils } from '../lib'; +import { Author2, Book2, BookTag2, Publisher2, PublisherType, Test2 } from './entities-sql'; +import { initORMPostgreSql, wipeDatabasePostgreSql } from './bootstrap'; +import { PostgreSqlDriver } from '../lib/drivers/PostgreSqlDriver'; +import { Logger } from '../lib/utils'; + +/** + * @class EntityManagerPostgreTest + */ +describe('EntityManagerPostgre', () => { + + let orm: MikroORM; + + beforeAll(async () => orm = await initORMPostgreSql()); + beforeEach(async () => wipeDatabasePostgreSql(orm.em)); + + test('isConnected()', async () => { + expect(await orm.isConnected()).toBe(true); + await orm.close(true); + expect(await orm.isConnected()).toBe(false); + await orm.connect(); + expect(await orm.isConnected()).toBe(true); + }); + + test('getConnectionOptions()', async () => { + const config = new Configuration({ + clientUrl: 'postgre://root@127.0.0.1:1234/db_name', + host: '127.0.0.10', + password: 'secret', + user: 'user', + logger: jest.fn(), + } as any, false); + const driver = new PostgreSqlDriver(config); + expect(driver.getConnection().getConnectionOptions()).toEqual({ + database: 'db_name', + host: '127.0.0.10', + password: 'secret', + port: 1234, + user: 'user', + }); + }); + + test('should return postgre driver', async () => { + const driver = orm.em.getDriver(); + expect(driver instanceof PostgreSqlDriver).toBe(true); + expect(await driver.findOne(Book2.name, { foo: 'bar' })).toBeNull(); + expect(await driver.nativeInsert(Book2.name, { tags: [1] })).not.toBeNull(); + const res = await driver.getConnection().execute('SELECT 1 as 
count'); + expect(res[0]).toEqual({ count: 1 }); + expect(driver.denormalizePrimaryKey(1)).toBe(1); + expect(driver.denormalizePrimaryKey('1')).toBe('1'); + expect(await driver.find(BookTag2.name, { books: { $in: [1] } })).not.toBeNull(); + }); + + test('driver appends errored query', async () => { + const driver = orm.em.getDriver(); + const err1 = `relation "not_existing" does not exist\n in query: INSERT INTO "not_existing" ("foo") VALUES ($1)\n with params: ["bar"]`; + await expect(driver.nativeInsert('not_existing', { foo: 'bar' })).rejects.toThrowError(err1); + const err2 = `relation "not_existing" does not exist\n in query: DELETE FROM "not_existing"`; + await expect(driver.nativeDelete('not_existing', {})).rejects.toThrowError(err2); + }); + + test('should throw when trying to search by entity instead of identifier', async () => { + const repo = orm.em.getRepository(Author2); + const author = new Author2('name', 'email'); + author.termsAccepted = true; + await repo.persist(author); + await expect(repo.find(author)).rejects.toThrowError('Author2 entity provided in search condition. Please provide identifier instead.'); + await expect(repo.find({ author })).rejects.toThrowError(`Author2 entity provided in search condition in field 'author'. 
Please provide identifier instead.`); + expect(await repo.findOne({ termsAccepted: false })).toBeNull(); + }); + + test('transactions', async () => { + const god1 = new Author2('God1', 'hello@heaven.god'); + await orm.em.beginTransaction(); + await orm.em.persist(god1); + await orm.em.rollback(); + const res1 = await orm.em.findOne(Author2, { name: 'God1' }); + expect(res1).toBeNull(); + + await orm.em.beginTransaction(); + const god2 = new Author2('God2', 'hello@heaven.god'); + await orm.em.persist(god2); + await orm.em.commit(); + const res2 = await orm.em.findOne(Author2, { name: 'God2' }); + expect(res2).not.toBeNull(); + + await orm.em.transactional(async em => { + const god3 = new Author2('God3', 'hello@heaven.god'); + await em.persist(god3); + }); + const res3 = await orm.em.findOne(Author2, { name: 'God3' }); + expect(res3).not.toBeNull(); + + const err = new Error('Test'); + + try { + await orm.em.transactional(async em => { + const god4 = new Author2('God4', 'hello@heaven.god'); + await em.persist(god4); + throw err; + }); + } catch (e) { + expect(e).toBe(err); + const res4 = await orm.em.findOne(Author2, { name: 'God4' }); + expect(res4).toBeNull(); + } + }); + + test('nested transactions with save-points', async () => { + await orm.em.transactional(async em => { + const driver = em.getDriver(); + const god1 = new Author2('God1', 'hello@heaven.god'); + await driver.beginTransaction(); + await em.persist(god1); + await driver.rollback(); + const res1 = await em.findOne(Author2, { name: 'God1' }); + expect(res1).toBeNull(); + + await driver.beginTransaction(); + const god2 = new Author2('God2', 'hello@heaven.god'); + await em.persist(god2); + await driver.commit(); + const res2 = await em.findOne(Author2, { name: 'God2' }); + expect(res2).not.toBeNull(); + }); + }); + + test('nested transaction rollback with save-points will commit the outer one', async () => { + const mock = jest.fn(); + const logger = new Logger(mock, true); + 
Object.assign(orm.em.getConnection(), { logger }); + + // start outer transaction + const transaction = orm.em.transactional(async em => { + // do stuff inside inner transaction and rollback + await em.beginTransaction(); + await em.persist(new Author2('God', 'hello@heaven.god')); + await em.rollback(); + + await em.persist(new Author2('God Persisted!', 'hello-persisted@heaven.god')); + }); + + // try to commit the outer transaction + await expect(transaction).resolves.toBeUndefined(); + expect(mock.mock.calls.length).toBe(6); + expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION'); + expect(mock.mock.calls[1][0]).toMatch('SAVEPOINT PostgreSqlDriver_2'); + expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5) RETURNING "id"'); + expect(mock.mock.calls[3][0]).toMatch('ROLLBACK TO SAVEPOINT PostgreSqlDriver_2'); + expect(mock.mock.calls[4][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5) RETURNING "id"'); + expect(mock.mock.calls[5][0]).toMatch('[query-logger] COMMIT'); + expect(await orm.em.findOne(Author2, { name: 'God Persisted!' 
})).not.toBeNull(); + }); + + test('should load entities', async () => { + expect(orm).toBeInstanceOf(MikroORM); + expect(orm.em).toBeInstanceOf(EntityManager); + + const god = new Author2('God', 'hello@heaven.god'); + const bible = new Book2('Bible', god); + await orm.em.persist(bible); + + const author = new Author2('Jon Snow', 'snow@wall.st'); + author.born = new Date(); + author.favouriteBook = bible; + + const publisher = new Publisher2('7K publisher', PublisherType.GLOBAL); + + const book1 = new Book2('My Life on The Wall, part 1', author); + book1.publisher = publisher; + const book2 = new Book2('My Life on The Wall, part 2', author); + book2.publisher = publisher; + const book3 = new Book2('My Life on The Wall, part 3', author); + book3.publisher = publisher; + + const repo = orm.em.getRepository(Book2); + await repo.persist(book1, false); + await repo.persist(book2, false); + await repo.persist(book3, false); + await repo.flush(); + orm.em.clear(); + + const publisher7k = (await orm.em.getRepository(Publisher2).findOne({ name: '7K publisher' }))!; + expect(publisher7k).not.toBeNull(); + expect(publisher7k.tests).toBeInstanceOf(Collection); + expect(publisher7k.tests.isInitialized()).toBe(false); + orm.em.clear(); + + const authorRepository = orm.em.getRepository(Author2); + const booksRepository = orm.em.getRepository(Book2); + const books = await booksRepository.findAll(['author']); + expect(books[0].author.isInitialized()).toBe(true); + expect(await authorRepository.findOne({ favouriteBook: bible.id })).not.toBe(null); + orm.em.clear(); + + const noBooks = await booksRepository.find({ title: 'not existing' }, ['author']); + expect(noBooks.length).toBe(0); + orm.em.clear(); + + const jon = (await authorRepository.findOne({ name: 'Jon Snow' }, ['books', 'favouriteBook']))!; + const authors = await authorRepository.findAll(['books', 'favouriteBook']); + expect(await authorRepository.findOne({ email: 'not existing' })).toBeNull(); + + // count test + const 
count = await authorRepository.count(); + expect(count).toBe(authors.length); + + // identity map test + authors.shift(); // shift the god away, as that entity is detached from IM + expect(jon).toBe(authors[0]); + expect(jon).toBe(await authorRepository.findOne(jon.id)); + + // serialization test + const o = jon.toJSON(); + expect(o).toMatchObject({ + id: jon.id, + createdAt: jon.createdAt, + updatedAt: jon.updatedAt, + books: [ + { author: jon.id, publisher: publisher.id, title: 'My Life on The Wall, part 1' }, + { author: jon.id, publisher: publisher.id, title: 'My Life on The Wall, part 2' }, + { author: jon.id, publisher: publisher.id, title: 'My Life on The Wall, part 3' }, + ], + favouriteBook: { author: god.id, title: 'Bible' }, + born: jon.born, + email: 'snow@wall.st', + name: 'Jon Snow', + }); + expect(jon.toJSON()).toEqual(o); + expect(jon.books.getIdentifiers()).toBeInstanceOf(Array); + expect(typeof jon.books.getIdentifiers()[0]).toBe('number'); + + for (const author of authors) { + expect(author.books).toBeInstanceOf(Collection); + expect(author.books.isInitialized()).toBe(true); + + // iterator test + for (const book of author.books) { + expect(book.title).toMatch(/My Life on The Wall, part \d/); + + expect(book.author).toBeInstanceOf(Author2); + expect(book.author.isInitialized()).toBe(true); + expect(book.publisher).toBeInstanceOf(Publisher2); + expect(book.publisher.isInitialized()).toBe(false); + } + } + + const booksByTitleAsc = await booksRepository.find({ author: jon.id }, [], { title: 1 }); + expect(booksByTitleAsc[0].title).toBe('My Life on The Wall, part 1'); + expect(booksByTitleAsc[1].title).toBe('My Life on The Wall, part 2'); + expect(booksByTitleAsc[2].title).toBe('My Life on The Wall, part 3'); + + const booksByTitleDesc = await booksRepository.find({ author: jon.id }, [], { title: -1 }); + expect(booksByTitleDesc[0].title).toBe('My Life on The Wall, part 3'); + expect(booksByTitleDesc[1].title).toBe('My Life on The Wall, part 2'); + 
expect(booksByTitleDesc[2].title).toBe('My Life on The Wall, part 1'); + + const twoBooks = await booksRepository.find({ author: jon.id }, [], { title: -1 }, 2); + expect(twoBooks.length).toBe(2); + expect(twoBooks[0].title).toBe('My Life on The Wall, part 3'); + expect(twoBooks[1].title).toBe('My Life on The Wall, part 2'); + + const lastBook = await booksRepository.find({ author: jon.id }, ['author'], { title: -1 }, 2, 2); + expect(lastBook.length).toBe(1); + expect(lastBook[0].title).toBe('My Life on The Wall, part 1'); + expect(lastBook[0].author).toBeInstanceOf(Author2); + expect(lastBook[0].author.isInitialized()).toBe(true); + await orm.em.getRepository(Book2).remove(lastBook[0].id); + }); + + test('findOne should initialize entity that is already in IM', async () => { + const god = new Author2('God', 'hello@heaven.god'); + const bible = new Book2('Bible', god); + await orm.em.persist(bible); + orm.em.clear(); + + const ref = orm.em.getReference(Author2, god.id); + expect(ref.isInitialized()).toBe(false); + const newGod = await orm.em.findOne(Author2, god.id); + expect(ref).toBe(newGod); + expect(ref.isInitialized()).toBe(true); + }); + + test('findOne supports regexps', async () => { + const author1 = new Author2('Author 1', 'a1@example.com'); + const author2 = new Author2('Author 2', 'a2@example.com'); + const author3 = new Author2('Author 3', 'a3@example.com'); + await orm.em.persist([author1, author2, author3]); + orm.em.clear(); + + const authors = await orm.em.find(Author2, { email: /exa.*le\.c.m$/ }); + expect(authors.length).toBe(3); + expect(authors[0].name).toBe('Author 1'); + expect(authors[1].name).toBe('Author 2'); + expect(authors[2].name).toBe('Author 3'); + }); + + test('stable results of serialization', async () => { + const god = new Author2('God', 'hello@heaven.god'); + const bible = new Book2('Bible', god); + const bible2 = new Book2('Bible pt. 2', god); + const bible3 = new Book2('Bible pt. 
3', new Author2('Lol', 'lol@lol.lol')); + await orm.em.persist([bible, bible2, bible3]); + orm.em.clear(); + + const newGod = (await orm.em.findOne(Author2, god.id))!; + const books = await orm.em.find(Book2, {}); + await newGod.init(false); + + for (const book of books) { + expect(book.toJSON()).toMatchObject({ + author: book.author.id, + }); + } + }); + + test('stable results of serialization (collection)', async () => { + const pub = new Publisher2('Publisher2'); + await orm.em.persist(pub); + const god = new Author2('God', 'hello@heaven.god'); + const bible = new Book2('Bible', god); + bible.publisher = pub; + const bible2 = new Book2('Bible pt. 2', god); + bible2.publisher = pub; + const bible3 = new Book2('Bible pt. 3', new Author2('Lol', 'lol@lol.lol')); + bible3.publisher = pub; + await orm.em.persist([bible, bible2, bible3]); + orm.em.clear(); + + const newGod = orm.em.getReference(Author2, god.id); + const publisher = (await orm.em.findOne(Publisher2, pub.id, ['books']))!; + await newGod.init(); + + const json = publisher.toJSON().books; + + for (const book of publisher.books) { + expect(json.find((b: Book2) => b.id === book.id)).toMatchObject({ + author: book.author.id, + }); + } + }); + + test('findOne by id', async () => { + const authorRepository = orm.em.getRepository(Author2); + const jon = new Author2('Jon Snow', 'snow@wall.st'); + await authorRepository.persist(jon); + + orm.em.clear(); + let author = (await authorRepository.findOne(jon.id))!; + expect(author).not.toBeNull(); + expect(author.name).toBe('Jon Snow'); + + orm.em.clear(); + author = (await authorRepository.findOne({ id: jon.id }))!; + expect(author).not.toBeNull(); + expect(author.name).toBe('Jon Snow'); + }); + + test('populate ManyToOne relation', async () => { + const authorRepository = orm.em.getRepository(Author2); + const god = new Author2('God', 'hello@heaven.god'); + const bible = new Book2('Bible', god); + await orm.em.persist(bible); + + let jon = new Author2('Jon Snow', 
'snow@wall.st'); + jon.born = new Date(); + jon.favouriteBook = bible; + await orm.em.persist(jon); + orm.em.clear(); + + jon = (await authorRepository.findOne(jon.id))!; + expect(jon).not.toBeNull(); + expect(jon.name).toBe('Jon Snow'); + expect(jon.favouriteBook).toBeInstanceOf(Book2); + expect(jon.favouriteBook.isInitialized()).toBe(false); + + await jon.favouriteBook.init(); + expect(jon.favouriteBook).toBeInstanceOf(Book2); + expect(jon.favouriteBook.isInitialized()).toBe(true); + expect(jon.favouriteBook.title).toBe('Bible'); + }); + + test('many to many relation', async () => { + const author = new Author2('Jon Snow', 'snow@wall.st'); + const book1 = new Book2('My Life on The Wall, part 1', author); + const book2 = new Book2('My Life on The Wall, part 2', author); + const book3 = new Book2('My Life on The Wall, part 3', author); + const tag1 = new BookTag2('silly'); + const tag2 = new BookTag2('funny'); + const tag3 = new BookTag2('sick'); + const tag4 = new BookTag2('strange'); + const tag5 = new BookTag2('sexy'); + book1.tags.add(tag1, tag3); + book2.tags.add(tag1, tag2, tag5); + book3.tags.add(tag2, tag4, tag5); + + await orm.em.persist(book1, false); + await orm.em.persist(book2, false); + await orm.em.persist(book3); + + expect(tag1.id).toBeDefined(); + expect(tag2.id).toBeDefined(); + expect(tag3.id).toBeDefined(); + expect(tag4.id).toBeDefined(); + expect(tag5.id).toBeDefined(); + + // test inverse side + const tagRepository = orm.em.getRepository(BookTag2); + let tags = await tagRepository.findAll(); + expect(tags).toBeInstanceOf(Array); + expect(tags.length).toBe(5); + expect(tags[0]).toBeInstanceOf(BookTag2); + expect(tags[0].name).toBe('silly'); + expect(tags[0].books).toBeInstanceOf(Collection); + expect(tags[0].books.isInitialized()).toBe(true); + expect(tags[0].books.isDirty()).toBe(false); + expect(tags[0].books.count()).toBe(2); + expect(tags[0].books.length).toBe(2); + + orm.em.clear(); + tags = await orm.em.find(BookTag2); + 
expect(tags[0].books.isInitialized()).toBe(false); + expect(tags[0].books.isDirty()).toBe(false); + expect(() => tags[0].books.getItems()).toThrowError(/Collection Book2\[] of entity BookTag2\[\d+] not initialized/); + expect(() => tags[0].books.add(book1)).toThrowError(/Collection Book2\[] of entity BookTag2\[\d+] not initialized/); + expect(() => tags[0].books.remove(book1, book2)).toThrowError(/Collection Book2\[] of entity BookTag2\[\d+] not initialized/); + expect(() => tags[0].books.removeAll()).toThrowError(/Collection Book2\[] of entity BookTag2\[\d+] not initialized/); + expect(() => tags[0].books.contains(book1)).toThrowError(/Collection Book2\[] of entity BookTag2\[\d+] not initialized/); + + // test M:N lazy init + orm.em.clear(); + tags = await tagRepository.findAll(); + await tags[0].books.init(); + expect(tags[0].books.count()).toBe(2); + expect(tags[0].books.getItems()[0]).toBeInstanceOf(Book2); + expect(tags[0].books.getItems()[0].id).toBeDefined(); + expect(tags[0].books.getItems()[0].isInitialized()).toBe(true); + expect(tags[0].books.isInitialized()).toBe(true); + const old = tags[0]; + expect(tags[1].books.isInitialized()).toBe(false); + tags = await tagRepository.findAll(['books']); + expect(tags[1].books.isInitialized()).toBe(true); + expect(tags[0].id).toBe(old.id); + expect(tags[0]).toBe(old); + expect(tags[0].books).toBe(old.books); + + // test M:N lazy init + orm.em.clear(); + let book = (await orm.em.findOne(Book2, { tags: tag1.id }))!; + expect(book.tags.isInitialized()).toBe(false); + await book.tags.init(); + expect(book.tags.isInitialized()).toBe(true); + expect(book.tags.count()).toBe(2); + expect(book.tags.getItems()[0]).toBeInstanceOf(BookTag2); + expect(book.tags.getItems()[0].id).toBeDefined(); + expect(book.tags.getItems()[0].isInitialized()).toBe(true); + + // test collection CRUD + // remove + expect(book.tags.count()).toBe(2); + book.tags.remove(tag1); + await orm.em.persist(book); + orm.em.clear(); + book = (await 
orm.em.findOne(Book2, book.id, ['tags']))!; + expect(book.tags.count()).toBe(1); + + // add + book.tags.add(tag1, new BookTag2('fresh')); + await orm.em.persist(book); + orm.em.clear(); + book = (await orm.em.findOne(Book2, book.id, ['tags']))!; + expect(book.tags.count()).toBe(3); + + // contains + expect(book.tags.contains(tag1)).toBe(true); + expect(book.tags.contains(tag2)).toBe(false); + expect(book.tags.contains(tag3)).toBe(true); + expect(book.tags.contains(tag4)).toBe(false); + expect(book.tags.contains(tag5)).toBe(false); + + // removeAll + book.tags.removeAll(); + await orm.em.persist(book); + orm.em.clear(); + book = (await orm.em.findOne(Book2, book.id, ['tags']))!; + expect(book.tags.count()).toBe(0); + }); + + test('populating many to many relation', async () => { + const p1 = new Publisher2('foo'); + expect(p1.tests).toBeInstanceOf(Collection); + expect(p1.tests.isInitialized()).toBe(true); + expect(p1.tests.isDirty()).toBe(false); + expect(p1.tests.count()).toBe(0); + const p2 = new Publisher2('bar'); + p2.tests.add(new Test2(), new Test2()); + await orm.em.persist([p1, p2]); + const repo = orm.em.getRepository(Publisher2); + + orm.em.clear(); + const publishers = await repo.findAll(['tests']); + expect(publishers).toBeInstanceOf(Array); + expect(publishers.length).toBe(2); + expect(publishers[0]).toBeInstanceOf(Publisher2); + expect(publishers[0].tests).toBeInstanceOf(Collection); + expect(publishers[0].tests.isInitialized()).toBe(true); + expect(publishers[0].tests.isDirty()).toBe(false); + expect(publishers[0].tests.count()).toBe(0); + await publishers[0].tests.init(); // empty many to many on owning side should not make db calls + expect(publishers[1].tests.getItems()[0].isInitialized()).toBe(true); + }); + + test('populating many to many relation on inverse side', async () => { + const author = new Author2('Jon Snow', 'snow@wall.st'); + const book1 = new Book2('My Life on The Wall, part 1', author); + const book2 = new Book2('My Life on The 
Wall, part 2', author); + const book3 = new Book2('My Life on The Wall, part 3', author); + const tag1 = new BookTag2('silly'); + const tag2 = new BookTag2('funny'); + const tag3 = new BookTag2('sick'); + const tag4 = new BookTag2('strange'); + const tag5 = new BookTag2('sexy'); + book1.tags.add(tag1, tag3); + book2.tags.add(tag1, tag2, tag5); + book3.tags.add(tag2, tag4, tag5); + await orm.em.persist([book1, book2, book3]); + const repo = orm.em.getRepository(BookTag2); + + orm.em.clear(); + const tags = await repo.findAll(['books']); + expect(tags).toBeInstanceOf(Array); + expect(tags.length).toBe(5); + expect(tags[0]).toBeInstanceOf(BookTag2); + expect(tags[0].books).toBeInstanceOf(Collection); + expect(tags[0].books.isInitialized()).toBe(true); + expect(tags[0].books.isDirty()).toBe(false); + expect(tags[0].books.count()).toBe(2); + expect(tags[0].books.getItems()[0].isInitialized()).toBe(true); + }); + + test('nested populating', async () => { + const author = new Author2('Jon Snow', 'snow@wall.st'); + const book1 = new Book2('My Life on The Wall, part 1', author); + const book2 = new Book2('My Life on The Wall, part 2', author); + const book3 = new Book2('My Life on The Wall, part 3', author); + book1.publisher = new Publisher2('B1 publisher'); + book1.publisher.tests.add(Test2.create('t11'), Test2.create('t12')); + book2.publisher = new Publisher2('B2 publisher'); + book2.publisher.tests.add(Test2.create('t21'), Test2.create('t22')); + book3.publisher = new Publisher2('B3 publisher'); + book3.publisher.tests.add(Test2.create('t31'), Test2.create('t32')); + const tag1 = new BookTag2('silly'); + const tag2 = new BookTag2('funny'); + const tag3 = new BookTag2('sick'); + const tag4 = new BookTag2('strange'); + const tag5 = new BookTag2('sexy'); + book1.tags.add(tag1, tag3); + book2.tags.add(tag1, tag2, tag5); + book3.tags.add(tag2, tag4, tag5); + await orm.em.persist([book1, book2, book3]); + const repo = orm.em.getRepository(BookTag2); + + orm.em.clear(); + 
const tags = await repo.findAll(['books.publisher.tests', 'books.author']); + expect(tags.length).toBe(5); + expect(tags[0]).toBeInstanceOf(BookTag2); + expect(tags[0].books.isInitialized()).toBe(true); + expect(tags[0].books.count()).toBe(2); + expect(tags[0].books[0].isInitialized()).toBe(true); + expect(tags[0].books[0].author).toBeInstanceOf(Author2); + expect(tags[0].books[0].author.isInitialized()).toBe(true); + expect(tags[0].books[0].author.name).toBe('Jon Snow'); + expect(tags[0].books[0].publisher).toBeInstanceOf(Publisher2); + expect(tags[0].books[0].publisher.isInitialized()).toBe(true); + expect(tags[0].books[0].publisher.tests.isInitialized(true)).toBe(true); + expect(tags[0].books[0].publisher.tests.count()).toBe(2); + expect(tags[0].books[0].publisher.tests[0].name).toBe('t11'); + expect(tags[0].books[0].publisher.tests[1].name).toBe('t12'); + + orm.em.clear(); + const books = await orm.em.find(Book2, {}, ['publisher.tests', 'author']); + expect(books.length).toBe(3); + expect(books[0]).toBeInstanceOf(Book2); + expect(books[0].isInitialized()).toBe(true); + expect(books[0].author).toBeInstanceOf(Author2); + expect(books[0].author.isInitialized()).toBe(true); + expect(books[0].author.name).toBe('Jon Snow'); + expect(books[0].publisher).toBeInstanceOf(Publisher2); + expect(books[0].publisher.isInitialized()).toBe(true); + expect(books[0].publisher.tests.isInitialized(true)).toBe(true); + expect(books[0].publisher.tests.count()).toBe(2); + expect(books[0].publisher.tests[0].name).toBe('t11'); + expect(books[0].publisher.tests[1].name).toBe('t12'); + }); + + test('hooks', async () => { + Author2.beforeDestroyCalled = 0; + Author2.afterDestroyCalled = 0; + const repo = orm.em.getRepository(Author2); + const author = new Author2('Jon Snow', 'snow@wall.st'); + expect(author.id).toBeUndefined(); + expect(author.version).toBeUndefined(); + expect(author.versionAsString).toBeUndefined(); + + await repo.persist(author); + expect(author.id).toBeDefined(); + 
expect(author.version).toBe(1); + expect(author.versionAsString).toBe('v1'); + + author.name = 'John Snow'; + await repo.persist(author); + expect(author.version).toBe(2); + expect(author.versionAsString).toBe('v2'); + + expect(Author2.beforeDestroyCalled).toBe(0); + expect(Author2.afterDestroyCalled).toBe(0); + await repo.remove(author); + expect(Author2.beforeDestroyCalled).toBe(1); + expect(Author2.afterDestroyCalled).toBe(1); + + const author2 = new Author2('Johny Cash', 'johny@cash.com'); + await repo.persist(author2); + await repo.remove(author2); + expect(Author2.beforeDestroyCalled).toBe(2); + expect(Author2.afterDestroyCalled).toBe(2); + }); + + test('trying to populate non-existing or non-reference property will throw', async () => { + const repo = orm.em.getRepository(Author2); + const author = new Author2('Johny Cash', 'johny@cash.com'); + await repo.persist(author); + orm.em.clear(); + + await expect(repo.findAll(['tests'])).rejects.toThrowError(`Entity 'Author2' does not have property 'tests'`); + await expect(repo.findOne(author.id, ['tests'])).rejects.toThrowError(`Entity 'Author2' does not have property 'tests'`); + }); + + test('many to many collection does have fixed order', async () => { + const repo = orm.em.getRepository(Publisher2); + const publisher = new Publisher2(); + const t1 = Test2.create('t1'); + const t2 = Test2.create('t2'); + const t3 = Test2.create('t3'); + await orm.em.persist([t1, t2, t3]); + publisher.tests.add(t2, t1, t3); + await repo.persist(publisher); + orm.em.clear(); + + const ent = (await repo.findOne(publisher.id, ['tests']))!; + await expect(ent.tests.count()).toBe(3); + await expect(ent.tests.getIdentifiers()).toEqual([t2.id, t1.id, t3.id]); + + await ent.tests.init(); + await expect(ent.tests.getIdentifiers()).toEqual([t2.id, t1.id, t3.id]); + }); + + test('property onUpdate hook (updatedAt field)', async () => { + const repo = orm.em.getRepository(Author2); + const author = new Author2('name', 'email'); + await 
expect(author.createdAt).toBeDefined(); + await expect(author.updatedAt).toBeDefined(); + // allow 1 ms difference as updated time is recalculated when persisting + await expect(+author.updatedAt - +author.createdAt).toBeLessThanOrEqual(1); + await repo.persist(author); + + author.name = 'name1'; + await repo.persist(author); + await expect(author.createdAt).toBeDefined(); + await expect(author.updatedAt).toBeDefined(); + await expect(author.updatedAt).not.toEqual(author.createdAt); + await expect(author.updatedAt > author.createdAt).toBe(true); + + orm.em.clear(); + const ent = (await repo.findOne(author.id))!; + await expect(ent.createdAt).toBeDefined(); + await expect(ent.updatedAt).toBeDefined(); + await expect(ent.updatedAt).not.toEqual(ent.createdAt); + await expect(ent.updatedAt > ent.createdAt).toBe(true); + }); + + test('EM supports native insert/update/delete', async () => { + orm.config.getLogger().setDebugMode(false); + const res1 = await orm.em.nativeInsert(Author2, { name: 'native name 1' }); + expect(typeof res1).toBe('number'); + + const res2 = await orm.em.nativeUpdate(Author2, { name: 'native name 1' }, { name: 'new native name' }); + expect(res2).toBe(1); + + const res3 = await orm.em.nativeDelete(Author2, { name: 'new native name' }); + expect(res3).toBe(1); + + const res4 = await orm.em.nativeInsert(Author2, { createdAt: new Date('1989-11-17'), updatedAt: new Date('2018-10-28'), name: 'native name 2' }); + expect(typeof res4).toBe('number'); + + const res5 = await orm.em.nativeUpdate(Author2, { name: 'native name 2' }, { name: 'new native name', updatedAt: new Date('2018-10-28') }); + expect(res5).toBe(1); + + await expect(orm.em.aggregate(Author2, [])).rejects.toThrowError('Aggregations are not supported by PostgreSqlDriver driver'); + }); + + test('Utils.prepareEntity changes entity to number id', async () => { + const author1 = new Author2('Name 1', 'e-mail'); + const book = new Book2('test', author1); + const author2 = new Author2('Name 2', 
'e-mail'); + author2.favouriteBook = book; + author2.version = 123; + await orm.em.persist([author1, author2, book]); + const diff = Utils.diffEntities(author1, author2); + expect(diff).toMatchObject({ name: 'Name 2', favouriteBook: book.id }); + expect(typeof diff.favouriteBook).toBe('number'); + }); + + test('self referencing (2 step)', async () => { + const author = new Author2('name', 'email'); + const b1 = new Book2('b1', author); + const b2 = new Book2('b2', author); + const b3 = new Book2('b3', author); + await orm.em.persist([b1, b2, b3]); + author.favouriteAuthor = author; + await orm.em.persist(author); + orm.em.clear(); + + const a1 = (await orm.em.findOne(Author2, { id: author.id }))!; + expect(a1).toBe(a1.favouriteAuthor); + expect(a1.id).not.toBeNull(); + expect(a1.toJSON()).toMatchObject({ favouriteAuthor: a1.id }); + }); + + test('self referencing (1 step)', async () => { + const mock = jest.fn(); + const logger = new Logger(mock, true); + Object.assign(orm.em.getConnection(), { logger }); + + const author = new Author2('name', 'email'); + author.favouriteAuthor = author; + const b1 = new Book2('b1', author); + const b2 = new Book2('b2', author); + const b3 = new Book2('b3', author); + await orm.em.persist([b1, b2, b3]); + orm.em.clear(); + + const a1 = (await orm.em.findOne(Author2, { id: author.id }))!; + expect(a1).toBe(a1.favouriteAuthor); + expect(a1.id).not.toBeNull(); + expect(a1.toJSON()).toMatchObject({ favouriteAuthor: a1.id }); + + // check fired queries + expect(mock.mock.calls.length).toBe(8); + expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION'); + expect(mock.mock.calls[1][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5)'); + expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "author_id") VALUES ($1, $2)'); + expect(mock.mock.calls[3][0]).toMatch('INSERT INTO "book2" ("title", "author_id") VALUES ($1, $2)'); + 
expect(mock.mock.calls[4][0]).toMatch('INSERT INTO "book2" ("title", "author_id") VALUES ($1, $2)'); + expect(mock.mock.calls[5][0]).toMatch('UPDATE "author2" SET "favourite_author_id" = $1, "updated_at" = $2 WHERE "id" = $3'); + expect(mock.mock.calls[6][0]).toMatch('COMMIT'); + expect(mock.mock.calls[7][0]).toMatch('SELECT "e0".* FROM "author2" AS "e0" WHERE "e0"."id" = $1'); + }); + + test('EM supports smart search conditions', async () => { + const author = new Author2('name', 'email'); + const b1 = new Book2('b1', author); + const b2 = new Book2('b2', author); + const b3 = new Book2('b3', author); + await orm.em.persist([b1, b2, b3]); + orm.em.clear(); + + const a1 = (await orm.em.findOne(Author2, { 'id:ne': 10 }))!; + expect(a1).not.toBeNull(); + expect(a1.id).toBe(author.id); + const a2 = (await orm.em.findOne(Author2, { 'id>=': 1 }))!; + expect(a2).not.toBeNull(); + expect(a2.id).toBe(author.id); + const a3 = (await orm.em.findOne(Author2, { 'id:nin': [2, 3, 4] }))!; + expect(a3).not.toBeNull(); + expect(a3.id).toBe(author.id); + }); + + afterAll(async () => orm.close(true)); + +}); diff --git a/tests/bootstrap.ts b/tests/bootstrap.ts index b5b542f98a89..1d6a5253a712 100644 --- a/tests/bootstrap.ts +++ b/tests/bootstrap.ts @@ -8,6 +8,8 @@ import { SqliteConnection } from '../lib/connections/SqliteConnection'; import { BaseEntity2 } from './entities-sql/BaseEntity2'; import { FooBar2 } from './entities-sql/FooBar2'; import { BaseEntity22 } from './entities-sql/BaseEntity22'; +import { PostgreSqlDriver } from '../lib/drivers/PostgreSqlDriver'; +import { PostgreSqlConnection } from '../lib/connections/PostgreSqlConnection'; const { BaseEntity4, Author3, Book3, BookTag3, Publisher3, Test3 } = require('./entities-js'); @@ -53,6 +55,22 @@ export async function initORMMySql() { return orm; } +export async function initORMPostgreSql() { + const orm = await MikroORM.init({ + entities: [Author2, Book2, BookTag2, Publisher2, Test2, FooBar2, BaseEntity2, BaseEntity22], 
+ tsConfigPath: BASE_DIR + '/tsconfig.test.json', + dbName: `mikro_orm_test`, + baseDir: BASE_DIR, + driver: PostgreSqlDriver, + debug: true, + }); + + const connection = orm.em.getConnection(); + await connection.loadFile(__dirname + '/postgre-schema.sql'); + + return orm; +} + export async function initORMSqlite() { const orm = await MikroORM.init({ entities: [Author3, Book3, BookTag3, Publisher3, Test3, BaseEntity4], @@ -89,6 +107,17 @@ export async function wipeDatabaseMySql(em: EntityManager) { em.clear(); } +export async function wipeDatabasePostgreSql(em: EntityManager) { + await em.createQueryBuilder(Author2).truncate().execute(); + await em.createQueryBuilder(Book2).truncate().execute(); + await em.createQueryBuilder(BookTag2).truncate().execute(); + await em.createQueryBuilder(Publisher2).truncate().execute(); + await em.createQueryBuilder(Test2).truncate().execute(); + await em.createQueryBuilder('book2_to_book_tag2').truncate().execute(); + await em.createQueryBuilder('publisher2_to_test2').truncate().execute(); + em.clear(); +} + export async function wipeDatabaseSqlite(em: EntityManager) { await em.createQueryBuilder(Author3.entity).delete().execute('run'); await em.createQueryBuilder(Book3.entity).delete().execute('run'); diff --git a/tests/postgre-schema.sql b/tests/postgre-schema.sql new file mode 100644 index 000000000000..5a5f4e7616ac --- /dev/null +++ b/tests/postgre-schema.sql @@ -0,0 +1,101 @@ +DROP TABLE IF EXISTS author2; +DROP SEQUENCE IF EXISTS author2_seq; + +CREATE SEQUENCE author2_seq; + +CREATE TABLE author2 ( + id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('author2_seq'), + created_at timestamp(3) DEFAULT NULL, + updated_at timestamp(3) DEFAULT NULL, + terms_accepted boolean DEFAULT NULL, + name varchar(255) DEFAULT NULL, + email varchar(255) DEFAULT NULL, + born timestamp(0) DEFAULT NULL, + favourite_book_id int DEFAULT NULL, + favourite_author_id int DEFAULT NULL, + PRIMARY KEY (id) +); + + + +DROP TABLE IF EXISTS 
book2_to_book_tag2; +DROP SEQUENCE IF EXISTS book2_to_book_tag2_seq; + +CREATE SEQUENCE book2_to_book_tag2_seq; + +CREATE TABLE book2_to_book_tag2 ( + id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('book2_to_book_tag2_seq'), + book2_id int DEFAULT NULL, + book_tag2_id int DEFAULT NULL, + PRIMARY KEY (id) +); + + + +DROP TABLE IF EXISTS book_tag2; +DROP SEQUENCE IF EXISTS book_tag2_seq; + +CREATE SEQUENCE book_tag2_seq; + +CREATE TABLE book_tag2 ( + id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('book_tag2_seq'), + name varchar(50) DEFAULT NULL, + PRIMARY KEY (id) +); + + + +DROP TABLE IF EXISTS book2; +DROP SEQUENCE IF EXISTS book2_seq; + +CREATE SEQUENCE book2_seq; + +CREATE TABLE book2 ( + id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('book2_seq'), + title varchar(255) DEFAULT NULL, + foo varchar(255) DEFAULT NULL, + author_id int DEFAULT NULL, + publisher_id int DEFAULT NULL, + PRIMARY KEY (id) +); + + + +DROP TABLE IF EXISTS publisher2_to_test2; +DROP SEQUENCE IF EXISTS publisher2_to_test2_seq; + +CREATE SEQUENCE publisher2_to_test2_seq; + +CREATE TABLE publisher2_to_test2 ( + id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('publisher2_to_test2_seq'), + publisher2_id int DEFAULT NULL, + test2_id int DEFAULT NULL, + PRIMARY KEY (id) +); + + + +DROP TABLE IF EXISTS publisher2; +DROP SEQUENCE IF EXISTS publisher2_seq; + +CREATE SEQUENCE publisher2_seq; + +CREATE TABLE publisher2 ( + id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('publisher2_seq'), + name varchar(255) DEFAULT NULL, + type varchar(255) DEFAULT NULL, + PRIMARY KEY (id) +); + + + +DROP TABLE IF EXISTS test2; +DROP SEQUENCE IF EXISTS test2_seq; + +CREATE SEQUENCE test2_seq; + +CREATE TABLE test2 ( + id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('test2_seq'), + name varchar(255) DEFAULT NULL, + PRIMARY KEY (id) +); From 71e02202479736a95c72480b752d4c21eecb487a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Ad=C3=A1mek?= Date: Mon, 18 Mar 2019 18:17:30 +0100 Subject: [PATCH 2/2] test: fix tests 
after merging upstream --- README.md | 12 +++--- docker-compose.yml | 11 ++++- docs/installation.md | 14 ++++--- lib/connections/MySqlConnection.ts | 2 +- lib/connections/PostgreSqlConnection.ts | 3 +- lib/drivers/AbstractSqlDriver.ts | 6 ++- lib/drivers/PostgreSqlDriver.ts | 10 +++-- tests/EntityManager.postgre.test.ts | 53 +++++++++++++------------ tests/mysql-schema.sql | 2 +- tests/postgre-schema.sql | 12 +++--- 10 files changed, 72 insertions(+), 53 deletions(-) diff --git a/README.md b/README.md index a8fc69543a7e..8e4090b42d25 100644 --- a/README.md +++ b/README.md @@ -37,18 +37,18 @@ First install the module via `yarn` or `npm` and do not forget to install the da ``` $ yarn add mikro-orm mongodb # for mongo -$ yarn add mikro-orm mysql2 # for mysql -$ yarn add mikro-orm pg # for postgresql -$ yarn add mikro-orm sqlite # for sqlite +$ yarn add mikro-orm mysql2 # for mysql +$ yarn add mikro-orm pg # for postgresql +$ yarn add mikro-orm sqlite # for sqlite ``` or ``` $ npm i -s mikro-orm mongodb # for mongo -$ npm i -s mikro-orm mysql2 # for mysql -$ npm i -s mikro-orm pg # for postgresql -$ npm i -s mikro-orm sqlite # for sqlite +$ npm i -s mikro-orm mysql2 # for mysql +$ npm i -s mikro-orm pg # for postgresql +$ npm i -s mikro-orm sqlite # for sqlite ``` Next you will need to enable support for [decorators](https://www.typescriptlang.org/docs/handbook/decorators.html) diff --git a/docker-compose.yml b/docker-compose.yml index c3a46e690a68..f663a9e2e3db 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,12 +14,21 @@ services: image: mysql:5.7 restart: unless-stopped ports: - - "127.0.0.1:3307:3306" + - 3307:3306 environment: MYSQL_ALLOW_EMPTY_PASSWORD: 1 volumes: - mysql:/var/lib/mysql + postgre: + container_name: postgre + image: postgres:11.2 + ports: + - 5432:5432 + volumes: + - postgre:/var/lib/postgresql/data + volumes: mongo: mysql: + postgre: diff --git a/docs/installation.md b/docs/installation.md index c9ad88f3a274..022d3ca33c88 100644 
--- a/docs/installation.md +++ b/docs/installation.md @@ -7,18 +7,18 @@ First install the module via `yarn` or `npm` and do not forget to install the da ``` $ yarn add mikro-orm mongodb # for mongo -$ yarn add mikro-orm mysql2 # for mysql -$ yarn add mikro-orm pg # for postgresql -$ yarn add mikro-orm sqlite # for sqlite +$ yarn add mikro-orm mysql2 # for mysql +$ yarn add mikro-orm pg # for postgresql +$ yarn add mikro-orm sqlite # for sqlite ``` or ``` $ npm i -s mikro-orm mongodb # for mongo -$ npm i -s mikro-orm mysql2 # for mysql -$ npm i -s mikro-orm pg # for postgresql -$ npm i -s mikro-orm sqlite # for sqlite +$ npm i -s mikro-orm mysql2 # for mysql +$ npm i -s mikro-orm pg # for postgresql +$ npm i -s mikro-orm sqlite # for sqlite ``` Next you will need to enable support for [decorators](https://www.typescriptlang.org/docs/handbook/decorators.html) @@ -78,6 +78,8 @@ const orm = await MikroORM.init({ }); ``` +## Request context + Then you will need to fork entity manager for each request so their identity maps will not collide. 
To do so, use the `RequestContext` helper: diff --git a/lib/connections/MySqlConnection.ts b/lib/connections/MySqlConnection.ts index 2030c3be36be..c1d4db55206e 100644 --- a/lib/connections/MySqlConnection.ts +++ b/lib/connections/MySqlConnection.ts @@ -60,7 +60,7 @@ export class MySqlConnection extends Connection { } async loadFile(path: string): Promise { - await this.connection.query(readFileSync(path).toString()); + await this.client.query(readFileSync(path).toString()); } private async query(sql: string): Promise { diff --git a/lib/connections/PostgreSqlConnection.ts b/lib/connections/PostgreSqlConnection.ts index 97b65d3f285f..2b8e43c7a219 100644 --- a/lib/connections/PostgreSqlConnection.ts +++ b/lib/connections/PostgreSqlConnection.ts @@ -47,8 +47,7 @@ export class PostgreSqlConnection extends Connection { } async loadFile(path: string): Promise { - const file = readFileSync(path); - await this.execute(file.toString()); + await this.client.query(readFileSync(path).toString()); } private transformResult(res: any, method: 'all' | 'get' | 'run'): QueryResult | EntityData | EntityData[] { diff --git a/lib/drivers/AbstractSqlDriver.ts b/lib/drivers/AbstractSqlDriver.ts index f0e3380f0ff9..80fff5ffe801 100644 --- a/lib/drivers/AbstractSqlDriver.ts +++ b/lib/drivers/AbstractSqlDriver.ts @@ -43,7 +43,7 @@ export abstract class AbstractSqlDriver extends DatabaseDr async nativeInsert>(entityName: string, data: EntityData): Promise { const collections = this.extractManyToMany(entityName, data); - const pk = this.metadata[entityName] ? this.metadata[entityName].primaryKey : this.config.getNamingStrategy().referenceColumnName(); + const pk = this.getPrimaryKeyField(entityName); if (Object.keys(data).length === 0) { data[pk] = null; @@ -135,4 +135,8 @@ export abstract class AbstractSqlDriver extends DatabaseDr } } + protected getPrimaryKeyField(entityName: string): string { + return this.metadata[entityName] ? 
this.metadata[entityName].primaryKey : this.config.getNamingStrategy().referenceColumnName(); + } + } diff --git a/lib/drivers/PostgreSqlDriver.ts b/lib/drivers/PostgreSqlDriver.ts index 2b300609be64..178f47e3a467 100644 --- a/lib/drivers/PostgreSqlDriver.ts +++ b/lib/drivers/PostgreSqlDriver.ts @@ -16,13 +16,17 @@ export class PostgreSqlDriver extends AbstractSqlDriver { let sql = qb.getQuery(); if (qb.type === QueryType.INSERT && Object.keys(params).length === 0) { - sql = sql.replace('() VALUES ()', '("id") VALUES (DEFAULT)'); + const pk = this.getPrimaryKeyField(entityName); + const prop = this.metadata[entityName].properties[pk]; + sql = sql.replace('() VALUES ()', `("${prop.fieldName}") VALUES (DEFAULT)`); } const res = await this.connection.execute(sql, params, 'run'); - await this.processManyToMany(entityName, res.insertId, collections); + const pk = this.getPrimaryKeyField(entityName); + const id = res.insertId || data[pk]; + await this.processManyToMany(entityName, id, collections); - return res.insertId; + return id; } } diff --git a/tests/EntityManager.postgre.test.ts b/tests/EntityManager.postgre.test.ts index 741b525ec6f7..ba137e1424dc 100644 --- a/tests/EntityManager.postgre.test.ts +++ b/tests/EntityManager.postgre.test.ts @@ -1,3 +1,4 @@ +import { v4 } from 'uuid'; import { Collection, Configuration, EntityManager, MikroORM, Utils } from '../lib'; import { Author2, Book2, BookTag2, Publisher2, PublisherType, Test2 } from './entities-sql'; import { initORMPostgreSql, wipeDatabasePostgreSql } from './bootstrap'; @@ -15,11 +16,11 @@ describe('EntityManagerPostgre', () => { beforeEach(async () => wipeDatabasePostgreSql(orm.em)); test('isConnected()', async () => { - expect(await orm.isConnected()).toBe(true); + await expect(orm.isConnected()).resolves.toBe(true); await orm.close(true); - expect(await orm.isConnected()).toBe(false); + await expect(orm.isConnected()).resolves.toBe(false); await orm.connect(); - expect(await orm.isConnected()).toBe(true); 
+ await expect(orm.isConnected()).resolves.toBe(true); }); test('getConnectionOptions()', async () => { @@ -42,14 +43,14 @@ describe('EntityManagerPostgre', () => { test('should return postgre driver', async () => { const driver = orm.em.getDriver(); - expect(driver instanceof PostgreSqlDriver).toBe(true); - expect(await driver.findOne(Book2.name, { foo: 'bar' })).toBeNull(); - expect(await driver.nativeInsert(Book2.name, { tags: [1] })).not.toBeNull(); + expect(driver).toBeInstanceOf(PostgreSqlDriver); + await expect(driver.findOne(Book2.name, { foo: 'bar' })).resolves.toBeNull(); + await expect(driver.nativeInsert(Book2.name, { uuid: v4(), tags: [1] })).resolves.not.toBeNull(); const res = await driver.getConnection().execute('SELECT 1 as count'); expect(res[0]).toEqual({ count: 1 }); - expect(driver.denormalizePrimaryKey(1)).toBe(1); - expect(driver.denormalizePrimaryKey('1')).toBe('1'); - expect(await driver.find(BookTag2.name, { books: { $in: [1] } })).not.toBeNull(); + expect(driver.getPlatform().denormalizePrimaryKey(1)).toBe(1); + expect(driver.getPlatform().denormalizePrimaryKey('1')).toBe('1'); + await expect(driver.find(BookTag2.name, { books: { $in: [1] } })).resolves.not.toBeNull(); }); test('driver appends errored query', async () => { @@ -67,7 +68,7 @@ describe('EntityManagerPostgre', () => { await repo.persist(author); await expect(repo.find(author)).rejects.toThrowError('Author2 entity provided in search condition. Please provide identifier instead.'); await expect(repo.find({ author })).rejects.toThrowError(`Author2 entity provided in search condition in field 'author'. 
Please provide identifier instead.`); - expect(await repo.findOne({ termsAccepted: false })).toBeNull(); + await expect(repo.findOne({ termsAccepted: false })).resolves.toBeNull(); }); test('transactions', async () => { @@ -150,7 +151,7 @@ describe('EntityManagerPostgre', () => { expect(mock.mock.calls[3][0]).toMatch('ROLLBACK TO SAVEPOINT PostgreSqlDriver_2'); expect(mock.mock.calls[4][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5) RETURNING "id"'); expect(mock.mock.calls[5][0]).toMatch('[query-logger] COMMIT'); - expect(await orm.em.findOne(Author2, { name: 'God Persisted!' })).not.toBeNull(); + await expect(orm.em.findOne(Author2, { name: 'God Persisted!' })).resolves.not.toBeNull(); }); test('should load entities', async () => { @@ -191,7 +192,7 @@ describe('EntityManagerPostgre', () => { const booksRepository = orm.em.getRepository(Book2); const books = await booksRepository.findAll(['author']); expect(books[0].author.isInitialized()).toBe(true); - expect(await authorRepository.findOne({ favouriteBook: bible.id })).not.toBe(null); + await expect(authorRepository.findOne({ favouriteBook: bible.uuid })).resolves.not.toBe(null); orm.em.clear(); const noBooks = await booksRepository.find({ title: 'not existing' }, ['author']); @@ -200,7 +201,7 @@ describe('EntityManagerPostgre', () => { const jon = (await authorRepository.findOne({ name: 'Jon Snow' }, ['books', 'favouriteBook']))!; const authors = await authorRepository.findAll(['books', 'favouriteBook']); - expect(await authorRepository.findOne({ email: 'not existing' })).toBeNull(); + await expect(authorRepository.findOne({ email: 'not existing' })).resolves.toBeNull(); // count test const count = await authorRepository.count(); @@ -229,7 +230,8 @@ describe('EntityManagerPostgre', () => { }); expect(jon.toJSON()).toEqual(o); expect(jon.books.getIdentifiers()).toBeInstanceOf(Array); - expect(typeof 
jon.books.getIdentifiers()[0]).toBe('number'); + expect(typeof jon.books.getIdentifiers()[0]).toBe('string'); + expect(jon.books.getIdentifiers()[0]).toBe(book1.uuid); for (const author of authors) { expect(author.books).toBeInstanceOf(Collection); @@ -266,7 +268,7 @@ describe('EntityManagerPostgre', () => { expect(lastBook[0].title).toBe('My Life on The Wall, part 1'); expect(lastBook[0].author).toBeInstanceOf(Author2); expect(lastBook[0].author.isInitialized()).toBe(true); - await orm.em.getRepository(Book2).remove(lastBook[0].id); + await orm.em.getRepository(Book2).remove(lastBook[0].uuid); }); test('findOne should initialize entity that is already in IM', async () => { @@ -335,7 +337,7 @@ describe('EntityManagerPostgre', () => { const json = publisher.toJSON().books; for (const book of publisher.books) { - expect(json.find((b: Book2) => b.id === book.id)).toMatchObject({ + expect(json.find((b: Book2) => b.uuid === book.uuid)).toMatchObject({ author: book.author.id, }); } @@ -434,7 +436,7 @@ describe('EntityManagerPostgre', () => { await tags[0].books.init(); expect(tags[0].books.count()).toBe(2); expect(tags[0].books.getItems()[0]).toBeInstanceOf(Book2); - expect(tags[0].books.getItems()[0].id).toBeDefined(); + expect(tags[0].books.getItems()[0].uuid).toBeDefined(); expect(tags[0].books.getItems()[0].isInitialized()).toBe(true); expect(tags[0].books.isInitialized()).toBe(true); const old = tags[0]; @@ -462,14 +464,14 @@ describe('EntityManagerPostgre', () => { book.tags.remove(tag1); await orm.em.persist(book); orm.em.clear(); - book = (await orm.em.findOne(Book2, book.id, ['tags']))!; + book = (await orm.em.findOne(Book2, book.uuid, ['tags']))!; expect(book.tags.count()).toBe(1); // add book.tags.add(tag1, new BookTag2('fresh')); await orm.em.persist(book); orm.em.clear(); - book = (await orm.em.findOne(Book2, book.id, ['tags']))!; + book = (await orm.em.findOne(Book2, book.uuid, ['tags']))!; expect(book.tags.count()).toBe(3); // contains @@ -483,7 +485,7 @@ 
describe('EntityManagerPostgre', () => {
     book.tags.removeAll();
     await orm.em.persist(book);
     orm.em.clear();
-    book = (await orm.em.findOne(Book2, book.id, ['tags']))!;
+    book = (await orm.em.findOne(Book2, book.uuid, ['tags']))!;
     expect(book.tags.count()).toBe(0);
   });
 
@@ -707,8 +709,9 @@ describe('EntityManagerPostgre', () => {
     author2.version = 123;
     await orm.em.persist([author1, author2, book]);
     const diff = Utils.diffEntities(author1, author2);
-    expect(diff).toMatchObject({ name: 'Name 2', favouriteBook: book.id });
-    expect(typeof diff.favouriteBook).toBe('number');
+    expect(diff).toMatchObject({ name: 'Name 2', favouriteBook: book.uuid });
+    expect(typeof diff.favouriteBook).toBe('string');
+    expect(diff.favouriteBook).toBe(book.uuid);
   });
 
   test('self referencing (2 step)', async () => {
@@ -749,9 +752,9 @@ describe('EntityManagerPostgre', () => {
     expect(mock.mock.calls.length).toBe(8);
     expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION');
     expect(mock.mock.calls[1][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5)');
-    expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "author_id") VALUES ($1, $2)');
-    expect(mock.mock.calls[3][0]).toMatch('INSERT INTO "book2" ("title", "author_id") VALUES ($1, $2)');
-    expect(mock.mock.calls[4][0]).toMatch('INSERT INTO "book2" ("title", "author_id") VALUES ($1, $2)');
+    expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "uuid_pk", "created_at", "author_id") VALUES ($1, $2, $3, $4)');
+    expect(mock.mock.calls[3][0]).toMatch('INSERT INTO "book2" ("title", "uuid_pk", "created_at", "author_id") VALUES ($1, $2, $3, $4)');
+    expect(mock.mock.calls[4][0]).toMatch('INSERT INTO "book2" ("title", "uuid_pk", "created_at", "author_id") VALUES ($1, $2, $3, $4)');
     expect(mock.mock.calls[5][0]).toMatch('UPDATE "author2" SET "favourite_author_id" = $1, "updated_at" = $2 WHERE "id" = $3');
expect(mock.mock.calls[6][0]).toMatch('COMMIT'); expect(mock.mock.calls[7][0]).toMatch('SELECT "e0".* FROM "author2" AS "e0" WHERE "e0"."id" = $1'); diff --git a/tests/mysql-schema.sql b/tests/mysql-schema.sql index 00a38644b236..75faa1a25f38 100644 --- a/tests/mysql-schema.sql +++ b/tests/mysql-schema.sql @@ -74,7 +74,7 @@ DROP TABLE IF EXISTS `book2`; CREATE TABLE `book2` ( `uuid_pk` varchar(36) NOT NULL, - `created_at` datetime(3) DEFAULT NULL, + `created_at` datetime(3) DEFAULT CURRENT_TIMESTAMP(3), `title` varchar(255) DEFAULT NULL, `foo` varchar(255) DEFAULT NULL, `author_id` int(11) DEFAULT NULL, diff --git a/tests/postgre-schema.sql b/tests/postgre-schema.sql index 5a5f4e7616ac..6c66f9445bba 100644 --- a/tests/postgre-schema.sql +++ b/tests/postgre-schema.sql @@ -11,7 +11,7 @@ CREATE TABLE author2 ( name varchar(255) DEFAULT NULL, email varchar(255) DEFAULT NULL, born timestamp(0) DEFAULT NULL, - favourite_book_id int DEFAULT NULL, + favourite_book_id varchar(36) DEFAULT NULL, favourite_author_id int DEFAULT NULL, PRIMARY KEY (id) ); @@ -25,7 +25,7 @@ CREATE SEQUENCE book2_to_book_tag2_seq; CREATE TABLE book2_to_book_tag2 ( id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('book2_to_book_tag2_seq'), - book2_id int DEFAULT NULL, + book2_uuid_pk varchar(36) DEFAULT NULL, book_tag2_id int DEFAULT NULL, PRIMARY KEY (id) ); @@ -46,17 +46,15 @@ CREATE TABLE book_tag2 ( DROP TABLE IF EXISTS book2; -DROP SEQUENCE IF EXISTS book2_seq; - -CREATE SEQUENCE book2_seq; CREATE TABLE book2 ( - id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('book2_seq'), + uuid_pk varchar(36) NOT NULL, + created_at timestamp(3) DEFAULT NOW(), title varchar(255) DEFAULT NULL, foo varchar(255) DEFAULT NULL, author_id int DEFAULT NULL, publisher_id int DEFAULT NULL, - PRIMARY KEY (id) + PRIMARY KEY (uuid_pk) );