test: fix tests after merging upstream
B4nan committed Mar 18, 2019
1 parent e89ede0 commit 71e0220
Showing 10 changed files with 72 additions and 53 deletions.
12 changes: 6 additions & 6 deletions README.md
@@ -37,18 +37,18 @@ First install the module via `yarn` or `npm` and do not forget to install the database driver as well

```
$ yarn add mikro-orm mongodb # for mongo
$ yarn add mikro-orm mysql2 # for mysql
$ yarn add mikro-orm pg # for postgresql
$ yarn add mikro-orm sqlite # for sqlite
```

or

```
$ npm i -s mikro-orm mongodb # for mongo
$ npm i -s mikro-orm mysql2 # for mysql
$ npm i -s mikro-orm pg # for postgresql
$ npm i -s mikro-orm sqlite # for sqlite
```

Next you will need to enable support for [decorators](https://www.typescriptlang.org/docs/handbook/decorators.html)
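
As a reminder (a sketch, not part of this diff), decorator support comes down to TypeScript compiler flags; in a real project they live in `tsconfig.json` under `compilerOptions`, shown here as a TypeScript object purely for illustration:

```
// Hedged sketch: the flag the decorator syntax requires, plus the metadata
// flag commonly enabled alongside it. In practice these belong in
// tsconfig.json, not in application code.
const compilerOptions = {
  experimentalDecorators: true,
  emitDecoratorMetadata: true,
};
```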
11 changes: 10 additions & 1 deletion docker-compose.yml
@@ -14,12 +14,21 @@ services:
image: mysql:5.7
restart: unless-stopped
ports:
- "127.0.0.1:3307:3306"
- 3307:3306
environment:
MYSQL_ALLOW_EMPTY_PASSWORD: 1
volumes:
- mysql:/var/lib/mysql

postgre:
container_name: postgre
image: postgres:11.2
ports:
- 5432:5432
volumes:
- postgre:/var/lib/postgresql/data

volumes:
mongo:
mysql:
postgre:
14 changes: 8 additions & 6 deletions docs/installation.md
@@ -7,18 +7,18 @@ First install the module via `yarn` or `npm` and do not forget to install the database driver as well

```
$ yarn add mikro-orm mongodb # for mongo
$ yarn add mikro-orm mysql2 # for mysql
$ yarn add mikro-orm pg # for postgresql
$ yarn add mikro-orm sqlite # for sqlite
```

or

```
$ npm i -s mikro-orm mongodb # for mongo
$ npm i -s mikro-orm mysql2 # for mysql
$ npm i -s mikro-orm pg # for postgresql
$ npm i -s mikro-orm sqlite # for sqlite
```

Next you will need to enable support for [decorators](https://www.typescriptlang.org/docs/handbook/decorators.html)
@@ -78,6 +78,8 @@ const orm = await MikroORM.init({
});
```

## Request context

Then you will need to fork entity manager for each request so their identity maps will not
collide. To do so, use the `RequestContext` helper:
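
A minimal sketch of that forking step, assuming an Express app (Express and `esModuleInterop` are assumptions here, not part of this commit); `RequestContext.create()` forks the given `EntityManager` and runs the rest of the request inside that context:

```
import express from 'express';
import { MikroORM, RequestContext } from 'mikro-orm';

// Hedged sketch, not part of this diff: fork the global EntityManager for
// every incoming request so identity maps do not leak between requests.
// Assumes `orm` was created via MikroORM.init() as shown above.
export function createApp(orm: MikroORM) {
  const app = express();

  app.use((req, res, next) => {
    RequestContext.create(orm.em, next); // runs the rest of the request in a forked context
  });

  return app;
}
```

Handlers can then pull the forked instance back out through the `RequestContext` helper instead of touching the global `orm.em` directly.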

2 changes: 1 addition & 1 deletion lib/connections/MySqlConnection.ts
@@ -60,7 +60,7 @@ export class MySqlConnection extends Connection {
}

async loadFile(path: string): Promise<void> {
await this.connection.query(readFileSync(path).toString());
await this.client.query(readFileSync(path).toString());
}

private async query(sql: string): Promise<void> {
3 changes: 1 addition & 2 deletions lib/connections/PostgreSqlConnection.ts
@@ -47,8 +47,7 @@ export class PostgreSqlConnection extends Connection {
}

async loadFile(path: string): Promise<void> {
const file = readFileSync(path);
await this.execute(file.toString());
await this.client.query(readFileSync(path).toString());
}

private transformResult(res: any, method: 'all' | 'get' | 'run'): QueryResult | EntityData<IEntity> | EntityData<IEntity>[] {
6 changes: 5 additions & 1 deletion lib/drivers/AbstractSqlDriver.ts
@@ -43,7 +43,7 @@ export abstract class AbstractSqlDriver<C extends Connection> extends DatabaseDriver

async nativeInsert<T extends IEntityType<T>>(entityName: string, data: EntityData<T>): Promise<number> {
const collections = this.extractManyToMany(entityName, data);
const pk = this.metadata[entityName] ? this.metadata[entityName].primaryKey : this.config.getNamingStrategy().referenceColumnName();
const pk = this.getPrimaryKeyField(entityName);

if (Object.keys(data).length === 0) {
data[pk] = null;
@@ -135,4 +135,8 @@ }
}
}

protected getPrimaryKeyField(entityName: string): string {
return this.metadata[entityName] ? this.metadata[entityName].primaryKey : this.config.getNamingStrategy().referenceColumnName();
}

}
10 changes: 7 additions & 3 deletions lib/drivers/PostgreSqlDriver.ts
@@ -16,13 +16,17 @@ export class PostgreSqlDriver extends AbstractSqlDriver<PostgreSqlConnection> {
let sql = qb.getQuery();

if (qb.type === QueryType.INSERT && Object.keys(params).length === 0) {
sql = sql.replace('() VALUES ()', '("id") VALUES (DEFAULT)');
const pk = this.getPrimaryKeyField(entityName);
const prop = this.metadata[entityName].properties[pk];
sql = sql.replace('() VALUES ()', `("${prop.fieldName}") VALUES (DEFAULT)`);
}

const res = await this.connection.execute(sql, params, 'run');
await this.processManyToMany(entityName, res.insertId, collections);
const pk = this.getPrimaryKeyField(entityName);
const id = res.insertId || data[pk];
await this.processManyToMany(entityName, id, collections);

return res.insertId;
return id;
}

}
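
For illustration only (a sketch mirroring the updated test below, not additional behaviour in this commit): with `Book2` now keyed by a client-generated uuid, Postgres returns no serial `insertId`, so the patched `nativeInsert()` falls back to the primary key value supplied in `data`:

```
import { v4 } from 'uuid';
import { MikroORM } from '../lib';
import { Book2 } from './entities-sql';
import { PostgreSqlDriver } from '../lib/drivers/PostgreSqlDriver';

// Hedged sketch: insert a Book2 row whose primary key is generated on the
// client. The resolved id is expected to be the uuid from `data`, since
// `res.insertId` is empty for a non-serial primary key. Import paths assume
// this lives next to the existing tests.
async function insertBookSketch(orm: MikroORM) {
  const driver = orm.em.getDriver<PostgreSqlDriver>();
  const uuid = v4();
  const id = await driver.nativeInsert(Book2.name, { uuid, title: 'sketch' });
  console.log(id); // expected to equal `uuid`
}
```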
53 changes: 28 additions & 25 deletions tests/EntityManager.postgre.test.ts
@@ -1,3 +1,4 @@
import { v4 } from 'uuid';
import { Collection, Configuration, EntityManager, MikroORM, Utils } from '../lib';
import { Author2, Book2, BookTag2, Publisher2, PublisherType, Test2 } from './entities-sql';
import { initORMPostgreSql, wipeDatabasePostgreSql } from './bootstrap';
@@ -15,11 +16,11 @@ describe('EntityManagerPostgre', () => {
beforeEach(async () => wipeDatabasePostgreSql(orm.em));

test('isConnected()', async () => {
expect(await orm.isConnected()).toBe(true);
await expect(orm.isConnected()).resolves.toBe(true);
await orm.close(true);
expect(await orm.isConnected()).toBe(false);
await expect(orm.isConnected()).resolves.toBe(false);
await orm.connect();
expect(await orm.isConnected()).toBe(true);
await expect(orm.isConnected()).resolves.toBe(true);
});

test('getConnectionOptions()', async () => {
@@ -42,14 +43,14 @@

test('should return postgre driver', async () => {
const driver = orm.em.getDriver<PostgreSqlDriver>();
expect(driver instanceof PostgreSqlDriver).toBe(true);
expect(await driver.findOne(Book2.name, { foo: 'bar' })).toBeNull();
expect(await driver.nativeInsert(Book2.name, { tags: [1] })).not.toBeNull();
expect(driver).toBeInstanceOf(PostgreSqlDriver);
await expect(driver.findOne(Book2.name, { foo: 'bar' })).resolves.toBeNull();
await expect(driver.nativeInsert(Book2.name, { uuid: v4(), tags: [1] })).resolves.not.toBeNull();
const res = await driver.getConnection().execute('SELECT 1 as count');
expect(res[0]).toEqual({ count: 1 });
expect(driver.denormalizePrimaryKey(1)).toBe(1);
expect(driver.denormalizePrimaryKey('1')).toBe('1');
expect(await driver.find(BookTag2.name, { books: { $in: [1] } })).not.toBeNull();
expect(driver.getPlatform().denormalizePrimaryKey(1)).toBe(1);
expect(driver.getPlatform().denormalizePrimaryKey('1')).toBe('1');
await expect(driver.find(BookTag2.name, { books: { $in: [1] } })).resolves.not.toBeNull();
});

test('driver appends errored query', async () => {
@@ -67,7 +68,7 @@
await repo.persist(author);
await expect(repo.find(author)).rejects.toThrowError('Author2 entity provided in search condition. Please provide identifier instead.');
await expect(repo.find({ author })).rejects.toThrowError(`Author2 entity provided in search condition in field 'author'. Please provide identifier instead.`);
expect(await repo.findOne({ termsAccepted: false })).toBeNull();
await expect(repo.findOne({ termsAccepted: false })).resolves.toBeNull();
});

test('transactions', async () => {
@@ -150,7 +151,7 @@
expect(mock.mock.calls[3][0]).toMatch('ROLLBACK TO SAVEPOINT PostgreSqlDriver_2');
expect(mock.mock.calls[4][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5) RETURNING "id"');
expect(mock.mock.calls[5][0]).toMatch('[query-logger] COMMIT');
expect(await orm.em.findOne(Author2, { name: 'God Persisted!' })).not.toBeNull();
await expect(orm.em.findOne(Author2, { name: 'God Persisted!' })).resolves.not.toBeNull();
});

test('should load entities', async () => {
@@ -191,7 +192,7 @@
const booksRepository = orm.em.getRepository(Book2);
const books = await booksRepository.findAll(['author']);
expect(books[0].author.isInitialized()).toBe(true);
expect(await authorRepository.findOne({ favouriteBook: bible.id })).not.toBe(null);
await expect(authorRepository.findOne({ favouriteBook: bible.uuid })).resolves.not.toBe(null);
orm.em.clear();

const noBooks = await booksRepository.find({ title: 'not existing' }, ['author']);
@@ -200,7 +201,7 @@

const jon = (await authorRepository.findOne({ name: 'Jon Snow' }, ['books', 'favouriteBook']))!;
const authors = await authorRepository.findAll(['books', 'favouriteBook']);
expect(await authorRepository.findOne({ email: 'not existing' })).toBeNull();
await expect(authorRepository.findOne({ email: 'not existing' })).resolves.toBeNull();

// count test
const count = await authorRepository.count();
@@ -229,7 +230,8 @@
});
expect(jon.toJSON()).toEqual(o);
expect(jon.books.getIdentifiers()).toBeInstanceOf(Array);
expect(typeof jon.books.getIdentifiers()[0]).toBe('number');
expect(typeof jon.books.getIdentifiers()[0]).toBe('string');
expect(jon.books.getIdentifiers()[0]).toBe(book1.uuid);

for (const author of authors) {
expect(author.books).toBeInstanceOf(Collection);
@@ -266,7 +268,7 @@
expect(lastBook[0].title).toBe('My Life on The Wall, part 1');
expect(lastBook[0].author).toBeInstanceOf(Author2);
expect(lastBook[0].author.isInitialized()).toBe(true);
await orm.em.getRepository(Book2).remove(lastBook[0].id);
await orm.em.getRepository(Book2).remove(lastBook[0].uuid);
});

test('findOne should initialize entity that is already in IM', async () => {
@@ -335,7 +337,7 @@
const json = publisher.toJSON().books;

for (const book of publisher.books) {
expect(json.find((b: Book2) => b.id === book.id)).toMatchObject({
expect(json.find((b: Book2) => b.uuid === book.uuid)).toMatchObject({
author: book.author.id,
});
}
@@ -434,7 +436,7 @@
await tags[0].books.init();
expect(tags[0].books.count()).toBe(2);
expect(tags[0].books.getItems()[0]).toBeInstanceOf(Book2);
expect(tags[0].books.getItems()[0].id).toBeDefined();
expect(tags[0].books.getItems()[0].uuid).toBeDefined();
expect(tags[0].books.getItems()[0].isInitialized()).toBe(true);
expect(tags[0].books.isInitialized()).toBe(true);
const old = tags[0];
@@ -462,14 +464,14 @@
book.tags.remove(tag1);
await orm.em.persist(book);
orm.em.clear();
book = (await orm.em.findOne(Book2, book.id, ['tags']))!;
book = (await orm.em.findOne(Book2, book.uuid, ['tags']))!;
expect(book.tags.count()).toBe(1);

// add
book.tags.add(tag1, new BookTag2('fresh'));
await orm.em.persist(book);
orm.em.clear();
book = (await orm.em.findOne(Book2, book.id, ['tags']))!;
book = (await orm.em.findOne(Book2, book.uuid, ['tags']))!;
expect(book.tags.count()).toBe(3);

// contains
@@ -483,7 +485,7 @@
book.tags.removeAll();
await orm.em.persist(book);
orm.em.clear();
book = (await orm.em.findOne(Book2, book.id, ['tags']))!;
book = (await orm.em.findOne(Book2, book.uuid, ['tags']))!;
expect(book.tags.count()).toBe(0);
});

@@ -707,8 +709,9 @@
author2.version = 123;
await orm.em.persist([author1, author2, book]);
const diff = Utils.diffEntities(author1, author2);
expect(diff).toMatchObject({ name: 'Name 2', favouriteBook: book.id });
expect(typeof diff.favouriteBook).toBe('number');
expect(diff).toMatchObject({ name: 'Name 2', favouriteBook: book.uuid });
expect(typeof diff.favouriteBook).toBe('string');
expect(diff.favouriteBook).toBe(book.uuid);
});

test('self referencing (2 step)', async () => {
@@ -749,9 +752,9 @@
expect(mock.mock.calls.length).toBe(8);
expect(mock.mock.calls[0][0]).toMatch('START TRANSACTION');
expect(mock.mock.calls[1][0]).toMatch('INSERT INTO "author2" ("name", "email", "created_at", "updated_at", "terms_accepted") VALUES ($1, $2, $3, $4, $5)');
expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "author_id") VALUES ($1, $2)');
expect(mock.mock.calls[3][0]).toMatch('INSERT INTO "book2" ("title", "author_id") VALUES ($1, $2)');
expect(mock.mock.calls[4][0]).toMatch('INSERT INTO "book2" ("title", "author_id") VALUES ($1, $2)');
expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "uuid_pk", "created_at", "author_id") VALUES ($1, $2, $3, $4)');
expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "uuid_pk", "created_at", "author_id") VALUES ($1, $2, $3, $4)');
expect(mock.mock.calls[2][0]).toMatch('INSERT INTO "book2" ("title", "uuid_pk", "created_at", "author_id") VALUES ($1, $2, $3, $4)');
expect(mock.mock.calls[5][0]).toMatch('UPDATE "author2" SET "favourite_author_id" = $1, "updated_at" = $2 WHERE "id" = $3');
expect(mock.mock.calls[6][0]).toMatch('COMMIT');
expect(mock.mock.calls[7][0]).toMatch('SELECT "e0".* FROM "author2" AS "e0" WHERE "e0"."id" = $1');
2 changes: 1 addition & 1 deletion tests/mysql-schema.sql
@@ -74,7 +74,7 @@ DROP TABLE IF EXISTS `book2`;

CREATE TABLE `book2` (
`uuid_pk` varchar(36) NOT NULL,
`created_at` datetime(3) DEFAULT NULL,
`created_at` datetime(3) DEFAULT CURRENT_TIMESTAMP(3),
`title` varchar(255) DEFAULT NULL,
`foo` varchar(255) DEFAULT NULL,
`author_id` int(11) DEFAULT NULL,
12 changes: 5 additions & 7 deletions tests/postgre-schema.sql
@@ -11,7 +11,7 @@ CREATE TABLE author2 (
name varchar(255) DEFAULT NULL,
email varchar(255) DEFAULT NULL,
born timestamp(0) DEFAULT NULL,
favourite_book_id int DEFAULT NULL,
favourite_book_id varchar(36) DEFAULT NULL,
favourite_author_id int DEFAULT NULL,
PRIMARY KEY (id)
);
@@ -25,7 +25,7 @@ CREATE SEQUENCE book2_to_book_tag2_seq;

CREATE TABLE book2_to_book_tag2 (
id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('book2_to_book_tag2_seq'),
book2_id int DEFAULT NULL,
book2_uuid_pk varchar(36) DEFAULT NULL,
book_tag2_id int DEFAULT NULL,
PRIMARY KEY (id)
);
@@ -46,17 +46,15 @@ CREATE TABLE book_tag2 (


DROP TABLE IF EXISTS book2;
DROP SEQUENCE IF EXISTS book2_seq;

CREATE SEQUENCE book2_seq;

CREATE TABLE book2 (
id int check (id > 0) NOT NULL DEFAULT NEXTVAL ('book2_seq'),
uuid_pk varchar(36) NOT NULL,
created_at timestamp(3) DEFAULT NOW(),
title varchar(255) DEFAULT NULL,
foo varchar(255) DEFAULT NULL,
author_id int DEFAULT NULL,
publisher_id int DEFAULT NULL,
PRIMARY KEY (id)
PRIMARY KEY (uuid_pk)
);


