Skip to content

Commit

Permalink
fix(core): fix pivot tables for wildcard schema entities
Browse files · Browse the repository at this point in the history
Closes #2690
  • Loading branch information
B4nan committed Jan 31, 2022
1 parent 1f0122e commit 623dc91
Show file tree
Hide file tree
Showing 7 changed files with 147 additions and 24 deletions.
2 changes: 1 addition & 1 deletion packages/core/src/drivers/DatabaseDriver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ export abstract class DatabaseDriver<C extends Connection> implements IDatabaseD
}

async syncCollection<T, O>(coll: Collection<T, O>, options?: DriverMethodOptions): Promise<void> {
const pk = this.metadata.find(coll.property.type)!.primaryKeys[0];
const pk = coll.property.targetMeta!.primaryKeys[0];
const data = { [coll.property.name]: coll.getIdentifiers(pk) } as EntityData<T>;
await this.nativeUpdate<T>(coll.owner.constructor.name, coll.owner.__helper!.getPrimaryKey() as FilterQuery<T>, data, options);
}
Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/drivers/IDatabaseDriver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ export interface IDatabaseDriver<C extends Connection = Connection> {

nativeDelete<T extends AnyEntity<T>>(entityName: string, where: FilterQuery<T>, options?: NativeDeleteOptions<T>): Promise<QueryResult<T>>;

syncCollection<T, O>(collection: Collection<T, O>, options?: { ctx?: Transaction }): Promise<void>;
syncCollection<T, O>(collection: Collection<T, O>, options?: DriverMethodOptions): Promise<void>;

count<T extends AnyEntity<T>, P extends string = never>(entityName: string, where: FilterQuery<T>, options?: CountOptions<T, P>): Promise<number>;

Expand Down
3 changes: 2 additions & 1 deletion packages/core/src/metadata/MetadataDiscovery.ts
Original file line number Diff line number Diff line change
Expand Up @@ -451,6 +451,7 @@ export class MetadataDiscovery {
[schemaName, tableName] = prop.pivotTable.split('.');
}

schemaName ??= meta.schema;
const targetType = prop.targetMeta!.root.className;
const data = new EntityMetadata({
name: prop.pivotTable,
Expand Down Expand Up @@ -528,7 +529,7 @@ export class MetadataDiscovery {
ret.joinColumns = [];
ret.inverseJoinColumns = [];
const schema = meta.schema ?? this.platform.getDefaultSchemaName();
ret.referencedTableName = schema ? schema + '.' + meta.tableName : meta.tableName;
ret.referencedTableName = schema && schema !== '*' ? schema + '.' + meta.tableName : meta.tableName;

if (owner) {
ret.owner = true;
Expand Down
14 changes: 9 additions & 5 deletions packages/core/src/unit-of-work/UnitOfWork.ts
Original file line number Diff line number Diff line change
Expand Up @@ -546,17 +546,21 @@ export class UnitOfWork {
}
}

private processToManyReference<T extends AnyEntity<T>>(reference: Collection<AnyEntity>, visited: Set<AnyEntity>, parent: T, prop: EntityProperty<T>): void {
if (this.isCollectionSelfReferenced(reference, visited)) {
this.extraUpdates.add([parent, prop.name, reference]);
private processToManyReference<T extends AnyEntity<T>>(collection: Collection<AnyEntity>, visited: Set<AnyEntity>, parent: T, prop: EntityProperty<T>): void {
if (this.isCollectionSelfReferenced(collection, visited)) {
this.extraUpdates.add([parent, prop.name, collection]);
parent[prop.name as keyof T] = new Collection<AnyEntity>(parent) as unknown as T[keyof T];

return;
}

reference.getItems(false)
collection.getItems(false)
.filter(item => !item.__helper!.__originalEntityData)
.forEach(item => this.findNewEntities(item, visited));
.forEach(item => {
// propagate schema from parent
item.__helper!.__schema ??= collection.owner.__helper!.__schema;
this.findNewEntities(item, visited);
});
}

private async runHooks<T extends AnyEntity<T>>(type: EventType, changeSet: ChangeSet<T>, sync = false): Promise<unknown> {
Expand Down
26 changes: 22 additions & 4 deletions packages/knex/src/AbstractSqlDriver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra
qb.select(fields)
.populate(populate)
.where(where)
.orderBy([ ...Utils.asArray(options.orderBy), ...joinedPropsOrderBy ])
.orderBy([...Utils.asArray(options.orderBy), ...joinedPropsOrderBy])
.groupBy(options.groupBy!)
.having(options.having!)
.withSchema(this.getSchemaName(meta, options));
Expand Down Expand Up @@ -233,7 +233,7 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra
const qb = this.createQueryBuilder<T>(entityName, options.ctx, true, options.convertCustomTypes).withSchema(this.getSchemaName(meta, options));
res = await this.rethrow(qb.insert(data as unknown as RequiredEntityData<T>[]).execute('run', false));
} else {
let sql = `insert into ${(this.getTableName(meta, options))} `;
let sql = `insert into ${this.getTableName(meta, options)} `;
/* istanbul ignore next */
sql += fields.length > 0 ? '(' + fields.map(k => this.platform.quoteIdentifier(k)).join(', ') + ')' : 'default';
sql += ` values `;
Expand Down Expand Up @@ -430,13 +430,22 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra
if (coll.property.reference === ReferenceType.ONE_TO_MANY) {
const cols = coll.property.referencedColumnNames;
const qb = this.createQueryBuilder(coll.property.type, ctx, true)
.withSchema(this.getSchemaName(meta, options))
.update({ [coll.property.mappedBy]: pks })
.getKnexQuery()
.whereIn(cols, insertDiff as string[][]);

return this.rethrow(this.execute<any>(qb));
}

const ownerSchema = wrapped.getSchema() === '*' ? this.config.get('schema') : wrapped.getSchema();
const pivotMeta = this.metadata.find(coll.property.pivotTable)!;

if (pivotMeta.schema === '*') {
options ??= {};
options.schema = ownerSchema;
}

return this.rethrow(this.updateCollectionDiff<T, O>(meta, coll.property, pks as any, deleteDiff as any, insertDiff as any, options));
}

Expand Down Expand Up @@ -653,13 +662,22 @@ export abstract class AbstractSqlDriver<C extends AbstractSqlConnection = Abstra
}
}

protected async updateCollectionDiff<T extends AnyEntity<T>, O extends AnyEntity<O>>(meta: EntityMetadata<O>, prop: EntityProperty<T>, pks: Primary<O>[], deleteDiff: Primary<T>[][] | boolean, insertDiff: Primary<T>[][], options?: DriverMethodOptions): Promise<void> {
protected async updateCollectionDiff<T extends AnyEntity<T>, O extends AnyEntity<O>>(
meta: EntityMetadata<O>,
prop: EntityProperty<T>,
pks: Primary<O>[],
deleteDiff: Primary<T>[][] | boolean,
insertDiff: Primary<T>[][],
options?: DriverMethodOptions & { ownerSchema?: string },
): Promise<void> {
if (!deleteDiff) {
deleteDiff = [];
}

if (deleteDiff === true || deleteDiff.length > 0) {
const qb1 = this.createQueryBuilder(prop.pivotTable, options?.ctx, true).withSchema(this.getSchemaName(meta, options)).unsetFlag(QueryFlag.CONVERT_CUSTOM_TYPES);
const qb1 = this.createQueryBuilder(prop.pivotTable, options?.ctx, true)
.withSchema(this.getSchemaName(meta, options))
.unsetFlag(QueryFlag.CONVERT_CUSTOM_TYPES);
const knex = qb1.getKnex();

if (Array.isArray(deleteDiff)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,34 @@ export class Book {
@ManyToOne({ entity: () => Book, onUpdateIntegrity: 'cascade', onDelete: 'set null', nullable: true })
basedOn?: Book;
}
",
"import { Entity, PrimaryKey, Property } from '@mikro-orm/core';
@Entity({ schema: 'n2' })
export class BookTag {
@PrimaryKey()
id!: number;
@Property({ length: 255, nullable: true })
name?: string;
}
",
"import { Entity, ManyToOne } from '@mikro-orm/core';
import { Book } from './Book';
import { BookTag } from './BookTag';
@Entity({ schema: 'n2' })
export class BookTags {
@ManyToOne({ entity: () => Book, onUpdateIntegrity: 'cascade', onDelete: 'cascade', primary: true })
book!: Book;
@ManyToOne({ entity: () => BookTag, onUpdateIntegrity: 'cascade', onDelete: 'cascade', primary: true })
bookTag!: BookTag;
}
",
]
Expand Down
96 changes: 84 additions & 12 deletions tests/features/multiple-schemas/multiple-schemas.postgres.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { BaseEntity, Cascade, Collection, Entity, LockMode, ManyToOne, MikroORM, OneToMany, OneToOne, PrimaryKey, Property, wrap } from '@mikro-orm/core';
import { BaseEntity, Cascade, Collection, Entity, LockMode, ManyToMany, ManyToOne, MikroORM, OneToMany, OneToOne, PrimaryKey, Property, wrap } from '@mikro-orm/core';
import type { PostgreSqlDriver } from '@mikro-orm/postgresql';
import { mockLogger } from '../../helpers';

Expand All @@ -20,6 +20,17 @@ export class Author {

}

@Entity({ schema: '*' })
export class BookTag extends BaseEntity<BookTag, 'id'> {

@PrimaryKey()
id!: number;

@Property({ nullable: true })
name?: string;

}

@Entity({ schema: '*' })
export class Book extends BaseEntity<Book, 'id'> {

Expand All @@ -35,6 +46,9 @@ export class Book extends BaseEntity<Book, 'id'> {
@ManyToOne(() => Book, { nullable: true })
basedOn?: Book;

@ManyToMany(() => BookTag, undefined, { cascade: [Cascade.ALL] })
tags = new Collection<BookTag>(this);

}

describe('multiple connected schemas in postgres', () => {
Expand All @@ -43,7 +57,7 @@ describe('multiple connected schemas in postgres', () => {

beforeAll(async () => {
orm = await MikroORM.init({
entities: [Author, Book],
entities: [Author, Book, BookTag],
dbName: `mikro_orm_test_multi_schemas`,
type: 'postgresql',
});
Expand Down Expand Up @@ -74,6 +88,10 @@ describe('multiple connected schemas in postgres', () => {
await orm.em.createQueryBuilder(Book).withSchema('n3').truncate().execute();
await orm.em.createQueryBuilder(Book).withSchema('n4').truncate().execute();
await orm.em.createQueryBuilder(Book).withSchema('n5').truncate().execute();
await orm.em.createQueryBuilder(BookTag).truncate().execute(); // current schema from config
await orm.em.createQueryBuilder(BookTag).withSchema('n3').truncate().execute();
await orm.em.createQueryBuilder(BookTag).withSchema('n4').truncate().execute();
await orm.em.createQueryBuilder(BookTag).withSchema('n5').truncate().execute();
orm.em.clear();
});

Expand All @@ -84,8 +102,11 @@ describe('multiple connected schemas in postgres', () => {
const author = new Author();
author.name = 'a1';
author.books.add(new Book(), new Book(), new Book());
author.books[0].tags.add(new BookTag(), new BookTag(), new BookTag());
author.books[1].basedOn = author.books[0];
author.books[1].tags.add(new BookTag(), new BookTag(), new BookTag());
author.books[2].basedOn = author.books[0];
author.books[2].tags.add(new BookTag(), new BookTag(), new BookTag());

// schema not specified yet, will be used from metadata
expect(wrap(author).getSchema()).toBeUndefined();
Expand All @@ -94,8 +115,11 @@ describe('multiple connected schemas in postgres', () => {
// schema is saved after flush
expect(wrap(author).getSchema()).toBe('n1');
expect(wrap(author.books[0]).getSchema()).toBe('n2');
expect(wrap(author.books[0].tags[0]).getSchema()).toBe('n2');
expect(wrap(author.books[1]).getSchema()).toBe('n2');
expect(wrap(author.books[1].tags[0]).getSchema()).toBe('n2');
expect(wrap(author.books[2]).getSchema()).toBe('n2');
expect(wrap(author.books[2].tags[0]).getSchema()).toBe('n2');
});

test('use different schema via options', async () => {
Expand All @@ -108,52 +132,91 @@ describe('multiple connected schemas in postgres', () => {
book51.setSchema('n5');
const book52 = orm.em.create(Book, {});
wrap(book52).setSchema('n5');
author.books.add(orm.em.create(Book, {}, { schema: 'n3' }), orm.em.create(Book, {}, { schema: 'n4' }), book51, book52);
author.books.add(
orm.em.create(Book, {}, { schema: 'n3' }),
orm.em.create(Book, {}, { schema: 'n4' }),
book51,
book52,
);
author.books[0].tags.add(new BookTag(), new BookTag(), new BookTag());
author.books[1].basedOn = author.books[0];
author.books[1].tags.add(new BookTag(), new BookTag(), new BookTag());
author.books[2].basedOn = author.books[0];
author.books[2].tags.add(new BookTag(), new BookTag(), new BookTag());
author.books[3].tags.add(new BookTag(), new BookTag(), new BookTag());

// schema not specified yet, will be used from metadata
expect(wrap(author).getSchema()).toBeUndefined();
const mock = mockLogger(orm);
await orm.em.persistAndFlush(author);
expect(orm.em.getUnitOfWork().getIdentityMap().keys()).toEqual([
'BookTag-n3:1',
'BookTag-n3:2',
'BookTag-n3:3',
'BookTag-n4:1',
'BookTag-n4:2',
'BookTag-n4:3',
'BookTag-n5:1',
'BookTag-n5:2',
'BookTag-n5:3',
'BookTag-n5:4',
'BookTag-n5:5',
'BookTag-n5:6',
'Author-n1:1',
'Book-n3:1',
'Book-n4:1',
'Book-n5:1',
'Book-n5:2',
]);
expect(mock.mock.calls[0][0]).toMatch(`begin`);
expect(mock.mock.calls[1][0]).toMatch(`insert into "n1"."author" ("name") values ('a1') returning "id"`);
expect(mock.mock.calls[2][0]).toMatch(`insert into "n3"."book" ("author_id") values (1) returning "id"`);
expect(mock.mock.calls[3][0]).toMatch(`insert into "n4"."book" ("author_id") values (1) returning "id"`);
expect(mock.mock.calls[4][0]).toMatch(`insert into "n5"."book" ("author_id") values (1), (1) returning "id"`);
expect(mock.mock.calls[5][0]).toMatch(`update "n4"."book" set "based_on_id" = 1 where "id" = 1`);
expect(mock.mock.calls[6][0]).toMatch(`update "n5"."book" set "based_on_id" = 1 where "id" = 1`);
expect(mock.mock.calls[7][0]).toMatch(`commit`);
expect(mock.mock.calls[1][0]).toMatch(`insert into "n3"."book_tag" ("id") values (default), (default), (default) returning "id"`);
expect(mock.mock.calls[2][0]).toMatch(`insert into "n4"."book_tag" ("id") values (default), (default), (default) returning "id"`);
expect(mock.mock.calls[3][0]).toMatch(`insert into "n5"."book_tag" ("id") values (default), (default), (default), (default), (default), (default) returning "id"`);
expect(mock.mock.calls[4][0]).toMatch(`insert into "n1"."author" ("name") values ('a1') returning "id"`);
expect(mock.mock.calls[5][0]).toMatch(`insert into "n3"."book" ("author_id") values (1) returning "id"`);
expect(mock.mock.calls[6][0]).toMatch(`insert into "n4"."book" ("author_id") values (1) returning "id"`);
expect(mock.mock.calls[7][0]).toMatch(`insert into "n5"."book" ("author_id") values (1), (1) returning "id"`);
expect(mock.mock.calls[8][0]).toMatch(`update "n4"."book" set "based_on_id" = 1 where "id" = 1`);
expect(mock.mock.calls[9][0]).toMatch(`update "n5"."book" set "based_on_id" = 1 where "id" = 1`);
expect(mock.mock.calls[10][0]).toMatch(`insert into "n3"."book_tags" ("book_id", "book_tag_id") values (1, 1), (1, 2), (1, 3) returning "book_id", "book_tag_id"`);
expect(mock.mock.calls[11][0]).toMatch(`insert into "n4"."book_tags" ("book_id", "book_tag_id") values (1, 1), (1, 2), (1, 3) returning "book_id", "book_tag_id"`);
expect(mock.mock.calls[12][0]).toMatch(`insert into "n5"."book_tags" ("book_id", "book_tag_id") values (1, 1), (1, 2), (1, 3) returning "book_id", "book_tag_id"`);
expect(mock.mock.calls[13][0]).toMatch(`insert into "n5"."book_tags" ("book_id", "book_tag_id") values (2, 4), (2, 5), (2, 6) returning "book_id", "book_tag_id"`);
expect(mock.mock.calls[14][0]).toMatch(`commit`);
mock.mockReset();

// schema is saved after flush as if the entity was loaded from db
expect(wrap(author).getSchema()).toBe('n1');
expect(wrap(author.books[0]).getSchema()).toBe('n3');
expect(wrap(author.books[0].tags[0]).getSchema()).toBe('n3');
expect(wrap(author.books[1]).getSchema()).toBe('n4');
expect(wrap(author.books[1].tags[0]).getSchema()).toBe('n4');
expect(author.books[2].getSchema()).toBe('n5');
expect(author.books[2].tags[0].getSchema()).toBe('n5');
expect(author.books[3].getSchema()).toBe('n5');
expect(author.books[3].tags[0].getSchema()).toBe('n5');

// update entities and flush
author.name = 'new name';
author.books[0].name = 'new name 1';
author.books[0].tags[0].name = 'new name 1';
author.books[1].name = 'new name 2';
author.books[1].tags[0].name = 'new name 2';
author.books[2].name = 'new name 3';
author.books[2].tags[0].name = 'new name 3';
author.books[3].name = 'new name 4';
author.books[3].tags[0].name = 'new name 4';
await orm.em.flush();

expect(mock.mock.calls[0][0]).toMatch(`begin`);
expect(mock.mock.calls[1][0]).toMatch(`update "n1"."author" set "name" = 'new name' where "id" = 1`);
expect(mock.mock.calls[2][0]).toMatch(`update "n3"."book" set "name" = 'new name 1' where "id" = 1`);
expect(mock.mock.calls[3][0]).toMatch(`update "n4"."book" set "name" = 'new name 2' where "id" = 1`);
expect(mock.mock.calls[4][0]).toMatch(`update "n5"."book" set "name" = case when ("id" = 1) then 'new name 3' when ("id" = 2) then 'new name 4' else "name" end where "id" in (1, 2)`);
expect(mock.mock.calls[5][0]).toMatch(`commit`);
expect(mock.mock.calls[5][0]).toMatch(`update "n3"."book_tag" set "name" = 'new name 1' where "id" = 1`);
expect(mock.mock.calls[6][0]).toMatch(`update "n4"."book_tag" set "name" = 'new name 2' where "id" = 1`);
expect(mock.mock.calls[7][0]).toMatch(`update "n5"."book_tag" set "name" = case when ("id" = 1) then 'new name 3' when ("id" = 4) then 'new name 4' else "name" end where "id" in (1, 4)`);
expect(mock.mock.calls[8][0]).toMatch(`commit`);
mock.mockReset();

// remove entity
Expand All @@ -165,18 +228,27 @@ describe('multiple connected schemas in postgres', () => {
expect(mock.mock.calls[2][0]).toMatch(`delete from "n4"."book" where "id" in (1)`);
expect(mock.mock.calls[3][0]).toMatch(`delete from "n5"."book" where "id" in (1, 2)`);
expect(mock.mock.calls[4][0]).toMatch(`delete from "n1"."author" where "id" in (1)`);
expect(mock.mock.calls[5][0]).toMatch(`commit`);
expect(mock.mock.calls[5][0]).toMatch(`delete from "n3"."book_tag" where "id" in (1, 2, 3)`);
expect(mock.mock.calls[6][0]).toMatch(`delete from "n4"."book_tag" where "id" in (1, 2, 3)`);
expect(mock.mock.calls[7][0]).toMatch(`delete from "n5"."book_tag" where "id" in (1, 2, 3, 4, 5, 6)`);
expect(mock.mock.calls[8][0]).toMatch(`commit`);

orm.em.clear();

const n1 = await orm.em.find(Author, {});
const n3 = await orm.em.find(Book, {}, { schema: 'n3' });
const n4 = await orm.em.find(Book, {}, { schema: 'n4' });
const n5 = await orm.em.find(Book, {}, { schema: 'n5' });
const n3tags = await orm.em.find(BookTag, {}, { schema: 'n3' });
const n4tags = await orm.em.find(BookTag, {}, { schema: 'n4' });
const n5tags = await orm.em.find(BookTag, {}, { schema: 'n5' });
expect(n1).toHaveLength(0);
expect(n3).toHaveLength(0);
expect(n4).toHaveLength(0);
expect(n5).toHaveLength(0);
expect(n3tags).toHaveLength(0);
expect(n4tags).toHaveLength(0);
expect(n5tags).toHaveLength(0);
});

test(`schema diffing won't remove other schemas or tables`, async () => {
Expand Down

0 comments on commit 623dc91

Please sign in to comment.