diff --git a/packages/core/src/EntityManager.ts b/packages/core/src/EntityManager.ts index 911f82b350d2..ae591a9c3c4d 100644 --- a/packages/core/src/EntityManager.ts +++ b/packages/core/src/EntityManager.ts @@ -1075,7 +1075,7 @@ export class EntityManager { Object.keys(data).forEach(k => { const prop = meta.properties[k]; - if (prop && prop.kind === ReferenceKind.SCALAR && SCALAR_TYPES.includes(prop.type) && (prop.setter || !prop.getter)) { + if (prop && prop.kind === ReferenceKind.SCALAR && SCALAR_TYPES.includes(prop.type.toLowerCase()) && (prop.setter || !prop.getter)) { data[k] = this.validator.validateProperty(prop, data[k], data); } }); diff --git a/packages/core/src/entity/ArrayCollection.ts b/packages/core/src/entity/ArrayCollection.ts index 5ab3e4765305..4d79863e2b68 100644 --- a/packages/core/src/entity/ArrayCollection.ts +++ b/packages/core/src/entity/ArrayCollection.ts @@ -202,7 +202,7 @@ export class ArrayCollection { /* istanbul ignore if */ if (!meta) { - throw MetadataError.fromUnknownEntity((this.owner as object).constructor.name, 'Collection.property getter, maybe you just forgot to initialize the ORM?'); + throw MetadataError.fromUnknownEntity(this.owner.constructor.name, 'Collection.property getter, maybe you just forgot to initialize the ORM?'); } const field = Utils.keys(meta.properties).find(k => this.owner[k] === this); @@ -279,7 +279,7 @@ export class ArrayCollection { const hidden = ['items', 'owner', '_property', '_count', 'snapshot', '_populated', '_lazyInitialized']; hidden.forEach(k => delete object[k]); const ret = inspect(object, { depth }); - const name = `${this.constructor.name}<${this.property.type}>`; + const name = `${this.constructor.name}<${this.property?.type ?? 'unknown'}>`; return ret === '[Object]' ? `[${name}]` : name + ' ' + ret; } diff --git a/packages/core/src/entity/EntityAssigner.ts b/packages/core/src/entity/EntityAssigner.ts index 246870568f17..9e3c325f6fbb 100644 --- a/packages/core/src/entity/EntityAssigner.ts +++ b/packages/core/src/entity/EntityAssigner.ts @@ -90,7 +90,7 @@ export class EntityAssigner { return EntityAssigner.assignReference(entity, value, props[prop], em, options); } - if (props[prop]?.kind === ReferenceKind.SCALAR && SCALAR_TYPES.includes(props[prop].type) && (props[prop].setter || !props[prop].getter)) { + if (props[prop]?.kind === ReferenceKind.SCALAR && SCALAR_TYPES.includes(props[prop].type.toLowerCase()) && (props[prop].setter || !props[prop].getter)) { return entity[prop as keyof T] = validator.validateProperty(props[prop], value, entity); } diff --git a/packages/core/src/entity/EntityValidator.ts b/packages/core/src/entity/EntityValidator.ts index cd9b66a43561..bd5befa4f6bb 100644 --- a/packages/core/src/entity/EntityValidator.ts +++ b/packages/core/src/entity/EntityValidator.ts @@ -18,9 +18,9 @@ export class EntityValidator { this.validateCollection(entity, prop); } - const SCALAR_TYPES = ['string', 'number', 'boolean', 'Date']; + const SCALAR_TYPES = ['string', 'number', 'boolean', 'date']; - if (prop.kind !== ReferenceKind.SCALAR || !SCALAR_TYPES.includes(prop.type)) { + if (prop.kind !== ReferenceKind.SCALAR || !SCALAR_TYPES.includes(prop.type.toLowerCase())) { return; } diff --git a/packages/core/src/entity/Reference.ts b/packages/core/src/entity/Reference.ts index 732b67f4e68f..e6c0f2f76084 100644 --- a/packages/core/src/entity/Reference.ts +++ b/packages/core/src/entity/Reference.ts @@ -204,22 +204,22 @@ export function ref<T extends object, PKV extends Primary<T> = Primary<T>>(entit */ export function ref<T extends object, PKV extends Primary<T> = Primary<T>>(entityOrType?: T | Ref<T> | 
EntityClass<T>, pk?: T | PKV): Ref<T> | undefined | null { if (entityOrType == null) { - return pk as null; + return entityOrType as unknown as null; } - if (Utils.isEntity(pk)) { - return (pk as Dictionary).__helper.toReference(); + if (Utils.isEntity(entityOrType, true)) { + return helper(entityOrType).toReference() as Ref<T>; } - if (Utils.isEntityClass(entityOrType)) { - if (pk == null) { - return pk as null; - } + if (Utils.isEntity(pk, true)) { + return helper(pk).toReference() as Ref<T>; + } - return Reference.createFromPK(entityOrType as EntityClass<T>, pk); + if (pk == null) { + return pk as null; } - return (entityOrType as Dictionary).__helper.toReference(); + return Reference.createFromPK(entityOrType as EntityClass<T>, pk); } /** diff --git a/packages/core/src/enums.ts b/packages/core/src/enums.ts index bf79759f42a0..e9fe74858e4b 100644 --- a/packages/core/src/enums.ts +++ b/packages/core/src/enums.ts @@ -96,7 +96,7 @@ export enum QueryFlag { AUTO_JOIN_ONE_TO_ONE_OWNER = 'AUTO_JOIN_ONE_TO_ONE_OWNER', } -export const SCALAR_TYPES = ['string', 'number', 'boolean', 'Date', 'Buffer', 'RegExp']; +export const SCALAR_TYPES = ['string', 'number', 'boolean', 'date', 'buffer', 'regexp']; export enum ReferenceKind { SCALAR = 'scalar', diff --git a/packages/core/src/metadata/MetadataDiscovery.ts b/packages/core/src/metadata/MetadataDiscovery.ts index 5c4e374b3dbd..2e92eb712371 100644 --- a/packages/core/src/metadata/MetadataDiscovery.ts +++ b/packages/core/src/metadata/MetadataDiscovery.ts @@ -277,6 +277,9 @@ export class MetadataDiscovery { return; } + // infer default value from property initializer early, as the metadata provider might use some defaults, e.g. string for reflect-metadata + Utils.values(meta.properties).forEach(prop => this.inferDefaultValue(meta, prop)); + // if the definition is using EntitySchema we still want it to go through the metadata provider to validate no types are missing await this.metadataProvider.loadEntityMetadata(meta, meta.className); @@ -464,7 +467,7 @@ export class MetadataDiscovery { for (const prop of Object.values(meta.properties)) { this.initNullability(prop); this.applyNamingStrategy(meta, prop); - this.initDefaultValue(meta, prop); + this.initDefaultValue(prop); this.initVersionProperty(meta, prop); this.initCustomType(meta, prop); await this.initColumnType(meta, prop, meta.path); @@ -929,20 +932,37 @@ export class MetadataDiscovery { return '1'; } - private initDefaultValue(meta: EntityMetadata, prop: EntityProperty): void { + private inferDefaultValue(meta: EntityMetadata, prop: EntityProperty): void { + if (!meta.class) { + return; + } + try { // try to create two entity instances to detect the value is stable const entity1 = new meta.class(); const entity2 = new meta.class(); // we compare the two values by reference, this will discard things like `new Date()` - if (entity1[prop.name] != null && entity1[prop.name] === entity2[prop.name]) { + if (this.config.get('discovery').inferDefaultValues && prop.default === undefined && entity1[prop.name] != null && entity1[prop.name] === entity2[prop.name]) { prop.default ??= entity1[prop.name]; } + + // if the default value is null, infer nullability + if (entity1[prop.name] === null) { + prop.nullable ??= true; + } + + // but still use object values for type inference if not explicitly set, e.g. `createdAt = new Date()` + if (prop.kind === ReferenceKind.SCALAR && prop.type == null && entity1[prop.name] != null) { + const type = Utils.getObjectType(entity1[prop.name]); + prop.type = type === 'object' ? 
'string' : type; + } } catch { // ignore } + } + private initDefaultValue(prop: EntityProperty): void { if (prop.defaultRaw || !('default' in prop)) { return; } @@ -958,7 +978,7 @@ export class MetadataDiscovery { private initVersionProperty(meta: EntityMetadata, prop: EntityProperty): void { if (prop.version) { - this.initDefaultValue(meta, prop); + this.initDefaultValue(prop); meta.versionProperty = prop.name; prop.defaultRaw = this.getDefaultVersionValue(prop); } diff --git a/packages/core/src/platforms/Platform.ts b/packages/core/src/platforms/Platform.ts index fbcaad328eb6..52763aadfe84 100644 --- a/packages/core/src/platforms/Platform.ts +++ b/packages/core/src/platforms/Platform.ts @@ -260,6 +260,7 @@ export abstract class Platform { case 'numeric': return Type.getType(DecimalType); case 'boolean': return Type.getType(BooleanType); case 'blob': + case 'uint8array': case 'buffer': return Type.getType(BlobType); case 'uuid': return Type.getType(UuidType); case 'date': return Type.getType(DateType); diff --git a/packages/core/src/utils/Configuration.ts b/packages/core/src/utils/Configuration.ts index c52446c2986b..1e823c5f32ef 100644 --- a/packages/core/src/utils/Configuration.ts +++ b/packages/core/src/utils/Configuration.ts @@ -53,6 +53,7 @@ export class Configuration { requireEntitiesArray: false, alwaysAnalyseProperties: true, disableDynamicFileAccess: false, + inferDefaultValues: true, }, strict: false, validate: false, @@ -488,6 +489,7 @@ export interface MikroORMOptions ex requireEntitiesArray?: boolean; alwaysAnalyseProperties?: boolean; disableDynamicFileAccess?: boolean; + inferDefaultValues?: boolean; getMappedType?: (type: string, platform: Platform) => Type | undefined; }; driver?: { new(config: Configuration): D }; diff --git a/packages/core/src/utils/Utils.ts b/packages/core/src/utils/Utils.ts index e65980271236..c038116c304e 100644 --- a/packages/core/src/utils/Utils.ts +++ b/packages/core/src/utils/Utils.ts @@ -13,7 +13,6 @@ import { simple as walk } from 'acorn-walk'; import { clone } from './clone'; import type { Dictionary, - EntityClass, EntityData, EntityDictionary, EntityKey, @@ -594,21 +593,6 @@ export class Utils { return !!data.__entity; } - /** - * Checks whether given object is an entity instance. - */ - static isEntityClass(data: any, allowReference = false): data is EntityClass { - if (!('prototype' in data)) { - return false; - } - - if (allowReference && !!data.prototype.__reference) { - return true; - } - - return !!data.prototype.__entity; - } - /** * Checks whether the argument is ObjectId instance */ diff --git a/packages/knex/src/schema/SchemaHelper.ts b/packages/knex/src/schema/SchemaHelper.ts index 652f7f692af1..cd5028f88594 100644 --- a/packages/knex/src/schema/SchemaHelper.ts +++ b/packages/knex/src/schema/SchemaHelper.ts @@ -163,7 +163,7 @@ export abstract class SchemaHelper { if (changedProperties) { Utils.runIfNotEmpty(() => col.defaultTo(column.default == null ? null : knex.raw(column.default)), guard('default')); } else { - Utils.runIfNotEmpty(() => col.defaultTo(column.default == null ? 
null : knex.raw(column.default)), column.default !== undefined); + Utils.runIfNotEmpty(() => col.defaultTo(knex.raw(column.default!)), column.default != null && column.default !== 'null'); } return col; diff --git a/packages/mongodb/src/MongoPlatform.ts b/packages/mongodb/src/MongoPlatform.ts index 83df9ffc35e0..cfb14bc24e8d 100644 --- a/packages/mongodb/src/MongoPlatform.ts +++ b/packages/mongodb/src/MongoPlatform.ts @@ -14,6 +14,7 @@ export class MongoPlatform extends Platform { override setConfig(config: Configuration) { config.set('autoJoinOneToOneOwner', false); + config.get('discovery').inferDefaultValues = false; super.setConfig(config); } diff --git a/tests/entities-sql/Author2.ts b/tests/entities-sql/Author2.ts index 461c2eed6de3..470dd51839f8 100644 --- a/tests/entities-sql/Author2.ts +++ b/tests/entities-sql/Author2.ts @@ -19,10 +19,10 @@ export class Author2 extends BaseEntity2 { static afterDestroyCalled = 0; @Property({ length: 3, defaultRaw: 'current_timestamp(3)' }) - createdAt: Date = new Date(); + createdAt = new Date(); @Property({ onUpdate: () => new Date(), length: 3, defaultRaw: 'current_timestamp(3)' }) - updatedAt: Date = new Date(); + updatedAt = new Date(); @Property() name: string; @@ -35,8 +35,8 @@ export class Author2 extends BaseEntity2 { age?: number; @Index() - @Property({ default: false }) - termsAccepted: boolean = false; + @Property() + termsAccepted = false; @Property({ nullable: true }) optional?: boolean; @@ -51,10 +51,10 @@ export class Author2 extends BaseEntity2 { bornTime?: string; @OneToMany({ entity: () => Book2, mappedBy: 'author', orderBy: { title: QueryOrder.ASC } }) - books!: Collection; + books = new Collection(this); @OneToMany({ entity: () => Book2, mappedBy: 'author', strategy: LoadStrategy.JOINED, orderBy: { title: QueryOrder.ASC } }) - books2!: Collection; + books2 = new Collection(this); @OneToOne({ entity: () => Address2, mappedBy: address => address.author, cascade: [Cascade.ALL] }) address?: Address2; diff --git a/tests/entities-sql/BaseEntity2.ts b/tests/entities-sql/BaseEntity2.ts index b2adb8d6cb68..4fcc3aeeb73f 100644 --- a/tests/entities-sql/BaseEntity2.ts +++ b/tests/entities-sql/BaseEntity2.ts @@ -1,4 +1,4 @@ -import { BeforeCreate, Collection, PrimaryKey, Property, ReferenceKind, Utils, wrap } from '@mikro-orm/core'; +import { BeforeCreate, PrimaryKey, Property } from '@mikro-orm/core'; export abstract class BaseEntity2 { @@ -6,17 +6,7 @@ export abstract class BaseEntity2 { id!: number; @Property({ persist: false }) - hookTest: boolean = false; - - protected constructor() { - const props = wrap(this, true).__meta.properties; - - Utils.keys(props).forEach(prop => { - if ([ReferenceKind.ONE_TO_MANY, ReferenceKind.MANY_TO_MANY].includes(props[prop].kind)) { - (this as any)[prop] = new Collection(this); - } - }); - } + hookTest = false; @BeforeCreate() baseBeforeCreate() { diff --git a/tests/entities-sql/BaseEntity22.ts b/tests/entities-sql/BaseEntity22.ts index 8acf1a7d0bc0..3be881036714 100644 --- a/tests/entities-sql/BaseEntity22.ts +++ b/tests/entities-sql/BaseEntity22.ts @@ -1,18 +1,5 @@ -import type { AnyEntity } from '@mikro-orm/core'; -import { Collection, ReferenceKind, Utils, wrap } from '@mikro-orm/core'; - export abstract class BaseEntity22 { abstract id: number; - constructor() { - const props = wrap(this, true).__meta.properties; - - Utils.keys(props).forEach(prop => { - if ([ReferenceKind.ONE_TO_MANY, ReferenceKind.MANY_TO_MANY].includes(props[prop].kind)) { - (this as any)[prop] = new Collection(this as 
AnyEntity); - } - }); - } - } diff --git a/tests/entities-sql/Book2.ts b/tests/entities-sql/Book2.ts index becb0fb68287..01bfc2f5ec58 100644 --- a/tests/entities-sql/Book2.ts +++ b/tests/entities-sql/Book2.ts @@ -36,7 +36,7 @@ export class Book2 { uuid = v4(); @Property({ defaultRaw: 'current_timestamp(3)', length: 3 }) - createdAt: Date = new Date(); + createdAt = new Date(); @Index({ type: 'fulltext' }) @Property({ nullable: true, default: '' }) diff --git a/tests/entities-sql/BookTag2.ts b/tests/entities-sql/BookTag2.ts index 337fe076b151..c2f06eee841e 100644 --- a/tests/entities-sql/BookTag2.ts +++ b/tests/entities-sql/BookTag2.ts @@ -1,14 +1,4 @@ -import { - BigIntType, - Collection, - Entity, - ManyToMany, - PrimaryKey, - Property, - ReferenceKind, - Utils, - wrap, -} from '@mikro-orm/core'; +import { BigIntType, Collection, Entity, ManyToMany, PrimaryKey, Property } from '@mikro-orm/core'; import { Book2 } from './Book2'; @Entity() @@ -21,20 +11,12 @@ export class BookTag2 { name: string; @ManyToMany(() => Book2, book => book.tags) - books!: Collection; + books = new Collection(this); @ManyToMany(() => Book2, book => book.tagsUnordered) - booksUnordered!: Collection; + booksUnordered = new Collection(this); constructor(name: string) { - const props = wrap(this, true).__meta.properties; - - Utils.keys(props).forEach(prop => { - if ([ReferenceKind.ONE_TO_MANY, ReferenceKind.MANY_TO_MANY].includes(props[prop].kind)) { - (this as any)[prop] = new Collection(this); - } - }); - this.name = name; } diff --git a/tests/entities-sql/Publisher2.ts b/tests/entities-sql/Publisher2.ts index 0353d665c9be..05b5aa6f76c8 100644 --- a/tests/entities-sql/Publisher2.ts +++ b/tests/entities-sql/Publisher2.ts @@ -32,10 +32,10 @@ export class Publisher2 extends BaseEntity2 { name: string; @OneToMany(() => Book2, 'publisher', { joinColumn: 'book_uuid', inverseJoinColumn: 'publisher_id' }) - books!: Collection; + books = new Collection(this); @ManyToMany({ entity: () => Test2, pivotTable: 'publisher2_tests', fixedOrder: true }) - tests!: Collection; + tests = new Collection(this); @Enum(() => PublisherType) type = PublisherType.LOCAL; diff --git a/tests/entities/BaseEntity.ts b/tests/entities/BaseEntity.ts index 0f3a762a9e2a..9a541f697c2a 100644 --- a/tests/entities/BaseEntity.ts +++ b/tests/entities/BaseEntity.ts @@ -23,16 +23,16 @@ export abstract class BaseEntity new Date() }) - updatedAt: Date = new Date(); + updatedAt = new Date(); @Property() foo?: string; @Property({ persist: false }) - hookTest: boolean = false; + hookTest = false; @BeforeCreate() baseBeforeCreate() { diff --git a/tests/features/embeddables/polymorphic-embedded-entities.sqlite.test.ts b/tests/features/embeddables/polymorphic-embedded-entities.sqlite.test.ts index 9ef644de7521..e1ffcc172e73 100644 --- a/tests/features/embeddables/polymorphic-embedded-entities.sqlite.test.ts +++ b/tests/features/embeddables/polymorphic-embedded-entities.sqlite.test.ts @@ -23,7 +23,7 @@ abstract class Animal { @Embeddable({ discriminatorValue: AnimalType.CAT }) class Cat extends Animal { - @Property() + @Property({ default: null }) canMeow? = true; constructor(name: string) { @@ -37,7 +37,7 @@ class Cat extends Animal { @Embeddable({ discriminatorValue: AnimalType.DOG }) class Dog extends Animal { - @Property() + @Property({ default: null }) canBark? 
= true; constructor(name: string) { diff --git a/tests/features/entity-generator/__snapshots__/EntityGenerator.test.ts.snap b/tests/features/entity-generator/__snapshots__/EntityGenerator.test.ts.snap index 2378ac894fb8..202eaf9e95f2 100644 --- a/tests/features/entity-generator/__snapshots__/EntityGenerator.test.ts.snap +++ b/tests/features/entity-generator/__snapshots__/EntityGenerator.test.ts.snap @@ -547,8 +547,8 @@ import { Publisher2Tests } from './Publisher2Tests'; export class Publisher2 { id!: number; name!: string; - type!: Publisher2Type; - type2!: Publisher2Type2; + type: Publisher2Type = Publisher2Type.LOCAL; + type2: Publisher2Type2 = Publisher2Type2.LOCAL; enum1?: number; enum2?: number; enum3?: number; @@ -582,9 +582,9 @@ export const Publisher2Schema = new EntitySchema({ class: Publisher2, properties: { id: { primary: true, type: 'number' }, - name: { type: 'string', length: 255 }, - type: { enum: true, items: () => Publisher2Type }, - type2: { enum: true, items: () => Publisher2Type2 }, + name: { type: 'string', length: 255, default: 'asd' }, + type: { enum: true, items: () => Publisher2Type, default: 'local' }, + type2: { enum: true, items: () => Publisher2Type2, default: 'LOCAL' }, enum1: { type: 'number', columnType: 'tinyint', nullable: true }, enum2: { type: 'number', columnType: 'tinyint', nullable: true }, enum3: { type: 'number', columnType: 'tinyint', nullable: true }, @@ -1115,22 +1115,24 @@ export class FooParam2 { } ", - "import { Entity, Enum, PrimaryKey, Property } from '@mikro-orm/core'; + "import { Entity, Enum, OptionalProps, PrimaryKey, Property } from '@mikro-orm/core'; @Entity() export class Publisher2 { + [OptionalProps]?: 'name' | 'type' | 'type2'; + @PrimaryKey() id!: number; - @Property({ length: 255 }) + @Property({ length: 255, default: 'asd' }) name!: string; - @Enum({ items: () => Publisher2Type }) - type!: Publisher2Type; + @Enum({ items: () => Publisher2Type, default: 'local' }) + type: Publisher2Type = Publisher2Type.LOCAL; - @Enum({ items: () => Publisher2Type2 }) - type2!: Publisher2Type2; + @Enum({ items: () => Publisher2Type2, default: 'LOCAL' }) + type2: Publisher2Type2 = Publisher2Type2.LOCAL; @Property({ columnType: 'tinyint', nullable: true }) enum1?: number; @@ -2223,24 +2225,26 @@ export class FooParam2 { } ", - "import { Collection, Entity, Enum, OneToMany, PrimaryKey, Property } from '@mikro-orm/core'; + "import { Collection, Entity, Enum, OneToMany, OptionalProps, PrimaryKey, Property } from '@mikro-orm/core'; import { Book2 } from './Book2'; import { Publisher2Tests } from './Publisher2Tests'; @Entity() export class Publisher2 { + [OptionalProps]?: 'name' | 'type' | 'type2'; + @PrimaryKey() id!: number; - @Property({ length: 255 }) + @Property({ length: 255, default: 'asd' }) name!: string; - @Enum({ items: () => Publisher2Type }) - type!: Publisher2Type; + @Enum({ items: () => Publisher2Type, default: 'local' }) + type: Publisher2Type = Publisher2Type.LOCAL; - @Enum({ items: () => Publisher2Type2 }) - type2!: Publisher2Type2; + @Enum({ items: () => Publisher2Type2, default: 'LOCAL' }) + type2: Publisher2Type2 = Publisher2Type2.LOCAL; @Property({ columnType: 'tinyint', nullable: true }) enum1?: number; @@ -2799,24 +2803,26 @@ export class FooParam2 { } ", - "import { Collection, Entity, Enum, OneToMany, PrimaryKey, Property } from '@mikro-orm/core'; + "import { Collection, Entity, Enum, OneToMany, OptionalProps, PrimaryKey, Property } from '@mikro-orm/core'; import { Book2 } from './Book2'; import { Publisher2Tests } from 
'./Publisher2Tests'; @Entity() export class Publisher2 { + [OptionalProps]?: 'name' | 'type' | 'type2'; + @PrimaryKey() id!: number; - @Property({ length: 255 }) + @Property({ length: 255, default: 'asd' }) name!: string; - @Enum({ items: () => Publisher2Type }) - type!: Publisher2Type; + @Enum({ items: () => Publisher2Type, default: 'local' }) + type: Publisher2Type = Publisher2Type.LOCAL; - @Enum({ items: () => Publisher2Type2 }) - type2!: Publisher2Type2; + @Enum({ items: () => Publisher2Type2, default: 'LOCAL' }) + type2: Publisher2Type2 = Publisher2Type2.LOCAL; @Property({ columnType: 'tinyint', nullable: true }) enum1?: number; @@ -3312,22 +3318,24 @@ export class FooParam2 { } ", - "import { Entity, Enum, PrimaryKey, Property } from '@mikro-orm/core'; + "import { Entity, Enum, OptionalProps, PrimaryKey, Property } from '@mikro-orm/core'; @Entity() export class Publisher2 { + [OptionalProps]?: 'name' | 'type' | 'type2'; + @PrimaryKey() id!: number; - @Property({ length: 255 }) + @Property({ length: 255, default: 'asd' }) name!: string; - @Enum({ items: () => Publisher2Type }) - type!: Publisher2Type; + @Enum({ items: () => Publisher2Type, default: 'local' }) + type: Publisher2Type = Publisher2Type.LOCAL; - @Enum({ items: () => Publisher2Type2 }) - type2!: Publisher2Type2; + @Enum({ items: () => Publisher2Type2, default: 'LOCAL' }) + type2: Publisher2Type2 = Publisher2Type2.LOCAL; @Property({ columnType: 'tinyint', nullable: true }) enum1?: number; diff --git a/tests/features/migrations/__snapshots__/Migrator.postgres.test.ts.snap b/tests/features/migrations/__snapshots__/Migrator.postgres.test.ts.snap index 06cd505fae77..6957e4b80a27 100644 --- a/tests/features/migrations/__snapshots__/Migrator.postgres.test.ts.snap +++ b/tests/features/migrations/__snapshots__/Migrator.postgres.test.ts.snap @@ -19,9 +19,9 @@ export class Migration20191013214813 extends Migration { this.addSql('create table "custom"."foo_param2" ("bar_id" int not null, "baz_id" int not null, "value" varchar(255) not null, "version" timestamptz(3) not null default current_timestamp(3), constraint "foo_param2_pkey" primary key ("bar_id", "baz_id"));'); - this.addSql('create table "custom"."publisher2" ("id" serial primary key, "name" varchar(255) not null, "type" text check ("type" in (\\'local\\', \\'global\\')) not null, "type2" text check ("type2" in (\\'LOCAL\\', \\'GLOBAL\\')) not null, "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in (\\'a\\', \\'b\\', \\'c\\')) null, "enum5" text check ("enum5" in (\\'a\\')) null);'); + this.addSql('create table "custom"."publisher2" ("id" serial primary key, "name" varchar(255) not null default \\'asd\\', "type" text check ("type" in (\\'local\\', \\'global\\')) not null default \\'local\\', "type2" text check ("type2" in (\\'LOCAL\\', \\'GLOBAL\\')) not null default \\'LOCAL\\', "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in (\\'a\\', \\'b\\', \\'c\\')) null, "enum5" text check ("enum5" in (\\'a\\')) null);'); - this.addSql('create table "custom"."author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null default null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid 
null, "favourite_author_id" int null);'); + this.addSql('create table "custom"."author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null);'); this.addSql('create index "custom_email_index_name" on "custom"."author2" ("email");'); this.addSql('alter table "custom"."author2" add constraint "custom_email_unique_name" unique ("email");'); this.addSql('create index "author2_terms_accepted_index" on "custom"."author2" ("terms_accepted");'); @@ -110,9 +110,9 @@ export class Migration20191013214813 extends Migration { "", "create table "custom"."foo_param2" ("bar_id" int not null, "baz_id" int not null, "value" varchar(255) not null, "version" timestamptz(3) not null default current_timestamp(3), constraint "foo_param2_pkey" primary key ("bar_id", "baz_id"));", "", - "create table "custom"."publisher2" ("id" serial primary key, "name" varchar(255) not null, "type" text check ("type" in ('local', 'global')) not null, "type2" text check ("type2" in ('LOCAL', 'GLOBAL')) not null, "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in ('a', 'b', 'c')) null, "enum5" text check ("enum5" in ('a')) null);", + "create table "custom"."publisher2" ("id" serial primary key, "name" varchar(255) not null default 'asd', "type" text check ("type" in ('local', 'global')) not null default 'local', "type2" text check ("type2" in ('LOCAL', 'GLOBAL')) not null default 'LOCAL', "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in ('a', 'b', 'c')) null, "enum5" text check ("enum5" in ('a')) null);", "", - "create table "custom"."author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null default null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null);", + "create table "custom"."author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null);", "create index "custom_email_index_name" on "custom"."author2" ("email");", "alter table "custom"."author2" add constraint "custom_email_unique_name" unique ("email");", "create index "author2_terms_accepted_index" on "custom"."author2" ("terms_accepted");", @@ -207,9 +207,9 @@ export class Migration20191013214813 extends Migration { this.addSql('create table "custom"."foo_param2" ("bar_id" int not null, "baz_id" int not null, "value" varchar(255) not null, "version" timestamptz(3) not null default current_timestamp(3), constraint "foo_param2_pkey" primary key ("bar_id", 
"baz_id"));'); - this.addSql('create table "custom"."publisher2" ("id" serial primary key, "name" varchar(255) not null, "type" text check ("type" in (\\'local\\', \\'global\\')) not null, "type2" text check ("type2" in (\\'LOCAL\\', \\'GLOBAL\\')) not null, "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in (\\'a\\', \\'b\\', \\'c\\')) null, "enum5" text check ("enum5" in (\\'a\\')) null);'); + this.addSql('create table "custom"."publisher2" ("id" serial primary key, "name" varchar(255) not null default \\'asd\\', "type" text check ("type" in (\\'local\\', \\'global\\')) not null default \\'local\\', "type2" text check ("type2" in (\\'LOCAL\\', \\'GLOBAL\\')) not null default \\'LOCAL\\', "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in (\\'a\\', \\'b\\', \\'c\\')) null, "enum5" text check ("enum5" in (\\'a\\')) null);'); - this.addSql('create table "custom"."author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null default null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null);'); + this.addSql('create table "custom"."author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null);'); this.addSql('create index "custom_email_index_name" on "custom"."author2" ("email");'); this.addSql('alter table "custom"."author2" add constraint "custom_email_unique_name" unique ("email");'); this.addSql('create index "author2_terms_accepted_index" on "custom"."author2" ("terms_accepted");'); @@ -298,9 +298,9 @@ export class Migration20191013214813 extends Migration { "", "create table "custom"."foo_param2" ("bar_id" int not null, "baz_id" int not null, "value" varchar(255) not null, "version" timestamptz(3) not null default current_timestamp(3), constraint "foo_param2_pkey" primary key ("bar_id", "baz_id"));", "", - "create table "custom"."publisher2" ("id" serial primary key, "name" varchar(255) not null, "type" text check ("type" in ('local', 'global')) not null, "type2" text check ("type2" in ('LOCAL', 'GLOBAL')) not null, "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in ('a', 'b', 'c')) null, "enum5" text check ("enum5" in ('a')) null);", + "create table "custom"."publisher2" ("id" serial primary key, "name" varchar(255) not null default 'asd', "type" text check ("type" in ('local', 'global')) not null default 'local', "type2" text check ("type2" in ('LOCAL', 'GLOBAL')) not null default 'LOCAL', "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in ('a', 'b', 'c')) null, "enum5" text check ("enum5" in ('a')) null);", "", - "create table "custom"."author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) 
not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null default null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null);", + "create table "custom"."author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null);", "create index "custom_email_index_name" on "custom"."author2" ("email");", "alter table "custom"."author2" add constraint "custom_email_unique_name" unique ("email");", "create index "author2_terms_accepted_index" on "custom"."author2" ("terms_accepted");", @@ -375,365 +375,3 @@ export class Migration20191013214813 extends Migration { "fileName": "Migration20191013214813.ts", } `; - -exports[`Migrator (postgres) generate js schema migration: migration-js-dump 1`] = ` -{ - "code": "'use strict'; -Object.defineProperty(exports, '__esModule', { value: true }); -const { Migration } = require('@mikro-orm/migrations'); - -class Migration20191013214813 extends Migration { - - async up() { - this.addSql('alter table "custom"."book2" alter column "double" type double precision using ("double"::double precision);'); - this.addSql('alter table "custom"."book2" drop column "foo";'); - - this.addSql('alter table "custom"."test2" drop column "path";'); - } - - async down() { - this.addSql('alter table "custom"."book2" add column "foo" varchar null default \\'lol\\';'); - this.addSql('alter table "custom"."book2" alter column "double" type numeric using ("double"::numeric);'); - - this.addSql('alter table "custom"."test2" add column "path" polygon null default null;'); - } - -} -exports.Migration20191013214813 = Migration20191013214813; -", - "diff": { - "down": [ - "alter table "custom"."book2" add column "foo" varchar null default 'lol';", - "alter table "custom"."book2" alter column "double" type numeric using ("double"::numeric);", - "", - "alter table "custom"."test2" add column "path" polygon null default null;", - ], - "up": [ - "alter table "custom"."book2" alter column "double" type double precision using ("double"::double precision);", - "alter table "custom"."book2" drop column "foo";", - "", - "alter table "custom"."test2" drop column "path";", - ], - }, - "fileName": "Migration20191013214813.js", -} -`; - -exports[`Migrator (postgres) generate migration with custom migrator: migration-ts-dump 1`] = ` -{ - "code": "// this file was generated via custom migration generator - -import { Migration } from '@mikro-orm/migrations'; - -export class Migration20191013214813 extends Migration { - - async up(): Promise { - this.addSql('alter table - "custom"."book2" - alter column - "double" type double precision using ("double" :: double precision);'); - this.addSql('alter table - "custom"."book2" drop column "foo";'); - - this.addSql('alter table - "custom"."test2" drop column "path";'); - } - - async down(): Promise { - this.addSql('alter table - "custom"."book2" - add - column "foo" varchar null default \\'lol\\';'); - this.addSql('alter table - "custom"."book2" - alter column - 
"double" type numeric using ("double" :: numeric);'); - - this.addSql('alter table - "custom"."test2" - add - column "path" polygon null default null;'); - } - -} -", - "diff": { - "down": [ - "alter table "custom"."book2" add column "foo" varchar null default 'lol';", - "alter table "custom"."book2" alter column "double" type numeric using ("double"::numeric);", - "", - "alter table "custom"."test2" add column "path" polygon null default null;", - ], - "up": [ - "alter table "custom"."book2" alter column "double" type double precision using ("double"::double precision);", - "alter table "custom"."book2" drop column "foo";", - "", - "alter table "custom"."test2" drop column "path";", - ], - }, - "fileName": "Migration20191013214813.ts", -} -`; - -exports[`Migrator (postgres) generate migration with custom name: migration-dump 1`] = ` -{ - "code": "import { Migration } from '@mikro-orm/migrations'; - -export class Migration20191013214813 extends Migration { - - async up(): Promise { - this.addSql('alter table "custom"."book2" alter column "double" type double precision using ("double"::double precision);'); - this.addSql('alter table "custom"."book2" drop column "foo";'); - - this.addSql('alter table "custom"."test2" drop column "path";'); - } - - async down(): Promise { - this.addSql('alter table "custom"."book2" add column "foo" varchar null default \\'lol\\';'); - this.addSql('alter table "custom"."book2" alter column "double" type numeric using ("double"::numeric);'); - - this.addSql('alter table "custom"."test2" add column "path" polygon null default null;'); - } - -} -", - "diff": { - "down": [ - "alter table "custom"."book2" add column "foo" varchar null default 'lol';", - "alter table "custom"."book2" alter column "double" type numeric using ("double"::numeric);", - "", - "alter table "custom"."test2" add column "path" polygon null default null;", - ], - "up": [ - "alter table "custom"."book2" alter column "double" type double precision using ("double"::double precision);", - "alter table "custom"."book2" drop column "foo";", - "", - "alter table "custom"."test2" drop column "path";", - ], - }, - "fileName": "migration-20191013214813.ts", -} -`; - -exports[`Migrator (postgres) generate migration with snapshot: migration-snapshot-dump-1 1`] = ` -{ - "code": "import { Migration } from '@mikro-orm/migrations'; - -export class Migration20191013214813 extends Migration { - - async up(): Promise { - this.addSql('alter table "custom"."book2" alter column "double" type double precision using ("double"::double precision);'); - this.addSql('alter table "custom"."book2" drop column "foo";'); - - this.addSql('alter table "custom"."test2" drop column "path";'); - } - - async down(): Promise { - this.addSql('alter table "custom"."book2" add column "foo" varchar null default \\'lol\\';'); - this.addSql('alter table "custom"."book2" alter column "double" type numeric using ("double"::numeric);'); - - this.addSql('alter table "custom"."test2" add column "path" polygon null default null;'); - } - -} -", - "diff": { - "down": [ - "alter table "custom"."book2" add column "foo" varchar null default 'lol';", - "alter table "custom"."book2" alter column "double" type numeric using ("double"::numeric);", - "", - "alter table "custom"."test2" add column "path" polygon null default null;", - ], - "up": [ - "alter table "custom"."book2" alter column "double" type double precision using ("double"::double precision);", - "alter table "custom"."book2" drop column "foo";", - "", - "alter table "custom"."test2" 
drop column "path";", - ], - }, - "fileName": "Migration20191013214813.ts", -} -`; - -exports[`Migrator (postgres) generate migration with snapshot: migration-snapshot-dump-2 1`] = ` -{ - "code": "", - "diff": { - "down": [], - "up": [], - }, - "fileName": "", -} -`; - -exports[`Migrator (postgres) generate schema migration: migration-dump 1`] = ` -{ - "code": "import { Migration } from '@mikro-orm/migrations'; - -export class Migration20191013214813 extends Migration { - - async up(): Promise { - this.addSql('alter table "custom"."book2" alter column "double" type double precision using ("double"::double precision);'); - this.addSql('alter table "custom"."book2" drop column "foo";'); - - this.addSql('alter table "custom"."test2" drop column "path";'); - } - - async down(): Promise { - this.addSql('alter table "custom"."book2" add column "foo" varchar null default \\'lol\\';'); - this.addSql('alter table "custom"."book2" alter column "double" type numeric using ("double"::numeric);'); - - this.addSql('alter table "custom"."test2" add column "path" polygon null default null;'); - } - -} -", - "diff": { - "down": [ - "alter table "custom"."book2" add column "foo" varchar null default 'lol';", - "alter table "custom"."book2" alter column "double" type numeric using ("double"::numeric);", - "", - "alter table "custom"."test2" add column "path" polygon null default null;", - ], - "up": [ - "alter table "custom"."book2" alter column "double" type double precision using ("double"::double precision);", - "alter table "custom"."book2" drop column "foo";", - "", - "alter table "custom"."test2" drop column "path";", - ], - }, - "fileName": "Migration20191013214813.ts", -} -`; - -exports[`Migrator (postgres) up/down params [all or nothing disabled]: all-or-nothing-disabled 1`] = ` -[ - "select 1 from pg_database where datname = 'mikro_orm_test_migrations'", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select schema_name from information_schema.schemata where "schema_name" not like 'pg_%' and "schema_name" not like 'crdb_%' and "schema_name" not like '_timescaledb_%' and "schema_name" not in ('information_schema', 'tiger', 'topology') order by schema_name", - "create table "custom"."mikro_orm_migrations" ("id" serial primary key, "name" varchar(255), "executed_at" timestamptz default current_timestamp)", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "begin", - "select 1", - "commit", - "insert into "custom"."mikro_orm_migrations" ("name") values ($1)", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 
'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "begin", - "commit", - "delete from "custom"."mikro_orm_migrations" where "name" in ($1, $2)", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "begin", - "select 1", - "commit", - "insert into "custom"."mikro_orm_migrations" ("name") values ($1)", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "begin", - "commit", - "delete from "custom"."mikro_orm_migrations" where "name" in ($1, $2)", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "begin", - "select 1", - "commit", - "insert into "custom"."mikro_orm_migrations" ("name") values ($1)", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 
'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "begin", - "commit", - "delete from "custom"."mikro_orm_migrations" where "name" in ($1, $2)", -] -`; - -exports[`Migrator (postgres) up/down params [all or nothing enabled]: all-or-nothing 1`] = ` -[ - "select 1 from pg_database where datname = 'mikro_orm_test_migrations'", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select schema_name from information_schema.schemata where "schema_name" not like 'pg_%' and "schema_name" not like 'crdb_%' and "schema_name" not like '_timescaledb_%' and "schema_name" not in ('information_schema', 'tiger', 'topology') order by schema_name", - "create table "custom"."mikro_orm_migrations" ("id" serial primary key, "name" varchar(255), "executed_at" timestamptz default current_timestamp)", - "begin", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "savepointtrx\\d+", - "select 1", - "release savepointtrx\\d+", - "insert into "custom"."mikro_orm_migrations" ("name") values ($1)", - "commit", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "begin", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "savepointtrx\\d+", - "release savepointtrx\\d+", - "delete from "custom"."mikro_orm_migrations" where "name" in ($1, $2)", - "commit", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "begin", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "savepointtrx\\d+", - "select 1", - "release savepointtrx\\d+", - "insert into "custom"."mikro_orm_migrations" ("name") values ($1)", - "commit", - "select table_name, table_schema as schema_name, 
(select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "begin", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "savepointtrx\\d+", - "release savepointtrx\\d+", - "delete from "custom"."mikro_orm_migrations" where "name" in ($1, $2)", - "commit", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "begin", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "savepointtrx\\d+", - "select 1", - "release savepointtrx\\d+", - "insert into "custom"."mikro_orm_migrations" ("name") values ($1)", - "commit", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "begin", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "commit", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "begin", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "savepointtrx\\d+", - "release savepointtrx\\d+", - "delete from "custom"."mikro_orm_migrations" where "name" in ($1, $2)", - "commit", -] -`; - -exports[`Migrator (postgres) up/down with explicit transaction: explicit-tx 1`] = ` -[ - "begin", - "select 1 from pg_database where datname = 'mikro_orm_test_migrations'", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables 
where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select schema_name from information_schema.schemata where "schema_name" not like 'pg_%' and "schema_name" not like 'crdb_%' and "schema_name" not like '_timescaledb_%' and "schema_name" not in ('information_schema', 'tiger', 'topology') order by schema_name", - "create table "custom"."mikro_orm_migrations" ("id" serial primary key, "name" varchar(255), "executed_at" timestamptz default current_timestamp)", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "savepointtrx_xx", - "select 1", - "release savepointtrx_xx", - "insert into "custom"."mikro_orm_migrations" ("name") values ($1)", - "savepointtrx_xx", - "select 1", - "release savepointtrx_xx", - "insert into "custom"."mikro_orm_migrations" ("name") values ($1)", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "savepointtrx_xx", - "release savepointtrx_xx", - "delete from "custom"."mikro_orm_migrations" where "name" in ($1, $2)", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "savepointtrx_xx", - "release savepointtrx_xx", - "delete from "custom"."mikro_orm_migrations" where "name" in ($1, $2)", - "select table_name, table_schema as schema_name, (select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment from information_schema.tables where "table_schema" not like 'pg_%' and "table_schema" not like 'crdb_%' and "table_schema" not like '_timescaledb_%' and "table_schema" not in ('information_schema', 'tiger', 'topology') and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' order by table_name", - "select * from "custom"."mikro_orm_migrations" order by "id" asc", - "commit", -] -`; diff --git a/tests/features/migrations/__snapshots__/Migrator.test.ts.snap b/tests/features/migrations/__snapshots__/Migrator.test.ts.snap index ce0fce2d1df5..2160464e3d3d 100644 --- 
a/tests/features/migrations/__snapshots__/Migrator.test.ts.snap +++ b/tests/features/migrations/__snapshots__/Migrator.test.ts.snap @@ -61,7 +61,7 @@ export class Migration20191013214813 extends Migration { this.addSql('alter table \`foo_param2\` add index \`foo_param2_bar_id_index\`(\`bar_id\`);'); this.addSql('alter table \`foo_param2\` add index \`foo_param2_baz_id_index\`(\`baz_id\`);'); - this.addSql('create table \`publisher2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null, \`type\` enum(\\'local\\', \\'global\\') not null, \`type2\` enum(\\'LOCAL\\', \\'GLOBAL\\') not null, \`enum1\` tinyint null, \`enum2\` tinyint null, \`enum3\` tinyint null, \`enum4\` enum(\\'a\\', \\'b\\', \\'c\\') null, \`enum5\` enum(\\'a\\') null) default character set utf8mb4 engine = InnoDB;'); + this.addSql('create table \`publisher2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null default \\'asd\\', \`type\` enum(\\'local\\', \\'global\\') not null default \\'local\\', \`type2\` enum(\\'LOCAL\\', \\'GLOBAL\\') not null default \\'LOCAL\\', \`enum1\` tinyint null, \`enum2\` tinyint null, \`enum3\` tinyint null, \`enum4\` enum(\\'a\\', \\'b\\', \\'c\\') null, \`enum5\` enum(\\'a\\') null) default character set utf8mb4 engine = InnoDB;'); this.addSql('create table \`author2\` (\`id\` int unsigned not null auto_increment primary key, \`created_at\` datetime(3) not null default current_timestamp(3), \`updated_at\` datetime(3) not null default current_timestamp(3), \`name\` varchar(255) not null, \`email\` varchar(255) not null, \`age\` int null default null, \`terms_accepted\` tinyint(1) not null default false, \`optional\` tinyint(1) null, \`identities\` text null, \`born\` date null, \`born_time\` time null, \`favourite_book_uuid_pk\` varchar(36) null, \`favourite_author_id\` int unsigned null) default character set utf8mb4 engine = InnoDB;'); this.addSql('alter table \`author2\` add index \`custom_email_index_name\`(\`email\`);'); @@ -208,7 +208,7 @@ export class Migration20191013214813 extends Migration { "alter table \`foo_param2\` add index \`foo_param2_bar_id_index\`(\`bar_id\`);", "alter table \`foo_param2\` add index \`foo_param2_baz_id_index\`(\`baz_id\`);", "", - "create table \`publisher2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null, \`type\` enum('local', 'global') not null, \`type2\` enum('LOCAL', 'GLOBAL') not null, \`enum1\` tinyint null, \`enum2\` tinyint null, \`enum3\` tinyint null, \`enum4\` enum('a', 'b', 'c') null, \`enum5\` enum('a') null) default character set utf8mb4 engine = InnoDB;", + "create table \`publisher2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null default 'asd', \`type\` enum('local', 'global') not null default 'local', \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL', \`enum1\` tinyint null, \`enum2\` tinyint null, \`enum3\` tinyint null, \`enum4\` enum('a', 'b', 'c') null, \`enum5\` enum('a') null) default character set utf8mb4 engine = InnoDB;", "", "create table \`author2\` (\`id\` int unsigned not null auto_increment primary key, \`created_at\` datetime(3) not null default current_timestamp(3), \`updated_at\` datetime(3) not null default current_timestamp(3), \`name\` varchar(255) not null, \`email\` varchar(255) not null, \`age\` int null default null, \`terms_accepted\` tinyint(1) not null default false, \`optional\` tinyint(1) null, \`identities\` text null, \`born\` date null, 
\`born_time\` time null, \`favourite_book_uuid_pk\` varchar(36) null, \`favourite_author_id\` int unsigned null) default character set utf8mb4 engine = InnoDB;", "alter table \`author2\` add index \`custom_email_index_name\`(\`email\`);", @@ -559,7 +559,7 @@ export class Migration20191013214813 extends Migration { this.addSql('alter table \`foo_param2\` add index \`foo_param2_bar_id_index\`(\`bar_id\`);'); this.addSql('alter table \`foo_param2\` add index \`foo_param2_baz_id_index\`(\`baz_id\`);'); - this.addSql('create table \`publisher2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null, \`type\` enum(\\'local\\', \\'global\\') not null, \`type2\` enum(\\'LOCAL\\', \\'GLOBAL\\') not null, \`enum1\` tinyint null, \`enum2\` tinyint null, \`enum3\` tinyint null, \`enum4\` enum(\\'a\\', \\'b\\', \\'c\\') null, \`enum5\` enum(\\'a\\') null) default character set utf8mb4 engine = InnoDB;'); + this.addSql('create table \`publisher2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null default \\'asd\\', \`type\` enum(\\'local\\', \\'global\\') not null default \\'local\\', \`type2\` enum(\\'LOCAL\\', \\'GLOBAL\\') not null default \\'LOCAL\\', \`enum1\` tinyint null, \`enum2\` tinyint null, \`enum3\` tinyint null, \`enum4\` enum(\\'a\\', \\'b\\', \\'c\\') null, \`enum5\` enum(\\'a\\') null) default character set utf8mb4 engine = InnoDB;'); this.addSql('create table \`author2\` (\`id\` int unsigned not null auto_increment primary key, \`created_at\` datetime(3) not null default current_timestamp(3), \`updated_at\` datetime(3) not null default current_timestamp(3), \`name\` varchar(255) not null, \`email\` varchar(255) not null, \`age\` int null default null, \`terms_accepted\` tinyint(1) not null default false, \`optional\` tinyint(1) null, \`identities\` text null, \`born\` date null, \`born_time\` time null, \`favourite_book_uuid_pk\` varchar(36) null, \`favourite_author_id\` int unsigned null) default character set utf8mb4 engine = InnoDB;'); this.addSql('alter table \`author2\` add index \`custom_email_index_name\`(\`email\`);'); @@ -706,7 +706,7 @@ export class Migration20191013214813 extends Migration { "alter table \`foo_param2\` add index \`foo_param2_bar_id_index\`(\`bar_id\`);", "alter table \`foo_param2\` add index \`foo_param2_baz_id_index\`(\`baz_id\`);", "", - "create table \`publisher2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null, \`type\` enum('local', 'global') not null, \`type2\` enum('LOCAL', 'GLOBAL') not null, \`enum1\` tinyint null, \`enum2\` tinyint null, \`enum3\` tinyint null, \`enum4\` enum('a', 'b', 'c') null, \`enum5\` enum('a') null) default character set utf8mb4 engine = InnoDB;", + "create table \`publisher2\` (\`id\` int unsigned not null auto_increment primary key, \`name\` varchar(255) not null default 'asd', \`type\` enum('local', 'global') not null default 'local', \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL', \`enum1\` tinyint null, \`enum2\` tinyint null, \`enum3\` tinyint null, \`enum4\` enum('a', 'b', 'c') null, \`enum5\` enum('a') null) default character set utf8mb4 engine = InnoDB;", "", "create table \`author2\` (\`id\` int unsigned not null auto_increment primary key, \`created_at\` datetime(3) not null default current_timestamp(3), \`updated_at\` datetime(3) not null default current_timestamp(3), \`name\` varchar(255) not null, \`email\` varchar(255) not null, \`age\` int null default null, \`terms_accepted\` 
tinyint(1) not null default false, \`optional\` tinyint(1) null, \`identities\` text null, \`born\` date null, \`born_time\` time null, \`favourite_book_uuid_pk\` varchar(36) null, \`favourite_author_id\` int unsigned null) default character set utf8mb4 engine = InnoDB;", "alter table \`author2\` add index \`custom_email_index_name\`(\`email\`);", diff --git a/tests/features/schema-generator/__snapshots__/SchemaGenerator.mysql.test.ts.snap b/tests/features/schema-generator/__snapshots__/SchemaGenerator.mysql.test.ts.snap index 717d0a7c5b2b..f70757b38138 100644 --- a/tests/features/schema-generator/__snapshots__/SchemaGenerator.mysql.test.ts.snap +++ b/tests/features/schema-generator/__snapshots__/SchemaGenerator.mysql.test.ts.snap @@ -188,8 +188,6 @@ set foreign_key_checks = 0; alter table \`test2\` drop foreign key \`test2_foo___bar_foreign\`; -alter table \`publisher2\` modify \`name\` varchar(255) not null default 'asd', modify \`type\` enum('local', 'global') not null default 'local', modify \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL'; - alter table \`book2\` drop \`foo\`; alter table \`test2\` drop index \`test2_foo___bar_unique\`; @@ -388,8 +386,6 @@ set foreign_key_checks = 0; alter table \`test2\` drop foreign key \`test2_foo___bar_foreign\`; -alter table \`publisher2\` modify \`name\` varchar(255) not null default 'asd', modify \`type\` enum('local', 'global') not null default 'local', modify \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL'; - alter table \`book2\` drop \`foo\`; alter table \`test2\` drop index \`test2_foo___bar_unique\`; @@ -405,8 +401,6 @@ exports[`SchemaGenerator rename column [mysql]: mysql-update-schema-rename-colum alter table \`test2\` drop foreign key \`test2_foo___bar_foreign\`; -alter table \`publisher2\` modify \`name\` varchar(255) not null default 'asd', modify \`type\` enum('local', 'global') not null default 'local', modify \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL'; - alter table \`author2\` drop index \`author2_favourite_author_id_index\`; alter table \`author2\` drop index \`author2_name_age_index\`; alter table \`author2\` change \`age\` \`age_in_years\` int null default null; @@ -462,8 +456,6 @@ exports[`SchemaGenerator update schema [mysql]: mysql-update-schema-create-table alter table \`test2\` drop foreign key \`test2_foo___bar_foreign\`; -alter table \`publisher2\` modify \`name\` varchar(255) not null default 'asd', modify \`type\` enum('local', 'global') not null default 'local', modify \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL'; - alter table \`book2\` drop \`foo\`; alter table \`test2\` drop index \`test2_foo___bar_unique\`; @@ -552,8 +544,6 @@ exports[`SchemaGenerator update schema enums [mysql]: mysql-update-schema-enums- alter table \`test2\` drop foreign key \`test2_foo___bar_foreign\`; -alter table \`publisher2\` modify \`name\` varchar(255) not null default 'asd', modify \`type\` enum('local', 'global') not null default 'local', modify \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL'; - alter table \`book2\` drop \`foo\`; alter table \`test2\` drop index \`test2_foo___bar_unique\`; diff --git a/tests/features/schema-generator/__snapshots__/SchemaGenerator.mysql2.test.ts.snap b/tests/features/schema-generator/__snapshots__/SchemaGenerator.mysql2.test.ts.snap index a0e0260c5cb9..1d725597f674 100644 --- a/tests/features/schema-generator/__snapshots__/SchemaGenerator.mysql2.test.ts.snap +++ 
b/tests/features/schema-generator/__snapshots__/SchemaGenerator.mysql2.test.ts.snap @@ -160,8 +160,6 @@ drop table if exists \`base_user2\`; exports[`SchemaGenerator (no FKs) generate schema from metadata [mysql]: mysql-update-schema-dump 1`] = ` "alter table \`test2\` drop foreign key \`test2_foo___bar_foreign\`; -alter table \`publisher2\` modify \`name\` varchar(255) not null default 'asd', modify \`type\` enum('local', 'global') not null default 'local', modify \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL'; - alter table \`book2\` drop \`foo\`; alter table \`test2\` drop index \`test2_foo___bar_unique\`; @@ -176,8 +174,6 @@ exports[`SchemaGenerator (no FKs) rename column [mysql]: mysql-update-schema-ren alter table \`test2\` drop foreign key \`test2_foo___bar_foreign\`; -alter table \`publisher2\` modify \`name\` varchar(255) not null default 'asd', modify \`type\` enum('local', 'global') not null default 'local', modify \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL'; - alter table \`author2\` drop index \`author2_favourite_author_id_index\`; alter table \`author2\` drop index \`author2_name_age_index\`; alter table \`author2\` change \`age\` \`age_in_years\` int null default null; @@ -230,8 +226,6 @@ exports[`SchemaGenerator (no FKs) update schema [mysql]: mysql-update-schema-cre alter table \`test2\` drop foreign key \`test2_foo___bar_foreign\`; -alter table \`publisher2\` modify \`name\` varchar(255) not null default 'asd', modify \`type\` enum('local', 'global') not null default 'local', modify \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL'; - alter table \`book2\` drop \`foo\`; alter table \`test2\` drop index \`test2_foo___bar_unique\`; @@ -320,8 +314,6 @@ exports[`SchemaGenerator (no FKs) update schema enums [mysql]: mysql-update-sche alter table \`test2\` drop foreign key \`test2_foo___bar_foreign\`; -alter table \`publisher2\` modify \`name\` varchar(255) not null default 'asd', modify \`type\` enum('local', 'global') not null default 'local', modify \`type2\` enum('LOCAL', 'GLOBAL') not null default 'LOCAL'; - alter table \`book2\` drop \`foo\`; alter table \`test2\` drop index \`test2_foo___bar_unique\`; diff --git a/tests/features/schema-generator/__snapshots__/SchemaGenerator.postgres.test.ts.snap b/tests/features/schema-generator/__snapshots__/SchemaGenerator.postgres.test.ts.snap index feae2a36a38c..a0e2f4bd2954 100644 --- a/tests/features/schema-generator/__snapshots__/SchemaGenerator.postgres.test.ts.snap +++ b/tests/features/schema-generator/__snapshots__/SchemaGenerator.postgres.test.ts.snap @@ -20,7 +20,7 @@ create table "label2" ("uuid" uuid not null, "name" varchar(255) not null, const create table "publisher2" ("id" serial primary key, "name" varchar(255) not null default 'asd', "type" text check ("type" in ('local', 'global')) not null default 'local', "type2" text check ("type2" in ('LOCAL', 'GLOBAL')) not null default 'LOCAL', "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in ('a', 'b', 'c')) null, "enum5" text check ("enum5" in ('a')) null); -create table "author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null default null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, 
"favourite_book_uuid_pk" uuid null, "favourite_author_id" int null); +create table "author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null); create index "custom_email_index_name" on "author2" ("email"); alter table "author2" add constraint "custom_email_unique_name" unique ("email"); create index "author2_terms_accepted_index" on "author2" ("terms_accepted"); @@ -149,7 +149,7 @@ create table "label2" ("uuid" uuid not null, "name" varchar(255) not null, const create table "publisher2" ("id" serial primary key, "name" varchar(255) not null default 'asd', "type" text check ("type" in ('local', 'global')) not null default 'local', "type2" text check ("type2" in ('LOCAL', 'GLOBAL')) not null default 'LOCAL', "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in ('a', 'b', 'c')) null, "enum5" text check ("enum5" in ('a')) null); -create table "author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null default null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null); +create table "author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" varchar(255) not null, "email" varchar(255) not null, "age" int null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null); create index "custom_email_index_name" on "author2" ("email"); alter table "author2" add constraint "custom_email_unique_name" unique ("email"); create index "author2_terms_accepted_index" on "author2" ("terms_accepted"); @@ -228,170 +228,3 @@ alter table "test2_bars" add constraint "test2_bars_foo_bar2_id_foreign" foreign set session_replication_role = 'origin'; " `; - -exports[`SchemaGenerator [postgres] update indexes [postgres]: postgres-update-schema-add-fulltext-index-tsvector 1`] = ` -"alter table "book2" alter column "title" drop default; -alter table "book2" alter column "title" type tsvector using ("title"::tsvector); -create index "book2_title_index" on "public"."book2" using gin("title"); - -" -`; - -exports[`SchemaGenerator [postgres] update indexes [postgres]: postgres-update-schema-add-index 1`] = ` -"create index "author2_name_email_index" on "public"."author2" using gin(to_tsvector('simple', "name" || ' ' || "email")); - -create index "book2_author_id_publisher_id_index" on "book2" ("author_id", "publisher_id"); -alter table "book2" add constraint "book2_author_id_publisher_id_unique" unique ("author_id", "publisher_id"); - -" -`; - -exports[`SchemaGenerator [postgres] update indexes [postgres]: postgres-update-schema-alter-index 1`] = ` -"alter index 
"book2_author_id_publisher_id_index" rename to "custom_idx_123"; - -" -`; - -exports[`SchemaGenerator [postgres] update indexes [postgres]: postgres-update-schema-drop-index 1`] = ` -"drop index "book2_title_index"; -drop index "custom_idx_123"; - -" -`; - -exports[`SchemaGenerator [postgres] update indexes [postgres]: postgres-update-schema-drop-unique 1`] = ` -"alter table "book2" drop constraint "book2_author_id_publisher_id_unique"; - -" -`; - -exports[`SchemaGenerator [postgres] update schema [postgres]: postgres-update-schema-add-column 1`] = ` -"alter table "new_table" add column "id" serial primary key, add column "updated_at" timestamp(3) not null default current_timestamp(3); - -alter table "author2" add column "favourite_book_uuid_pk" uuid null; -alter table "author2" add constraint "author2_favourite_book_uuid_pk_foreign" foreign key ("favourite_book_uuid_pk") references "book2" ("uuid_pk") on update no action on delete cascade; - -" -`; - -exports[`SchemaGenerator [postgres] update schema [postgres]: postgres-update-schema-alter-column 1`] = ` -"alter table "author2" drop constraint "author2_favourite_author_id_foreign"; - -alter table "author2" alter column "name" type int using ("name"::int); -alter table "author2" alter column "name" set default 42; -alter table "author2" alter column "name" drop not null; -alter table "author2" alter column "age" type int using ("age"::int); -alter table "author2" alter column "age" set default 42; -alter table "author2" add constraint "author2_favourite_author_id_foreign" foreign key ("favourite_author_id") references "foo_bar2" ("id") on update cascade on delete set null; - -" -`; - -exports[`SchemaGenerator [postgres] update schema [postgres]: postgres-update-schema-create-table 1`] = ` -"create table "new_table" ("id" serial primary key, "created_at" timestamp(3) not null default current_timestamp(3), "updated_at" timestamp(3) not null default current_timestamp(3), "name" varchar(255) not null); - -" -`; - -exports[`SchemaGenerator [postgres] update schema [postgres]: postgres-update-schema-drop-1:1 1`] = ` -"alter table "foo_bar2" drop constraint "foo_bar2_baz_id_foreign"; - -alter table "foo_bar2" drop constraint "foo_bar2_baz_id_unique"; -alter table "foo_bar2" drop column "baz_id"; - -" -`; - -exports[`SchemaGenerator [postgres] update schema [postgres]: postgres-update-schema-drop-column 1`] = ` -"alter table "author2" drop constraint "author2_favourite_book_uuid_pk_foreign"; - -alter table "new_table" drop constraint "new_table_pkey"; -alter table "new_table" drop column "id"; -alter table "new_table" drop column "updated_at"; - -alter table "author2" alter column "name" drop default; -alter table "author2" alter column "name" type int using ("name"::int); -alter table "author2" alter column "name" set not null; -alter table "author2" drop column "favourite_book_uuid_pk"; - -" -`; - -exports[`SchemaGenerator [postgres] update schema [postgres]: postgres-update-schema-drop-table 1`] = ` -"alter table "address2" drop constraint "address2_author_id_foreign"; - -alter table "author2_following" drop constraint "author2_following_author2_1_id_foreign"; - -alter table "author2_following" drop constraint "author2_following_author2_2_id_foreign"; - -alter table "author_to_friend" drop constraint "author_to_friend_author2_1_id_foreign"; - -alter table "author_to_friend" drop constraint "author_to_friend_author2_2_id_foreign"; - -alter table "book2" drop constraint "book2_author_id_foreign"; - -drop table if exists "author2" cascade; - 
-drop table if exists "new_table" cascade; - -" -`; - -exports[`SchemaGenerator [postgres] update schema [postgres]: postgres-update-schema-rename-column 1`] = ` -"alter table "author2" drop constraint "author2_favourite_author_id_foreign"; - -drop index "author2_name_age_index"; -alter table "author2" rename column "age" to "age_in_years"; -alter table "author2" rename column "favourite_author_id" to "favourite_writer_id"; -alter table "author2" add constraint "author2_favourite_writer_id_foreign" foreign key ("favourite_writer_id") references "foo_bar2" ("id") on update cascade on delete set null; -create index "author2_name_age_in_years_index" on "author2" ("name", "age_in_years"); - -" -`; - -exports[`SchemaGenerator [postgres] update schema - entity in different namespace [postgres] (GH #1215): postgres-update-schema-1215 1`] = ` -"create schema if not exists "other"; - -create table "other"."new_table" ("id" serial primary key, "column_name" varchar(255) not null); -alter table "other"."new_table" add constraint "new_table_column_name_unique" unique ("column_name"); - -" -`; - -exports[`SchemaGenerator [postgres] update schema - entity in different namespace [postgres] (GH #1215): postgres-update-schema-1215 2`] = ` -"drop table if exists "other"."new_table" cascade; - -drop schema "other"; - -" -`; - -exports[`SchemaGenerator [postgres] update schema enums [postgres]: postgres-update-schema-enums-1 1`] = ` -"create table "new_table" ("id" serial primary key, "enum_test" varchar(255) not null); - -" -`; - -exports[`SchemaGenerator [postgres] update schema enums [postgres]: postgres-update-schema-enums-2 1`] = ` -"alter table "new_table" alter column "enum_test" type text using ("enum_test"::text); -alter table "new_table" add constraint "new_table_enum_test_check" check ("enum_test" in ('a', 'b')); - -" -`; - -exports[`SchemaGenerator [postgres] update schema enums [postgres]: postgres-update-schema-enums-3 1`] = ` -"alter table "new_table" drop constraint if exists "new_table_enum_test_check"; - -alter table "new_table" alter column "enum_test" type text using ("enum_test"::text); -alter table "new_table" add constraint "new_table_enum_test_check" check ("enum_test" in ('a', 'b', 'c')); - -" -`; - -exports[`SchemaGenerator [postgres] update schema enums [postgres]: postgres-update-schema-enums-4 1`] = ` -"alter table "new_table" drop constraint if exists "new_table_enum_test_check"; - -alter table "new_table" alter column "enum_test" type int using ("enum_test"::int); - -" -`; diff --git a/tests/features/schema-generator/__snapshots__/changing-pk-type.postgres.test.ts.snap b/tests/features/schema-generator/__snapshots__/changing-pk-type.postgres.test.ts.snap index 7cbfcc1e3f1b..36daa689897d 100644 --- a/tests/features/schema-generator/__snapshots__/changing-pk-type.postgres.test.ts.snap +++ b/tests/features/schema-generator/__snapshots__/changing-pk-type.postgres.test.ts.snap @@ -39,7 +39,7 @@ alter table "user" add constraint "user_pkey" primary key ("id", "id2"); exports[`changing PK column type [postgres] (GH 1480) changing PK type: 3. 
remove old PK (make it single PK again) 1`] = ` { - "down": "alter table "user" add column "id2" int4 not null default null; + "down": "alter table "user" add column "id2" int4 not null; alter table "user" drop constraint "user_pkey"; alter table "user" add constraint "user_pkey" primary key ("id", "id2"); diff --git a/tests/features/schema-generator/__snapshots__/custom-type-mapping.test.ts.snap b/tests/features/schema-generator/__snapshots__/custom-type-mapping.test.ts.snap index 11fb461d9cb5..0f622dd306e8 100644 --- a/tests/features/schema-generator/__snapshots__/custom-type-mapping.test.ts.snap +++ b/tests/features/schema-generator/__snapshots__/custom-type-mapping.test.ts.snap @@ -9,9 +9,9 @@ create table "foo_bar2" ("id" serial primary key, "name" text not null, "name wi alter table "foo_bar2" add constraint "foo_bar2_baz_id_unique" unique ("baz_id"); alter table "foo_bar2" add constraint "foo_bar2_foo_bar_id_unique" unique ("foo_bar_id"); -create table "publisher2" ("id" serial primary key, "name" text not null, "type" text check ("type" in ('local', 'global')) not null, "type2" text check ("type2" in ('LOCAL', 'GLOBAL')) not null, "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in ('a', 'b', 'c')) null, "enum5" text check ("enum5" in ('a')) null); +create table "publisher2" ("id" serial primary key, "name" text not null default 'asd', "type" text check ("type" in ('local', 'global')) not null default 'local', "type2" text check ("type2" in ('LOCAL', 'GLOBAL')) not null default 'LOCAL', "enum1" smallint null, "enum2" smallint null, "enum3" smallint null, "enum4" text check ("enum4" in ('a', 'b', 'c')) null, "enum5" text check ("enum5" in ('a')) null); -create table "author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" text not null, "email" text not null, "age" int null default null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null); +create table "author2" ("id" serial primary key, "created_at" timestamptz(3) not null default current_timestamp(3), "updated_at" timestamptz(3) not null default current_timestamp(3), "name" text not null, "email" text not null, "age" int null, "terms_accepted" boolean not null default false, "optional" boolean null, "identities" text[] null, "born" date null, "born_time" time(0) null, "favourite_book_uuid_pk" uuid null, "favourite_author_id" int null); create index "custom_email_index_name" on "author2" ("email"); alter table "author2" add constraint "custom_email_unique_name" unique ("email"); create index "author2_terms_accepted_index" on "author2" ("terms_accepted"); diff --git a/tests/mysql-schema.sql b/tests/mysql-schema.sql index f18b6194d1ca..5a4eec4176de 100644 --- a/tests/mysql-schema.sql +++ b/tests/mysql-schema.sql @@ -37,7 +37,7 @@ set foreign_key_checks = 0; create table `sandwich` (`id` int(10) unsigned not null auto_increment primary key, `name` varchar(255) not null, `price` int(11) not null) default character set utf8mb4 engine = InnoDB; -create table `publisher2` (`id` int(10) unsigned not null auto_increment primary key, `name` varchar(255) not null, `type` enum('local', 'global') not null, `type2` enum('LOCAL', 'GLOBAL') not null, `enum1` tinyint null, `enum2` tinyint null, `enum3` tinyint null, `enum4` 
enum('a', 'b', 'c') null, `enum5` enum('a') null) default character set utf8mb4 engine = InnoDB; +create table `publisher2` (`id` int(10) unsigned not null auto_increment primary key, `name` varchar(255) not null default 'asd', `type` enum('local', 'global') not null default 'local', `type2` enum('LOCAL', 'GLOBAL') not null default 'LOCAL', `enum1` tinyint null, `enum2` tinyint null, `enum3` tinyint null, `enum4` enum('a', 'b', 'c') null, `enum5` enum('a') null) default character set utf8mb4 engine = InnoDB; create table `foo_baz2` (`id` int(10) unsigned not null auto_increment primary key, `name` varchar(255) not null, `version` datetime(3) not null default current_timestamp(3)) default character set utf8mb4 engine = InnoDB;