diff --git a/package.json b/package.json index 92464af..6f20e1e 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,7 @@ "lint:fix": "standard --fix" }, "dependencies": { + "@baethon/promise-duck": "^1.0.1", "dataloader": "^2.0.0", "lodash.flow": "^3.5.0", "lodash.frompairs": "^4.0.1", @@ -25,6 +26,7 @@ }, "devDependencies": { "ava": "^3.6.0", + "faker": "^4.1.0", "husky": "^4.2.3", "knex": "^0.20.13", "lint-staged": "^10.1.1", diff --git a/src/events/deleted.js b/src/events/deleted.js new file mode 100644 index 0000000..2bd671f --- /dev/null +++ b/src/events/deleted.js @@ -0,0 +1,18 @@ +const Event = require('./event') + +class Deleted extends Event { + static get eventName () { + return 'deleted' + } + + /** + * @param {*} results + */ + constructor (results) { + super() + + this.results = results + } +} + +module.exports = Deleted diff --git a/src/events/deleting.js b/src/events/deleting.js new file mode 100644 index 0000000..48bfdd8 --- /dev/null +++ b/src/events/deleting.js @@ -0,0 +1,29 @@ +const Event = require('./event') +const Deleted = require('./deleted') + +class Deleting extends Event { + static get eventName () { + return 'deleting' + } + + /** + * @param {String|String[]} returning + */ + constructor (returning) { + super() + this.returning = returning + } + + mutateQueryBuilder (qb) { + qb._single.returning = this.returning + } + + /** + * @inheritdoc + */ + toAfterEvent (results) { + return new Deleted(results) + } +} + +module.exports = Deleting diff --git a/src/events/event.js b/src/events/event.js new file mode 100644 index 0000000..6d51590 --- /dev/null +++ b/src/events/event.js @@ -0,0 +1,34 @@ +const { KexError } = require('../errors') + +class Event { + static get eventName () { + throw new KexError('Event name should be set in child class') + } + + constructor () { + this.cancelled = false + this.emitted = false + } + + cancel () { + this.cancelled = true + } + + markEmitted () { + this.emitted = true + } + + mutateQueryBuilder (qb) { 
+ // extend on when required + } + + /** + * @param {*} results + * @return {Event} + */ + toAfterEvent (results) { + throw new KexError('toAfterEvent() should be implemented in the child class') + } +} + +module.exports = Event diff --git a/src/events/fetched.js b/src/events/fetched.js new file mode 100644 index 0000000..4a6208e --- /dev/null +++ b/src/events/fetched.js @@ -0,0 +1,18 @@ +const Event = require('./event') + +class Fetched extends Event { + static get eventName () { + return 'fetched' + } + + /** + * @param {Object|Object[]} results + */ + constructor (results) { + super() + + this.results = results + } +} + +module.exports = Fetched diff --git a/src/events/fetching.js b/src/events/fetching.js new file mode 100644 index 0000000..e14f5e0 --- /dev/null +++ b/src/events/fetching.js @@ -0,0 +1,18 @@ +const Event = require('./event') +const Fetched = require('./fetched') + +class Fetching extends Event { + static get eventName () { + return 'fetching' + } + + /** + * @param {*} results + * @return {import('./fetched')} + */ + toAfterEvent (results) { + return new Fetched(results) + } +} + +module.exports = Fetching diff --git a/src/events/index.js b/src/events/index.js new file mode 100644 index 0000000..632a467 --- /dev/null +++ b/src/events/index.js @@ -0,0 +1,11 @@ +module.exports = { + EventsPipeline: require('./pipeline'), + DeletingEvent: require('./deleting'), + DeletedEvent: require('./deleted'), + FetchingEvent: require('./fetching'), + FetchedEvent: require('./fetched'), + UpdatingEvent: require('./updating'), + UpdatedEvent: require('./updated'), + InsertingEvent: require('./inserting'), + InsertedEvent: require('./inserted') +} diff --git a/src/events/inserted.js b/src/events/inserted.js new file mode 100644 index 0000000..b5d6830 --- /dev/null +++ b/src/events/inserted.js @@ -0,0 +1,19 @@ +const Event = require('./event') + +class Inserted extends Event { + static get eventName () { + return 'inserted' + } + + /** + * @param {*} results + * 
@param {Object|Object[]} values + */ + constructor (results, values) { + super() + this.results = results + this.values = values + } +} + +module.exports = Inserted diff --git a/src/events/inserting.js b/src/events/inserting.js new file mode 100644 index 0000000..6dbd6cf --- /dev/null +++ b/src/events/inserting.js @@ -0,0 +1,33 @@ +const Event = require('./event') +const Inserted = require('./inserted') + +class Inserting extends Event { + static get eventName () { + return 'inserting' + } + + /** + * @param {Object|Object[]} values + * @param {String|String[]} returning + */ + constructor (values, returning) { + super() + + this.values = values + this.returning = returning + } + + mutateQueryBuilder (qb) { + qb._single.insert = this.values + qb._single.returning = this.returning + } + + /** + * @inheritdoc + */ + toAfterEvent (results) { + return new Inserted(results, this.values) + } +} + +module.exports = Inserting diff --git a/src/events/pipeline.js b/src/events/pipeline.js new file mode 100644 index 0000000..90e5be8 --- /dev/null +++ b/src/events/pipeline.js @@ -0,0 +1,90 @@ +/** @typedef { import('./event') } Event */ + +/** + * @callback EventListener + * @param {Event} event + */ + +/** @typedef {Map} Listeners */ + +class EventsPipeline { + /** + * @param {Array} listeners entries for listeners mapTo + */ + constructor (listeners = []) { + /** @type {Listeners} */ + this.listeners = new Map(listeners) + } + + /** + * @param {String} eventName + * @param {EventListener} listener + * @return {Function} a callback which removes the listener + */ + on (eventName, listener) { + const list = this.listeners.get(eventName) || [] + this.listeners.set(eventName, list.concat(listener)) + + return () => { + const list = this.listeners.get(eventName) + const index = list.indexOf(listener) + + if (index >= 0) { + list.splice(index, 1) + this.listeners.set(eventName, list) + } + } + } + + /** + * Execute all listeners of given event. 
+ * + * The listeners are called serially. + * Event instance can be emitted only once. To repeat it emission, create new event. + * + * @param {Event} event + * @param {*} [bind] value to bind with the listener + * @return {Promise} the result of calling the listener; + * FALSE indicates that event was cancelled + */ + async emit (event, bind = null) { + if (event.emitted) { + return false + } + + const { eventName } = event.constructor + const list = this.listeners.get(eventName) || [] + + event.markEmitted() + + for (let i = 0; i < list.length; i++) { + const fn = list[i] + + await fn.call(bind, event) + + if (event.cancelled) { + return false + } + } + + return true + } + + /** + * Create copy of current instance. + * + * This method makes sure that all lists are dereferenced. + * + * @return {EventsPipeline} + */ + clone () { + const entries = Array.from(this.listeners.entries()) + + return new this.constructor(entries.map(([name, listeners]) => ([ + name, + [...listeners] + ]))) + } +} + +module.exports = EventsPipeline diff --git a/src/events/updated.js b/src/events/updated.js new file mode 100644 index 0000000..57cf17c --- /dev/null +++ b/src/events/updated.js @@ -0,0 +1,20 @@ +const Event = require('./event') + +class Updated extends Event { + static get eventName () { + return 'updated' + } + + /** + * @param {*} results + * @param {Object|Object[]} values + */ + constructor (results, values) { + super() + + this.results = results + this.values = values + } +} + +module.exports = Updated diff --git a/src/events/updating.js b/src/events/updating.js new file mode 100644 index 0000000..be03091 --- /dev/null +++ b/src/events/updating.js @@ -0,0 +1,33 @@ +const Event = require('./event') +const Updated = require('./updated') + +class Updating extends Event { + static get eventName () { + return 'updating' + } + + /** + * @param {Object|Object[]} values + * @param {String|String[]} returning + */ + constructor (values, returning) { + super() + + this.values = 
values + this.returning = returning + } + + mutateQueryBuilder (qb) { + qb._single.update = this.values + qb._single.returning = this.returning + } + + /** + * @inheritdoc + */ + toAfterEvent (results) { + return new Updated(results, this.values) + } +} + +module.exports = Updating diff --git a/src/model.js b/src/model.js index db8eee4..0cf61e2 100644 --- a/src/model.js +++ b/src/model.js @@ -2,11 +2,13 @@ const pluralize = require('pluralize') const snakeCase = require('lodash.snakecase') const QueryBuilder = require('./query-builder') const { KexError } = require('./errors') +const { EventsPipeline } = require('./events') /** @typedef { import('./plugins/soft-deletes').SoftDeleteOptions } SoftDeleteOptions */ /** @typedef { import('./relations/relation') } Relation */ /** @typedef { import('./query-builder').Scope } Scope */ /** @typedef { import('./plugins/timestamps').TimestampsOptions } TimestampsOptions */ +/** @typedef { import('./events/pipeline').EventListener } EventListener */ /** * @typedef {Object} ModelOptions @@ -39,6 +41,7 @@ class Model { this.options = options this.QueryBuilder = QueryBuilder.createChildClass(this) this.booted = false + this.events = new EventsPipeline() } get tableName () { @@ -53,6 +56,7 @@ class Model { query () { this.bootIfNotBooted() + return this.QueryBuilder.create(this.kex.getKnexClient()) } @@ -74,6 +78,16 @@ class Model { } } + /** + * @param {String} eventName + * @param {EventListener} listener + * @return {Model} + */ + on (eventName, listener) { + this.events.on(eventName, listener) + return this + } + /** * @private */ diff --git a/src/plugins/include/index.js b/src/plugins/include/index.js index e55d367..898630a 100644 --- a/src/plugins/include/index.js +++ b/src/plugins/include/index.js @@ -5,7 +5,6 @@ const { parseIncludes } = require('./parser') * @param {import('../../model')} Model */ module.exports = (Model) => { - const { QueryBuilder } = Model const related = new Related(Model) Model.QueryBuilder.extend({ 
@@ -20,16 +19,7 @@ module.exports = (Model) => { } }) - const { then: thenMethod } = QueryBuilder.prototype - - QueryBuilder.extend({ - methodName: 'then', - force: true, - fn (resolve, reject) { - return thenMethod.call(this) - .then(results => related.fetchRelated(results, this.includes)) - .then(resolve) - .catch(reject) - } + Model.on('fetched', async function (event) { + event.results = await related.fetchRelated(event.results, this.includes) }) } diff --git a/src/plugins/timestamps.js b/src/plugins/timestamps.js index b182f6d..8f980f8 100644 --- a/src/plugins/timestamps.js +++ b/src/plugins/timestamps.js @@ -7,13 +7,13 @@ const setDateField = (name) => item => ({ /** * @typedef {Object} TimestampsOptions - * @property {String} [deletedAtColumn=deleted_at] + * @property {String} [createdAtColumn=created_at] * @property {String} [updatedAtColumn=updated_at] */ /** @type {TimestampsOptions} */ const defaults = { - deletedAtColumn: 'deleted_at', + createdAtColumn: 'created_at', updatedAtColumn: 'updated_at' } @@ -27,47 +27,27 @@ module.exports = (Model) => { return } - const { QueryBuilder } = Model - const timestampsOptions = { + const options = { ...defaults, ...timestamps } - const { - createdAtColumn = 'created_at', - updatedAtColumn = 'updated_at' - } = timestampsOptions + const setUpdatedAt = setDateField(options.updatedAtColumn) + const setCreatedAt = setDateField(options.createdAtColumn) + const setBothFields = flow([ + setUpdatedAt, + setCreatedAt + ]) - const { - insert: insertMethod, - update: updateMethod - } = QueryBuilder.prototype - - const setUpdatedAt = setDateField(updatedAtColumn) - const setCreatedAt = setDateField(createdAtColumn) - - QueryBuilder.extend({ - methodName: 'update', - force: true, - fn (values, returning) { - return updateMethod.call(this, setUpdatedAt(values), returning) - } + Model.on('updating', event => { + event.values = setUpdatedAt(event.values) }) - QueryBuilder.extend({ - methodName: 'insert', - force: true, - fn 
(values, returning) { - const update = flow([ - setUpdatedAt, - setCreatedAt - ]) - - const newValues = Array.isArray(values) - ? values.map(update) - : update(values) + Model.on('inserting', event => { + const { values } = event - return insertMethod.call(this, newValues, returning) - } + event.values = Array.isArray(values) + ? values.map(setBothFields) + : setBothFields(values) }) } diff --git a/src/query-builder.js b/src/query-builder.js index f336bcf..f92c8b1 100644 --- a/src/query-builder.js +++ b/src/query-builder.js @@ -1,7 +1,18 @@ +const promiseDuck = require('@baethon/promise-duck') const BaseQueryBuilder = require('knex/lib/query/builder') const { KexError } = require('./errors') +const { + FetchingEvent, + UpdatingEvent, + DeletingEvent, + InsertingEvent +} = require('./events') /** @typedef { import('knex/lib/client') } KnexClient */ +/** @typedef { import('./model') } Model */ +/** @typedef { import('./events/pipeline') } EventsPipeline */ +/** @typedef { import('./events/event') } Event */ +/** @typedef { import('./events/pipeline').EventListener } EventListener */ /** * @callback Scope @@ -27,15 +38,6 @@ class QueryBuilder extends BaseQueryBuilder { return BaseQueryBuilder.prototype.table.call(qb, this.tableName) } - table () { - throw new KexError('Can\'t use table() in models query builder') - } - - newInstance () { - const Builder = this.constructor - return new Builder(this.client) - } - /** * Add a global scope to the model * @@ -73,6 +75,14 @@ class QueryBuilder extends BaseQueryBuilder { this.prototype[methodName] = fn } + /** + * @return {Model} + * @abstract + */ + static get Model () { + throw new KexError('The Model getter is not implemented') + } + /** * @inheritdoc */ @@ -81,6 +91,21 @@ class QueryBuilder extends BaseQueryBuilder { /** @type {Set} */ this.ignoredScopes = new Set() + + /** @type {EventsPipeline} */ + this.events = this.constructor.Model.events.clone() + + // overwrite native then() method + Object.assign(this, 
promiseDuck.thenable(this.executeQuery.bind(this))) + } + + table () { + throw new KexError('Can\'t use table() in models query builder') + } + + newInstance () { + const Builder = this.constructor + return new Builder(this.client) } /** @@ -137,18 +162,89 @@ class QueryBuilder extends BaseQueryBuilder { Object.assign(qb, builder) }) } + + /** + * Execute the query and fetch the results + * + * @return {Promise<*>} + */ + async executeQuery () { + const event = this.createEventToEmit() + + await this.events.emit(event, this) + + if (event.cancelled) { + return undefined + } + + event.mutateQueryBuilder(this) + + const results = await BaseQueryBuilder.prototype.then.call(this) + const afterEvent = event.toAfterEvent(results) + + await this.events.emit(afterEvent, this) + + return afterEvent.results + } + + /** + * Create the event which should be emitted before the query + * + * @return {Event} + */ + createEventToEmit () { + switch (this._method) { + case 'update': + return new UpdatingEvent(this._single.update, this._single.returning) + + case 'del': + return new DeletingEvent(this._single.returning) + + case 'insert': + return new InsertingEvent(this._single.insert, this._single.returning) + + default: + return new FetchingEvent() + } + } + + /** + * @param {String} eventName + * @param {EventListener|Function} listener + * @param {Object} [options] + * @param {Boolean} [options.native=false] + * @return {QueryBuilder} + */ + on (eventName, listener, options = {}) { + const { native = false } = options + + if (native) { + return BaseQueryBuilder.prototype.on.call(this, eventName, listener) + } + + this.events.on(eventName, listener) + + return this + } } /** * Create the a new child class of QueryBuilder * - * @param {import('./model')} Model + * @param {Model} Model * @returns {typeof QueryBuilder} */ const createChildClass = (Model) => { class ChildQueryBuilder extends QueryBuilder { static get tableName () { - return Model.tableName + return 
this.Model.tableName + } + + /** + * @return {Model} + */ + static get Model () { + return Model + } } } diff --git a/tests/events/pipeline.test.js b/tests/events/pipeline.test.js new file mode 100644 index 0000000..f34990e --- /dev/null +++ b/tests/events/pipeline.test.js @@ -0,0 +1,120 @@ +const test = require('ava') +const sinon = require('sinon') +const EventsPipeline = require('../../src/events/pipeline') +const Event = require('../../src/events/event') + +class TestEvent extends Event { + static get eventName () { + return 'test' + } +} + +const defer = (ms, fn) => () => new Promise((resolve) => { + setTimeout(() => resolve(fn()), ms) +}) + +test('add & remove listener', t => { + const events = new EventsPipeline() + const remove = events.on('test', sinon.stub()) + + t.is(events.listeners.get('test').length, 1) + + remove() + t.is(events.listeners.get('test').length, 0) +}) + +test('call listeners', async t => { + const events = new EventsPipeline() + const firstListener = sinon.stub() + const secondListener = sinon.stub() + + events.on('test', firstListener) + events.on('test', secondListener) + + const event = new TestEvent() + + await events.emit(event) + + t.true(event.emitted) + t.true(firstListener.calledWith(event)) + t.true(secondListener.calledWith(event)) +}) + +test('prevent second emission of the same event', async t => { + const events = new EventsPipeline() + const firstListener = sinon.stub() + + events.on('test', firstListener) + + const event = new TestEvent() + event.markEmitted() + + await events.emit(event) + + t.false(firstListener.called) +}) + +test('cancel event', async t => { + const secondListener = sinon.stub() + const events = new EventsPipeline([ + ['test', [ + (event) => { + event.cancel() + }, + secondListener + ]] + ]) + + const result = await events.emit(new TestEvent()) + + t.false(secondListener.calledOnce) + t.false(result) +}) + +test('call listeners in exact order', async t => { + const events = new EventsPipeline() + const 
callsList = [] + const firstListener = () => { + callsList.push('first') + } + const secondListener = () => { + callsList.push('second') + } + + events.on('test', defer(100, firstListener)) + events.on('test', defer(10, secondListener)) + + await events.emit(new TestEvent()) + + t.deepEqual(callsList, [ + 'first', + 'second' + ]) +}) + +test('cloning', t => { + const listener = sinon.stub() + + const events = new EventsPipeline() + events.on('test', listener) + + const cloned = events.clone() + + t.false(events === cloned) + t.false(events.listeners === cloned.listeners) + t.false(events.listeners.get('test') === cloned.listeners.get('test')) + t.deepEqual(events.listeners.get('test'), cloned.listeners.get('test')) +}) + +test('binding', async t => { + const fakeContext = {} + const events = new EventsPipeline() + t.plan(1) + + events.on('test', function () { + t.is(fakeContext, this) + }) + + const event = new TestEvent() + await events.emit(event, fakeContext) +}) diff --git a/tests/migrations/1-users.js b/tests/migrations/1-users.js index afaedeb..77abc07 100644 --- a/tests/migrations/1-users.js +++ b/tests/migrations/1-users.js @@ -7,6 +7,10 @@ module.exports = { table.string('first_name') table.string('last_name') table.boolean('active').default(false) + table.datetime('created').nullable().default(null) + table.datetime('updated').nullable().default(null) + table.datetime('created_at').nullable().default(null) + table.datetime('updated_at').nullable().default(null) }) }, diff --git a/tests/model-events.test.js b/tests/model-events.test.js new file mode 100644 index 0000000..d3b6081 --- /dev/null +++ b/tests/model-events.test.js @@ -0,0 +1,347 @@ +const test = require('ava') +const sinon = require('sinon') +const setupDb = require('./setup-db') +const { createKex } = require('./utils') +const { compareDbResults } = require('./assertions') +const events = require('../src/events') + +setupDb() + +const emitted = event => { + event.markEmitted() + return event +} 
+ +test.beforeEach(t => { + const kex = createKex(t) + t.context.User = kex.createModel('User') +}) + +test('events passing to query builder', t => { + const { User } = t.context + const spy = sinon.spy(User.events, 'clone') + + User.events.on('fetched', sinon.stub()) + + const queryEvents = User.query().events + + t.deepEqual(User.events, queryEvents) + t.false(User.events === queryEvents) + t.true(spy.calledOnce) +}) + +test('fetched/fetching', async t => { + const { User, knex } = t.context + + const fetching = sinon.stub() + const fetched = sinon.stub() + + User.events.on('fetching', fetching) + User.events.on('fetched', fetched) + + const expected = await knex.table('users') + const actual = await User.query() + + t.true(fetching.calledWith(emitted(new events.FetchingEvent()))) + t.true(fetched.calledWith(emitted(new events.FetchedEvent(expected)))) + compareDbResults(t, expected, actual) +}) + +test('fetched/fetching | cancel fetching event', async t => { + const { User } = t.context + + const fetched = sinon.stub() + + User.events.on('fetching', event => { + event.cancel() + }) + User.events.on('fetched', fetched) + + const actual = await User.query() + + t.false(fetched.called) + t.is(actual, undefined) +}) + +test('fetched/fetching | modify end results', async t => { + const { User } = t.context + + User.events.on('fetched', event => { + event.results = 'foo' + }) + + const actual = await User.query() + + t.is(actual, 'foo') +}) + +test.serial('updating/updated', async t => { + const { User, knex } = t.context + + const updating = sinon.stub() + const updated = sinon.stub() + const [userId] = await knex.table('users') + .returning('id') + .insert({ + username: 'arya', + first_name: 'Arya', + last_name: 'Stark', + active: true + }) + + User.events.on('updating', updating) + User.events.on('updated', updated) + + const data = { active: false } + const query = User.query() + .where('id', userId) + .update(data) + + await query + + const check = await 
User.find(userId) + + await User.find(userId) + .delete() + + t.true(updating.calledWith(emitted(new events.UpdatingEvent(data)))) + t.true(updated.calledWith(emitted(new events.UpdatedEvent( + sinon.match.any, + data + )))) + + t.falsy(check.active) +}) + +test.serial('updating/updated | alter update data', async t => { + const { User, knex } = t.context + + const [userId] = await knex.table('users') + .returning('id') + .insert({ + username: 'arya', + first_name: 'Arya', + last_name: 'Stark', + active: true + }) + + User.events.on('updating', (event) => { + event.values = { + ...event.values, + last_name: 'No name' + } + }) + + const data = { active: false } + await User.query() + .where('id', userId) + .update(data) + + const check = await User.find(userId) + + await User.find(userId) + .delete() + + t.falsy(check.active) + t.is(check.last_name, 'No name') +}) + +test.serial('updating/updated | cancel update', async t => { + const { User, knex } = t.context + + const updated = sinon.stub() + const [userId] = await knex.table('users') + .returning('id') + .insert({ + username: 'arya', + first_name: 'Arya', + last_name: 'Stark', + active: true + }) + + User.events.on('updating', (event) => { + event.cancel() + }) + User.events.on('updated', updated) + + const data = { active: false } + const query = User.query() + .where('id', userId) + .update(data) + + await query + + const check = await User.find(userId) + + await User.find(userId) + .delete() + + t.false(updated.called) + + t.truthy(check.active) +}) + +test.serial('deleting/deleted', async t => { + const { User, knex } = t.context + + const deleting = sinon.stub() + const deleted = sinon.stub() + const [userId] = await knex.table('users') + .returning('id') + .insert({ + username: 'arya', + first_name: 'Arya', + last_name: 'Stark', + active: true + }) + + User.events.on('deleting', deleting) + User.events.on('deleted', deleted) + + const query = User.query() + .where('id', userId) + .delete() + + await query 
+ + const check = await User.find(userId) + + await knex.table('users') + .where('id', userId) + .delete() + + t.true(deleting.calledWith(emitted(new events.DeletingEvent()))) + t.true(deleted.calledWith(emitted(new events.DeletedEvent(sinon.match.any)))) + t.falsy(check) +}) + +test.serial('deleting/deleted | cancel event', async t => { + const { User, knex } = t.context + + const deleted = sinon.stub() + const [userId] = await knex.table('users') + .returning('id') + .insert({ + username: 'arya', + first_name: 'Arya', + last_name: 'Stark', + active: true + }) + + User.events.on('deleting', (event) => { + event.cancel() + }) + User.events.on('deleted', deleted) + + await User.query() + .where('id', userId) + .delete() + + const check = await User.find(userId) + + await knex.table('users') + .where('id', userId) + .delete() + + t.false(deleted.called) + t.truthy(check) +}) + +test.serial('inserting/inserted', async t => { + const { User } = t.context + + const inserting = sinon.stub() + const inserted = sinon.stub() + const data = { + username: 'arya', + first_name: 'Arya', + last_name: 'Stark', + active: true + } + + User.events.on('inserting', inserting) + User.events.on('inserted', inserted) + + const [userId] = await User.returning('id') + .insert(data) + + await User.findOrFail(userId) + await User.find(userId) + .delete() + + t.true(inserting.calledWith(emitted(new events.InsertingEvent(data, 'id')))) + t.true(inserted.calledWith(emitted(new events.InsertedEvent( + sinon.match.any, + data + )))) +}) + +test.serial('inserting/inserted | cancel inserting', async t => { + const { User } = t.context + + const inserted = sinon.stub() + const data = { + username: 'arya', + first_name: 'Arya', + last_name: 'Stark', + active: true + } + + User.events.on('inserting', (event) => { + event.cancel() + }) + User.events.on('inserted', inserted) + + const [{ count: usersCount }] = await User.query().count() + + const result = await User.returning('id') + .insert(data) + + 
const [{ count: checkCount }] = await User.query().count() + + t.false(inserted.called) + t.is(checkCount, usersCount) + t.falsy(result) +}) + +test.serial('inserting/inserted | modify data', async t => { + const { User } = t.context + + const data = { + username: 'arya', + first_name: 'Arya', + last_name: 'Stark', + active: true + } + + User.events.on('inserting', (event) => { + event.values = { + ...event.values, + active: false + } + }) + + const [userId] = await User.returning('id') + .insert(data) + + const check = await User.findOrFail(userId) + + await User.find(userId) + .delete() + + t.falsy(check.active) +}) + +test('query builder binding', async t => { + const { User } = t.context + const assert = function () { + t.true(this instanceof User.QueryBuilder) + } + + t.plan(2) + + User.on('fetching', assert) + User.on('fetched', assert) + + await User.query() +}) diff --git a/tests/plugins/timestamps.test.js b/tests/plugins/timestamps.test.js index d2700be..be2c7bc 100644 --- a/tests/plugins/timestamps.test.js +++ b/tests/plugins/timestamps.test.js @@ -1,97 +1,189 @@ const test = require('ava') const sinon = require('sinon') +const faker = require('faker') const setupDb = require('../setup-db') -const { equalQueries } = require('../assertions') -const { createKex } = require('../utils') +const { createKex, userFactory } = require('../utils') setupDb() -test.before(() => { - sinon.useFakeTimers({ now: new Date() }) +const timestampFields = { + created: null, + created_at: null, + updated: null, + updated_at: null +} +const userData = userFactory() + +test.before(t => { + // eliminate the milliseconds precision from current time + // without it, MySQL might return weird results + const now = Math.floor(new Date().getTime() / 1000) + const clock = sinon.useFakeTimers({ now: new Date(now * 1000) }) + + t.context.clock = clock }) -test('disabled timestamps', t => { +test.beforeEach(async t => { const { knex } = t.context - const User = 
createKex(t).createModel('User') - equalQueries(t, knex.from('users').insert({ foo: 1 }), User.insert({ foo: 1 })) - equalQueries(t, knex.from('users').update({ foo: 1 }), User.query().update({ foo: 1 })) + const trx = await knex.transaction() + const kex = createKex(t, { + knexClientResolver: () => trx.client + }) + + Object.assign(t.context, { kex, trx }) }) -test('insert | default columns', async t => { - const { knex } = t.context - const User = createKex(t).createModel('User', { - timestamps: true - }) +test.afterEach.always(async t => { + const { trx } = t.context + + await trx.rollback() +}) + +test.serial('disabled timestamps', async t => { + const { kex } = t.context - const expected = knex.table('users').insert({ - foo: 1, - updated_at: new Date(), - created_at: new Date() + const User = kex.createModel('User') + const [id] = await User.returning('id') + .insert(userData) + + await User.query() + .where({ + ...userData, + ...timestampFields + }) + .firstOrFail() + + const firstName = faker.name.firstName() + + await User.find(id) + .update({ first_name: firstName }) + + await User.query() + .where({ + ...userData, + ...timestampFields, + first_name: firstName + }) + .firstOrFail() + + t.pass() +}) + +test.serial('insert | default columns', async t => { + const { kex } = t.context + const User = kex.createModel('User', { + timestamps: true }) - equalQueries(t, expected, User.insert({ foo: 1 })) + await User.insert(userData) + await User.query() + .where({ + ...userData, + ...timestampFields, + updated_at: new Date(), + created_at: new Date() + }) + .firstOrFail() + + t.pass() }) -test('insert | list of items', async t => { - const { knex } = t.context - const User = createKex(t).createModel('User', { +test.serial('insert | list of items', async t => { + const { kex } = t.context + const User = kex.createModel('User', { timestamps: true }) const data = [ - { foo: 1 }, - { foo: 2 } + userFactory(), + userFactory() ] - const expected = 
knex.table('users').insert(data.map(item => ({ - ...item, - updated_at: new Date(), - created_at: new Date() - }))) + await User.insert(data) - equalQueries(t, expected, User.insert(data)) -}) + await Promise.all(data.map(item => User.query() + .where({ + ...item, + ...timestampFields, + updated_at: new Date(), + created_at: new Date() + }) + .firstOrFail() + )) -test('insert | custom column name', async t => { - const { knex } = t.context - const User = createKex(t).createModel('User', { - timestamps: { createdAtColumn: 'createdAt', updatedAtColumn: 'updatedAt' } - }) + t.pass() +}) - const expected = knex.table('users').insert({ - foo: 1, - updatedAt: new Date(), - createdAt: new Date() +test.serial('insert | custom column name', async t => { + const { kex } = t.context + const User = kex.createModel('User', { + timestamps: { createdAtColumn: 'created', updatedAtColumn: 'updated' } }) - equalQueries(t, expected, User.insert({ foo: 1 })) + await User.insert(userData) + await User.query() + .where({ + ...userData, + ...timestampFields, + created: new Date(), + updated: new Date() + }) + .firstOrFail() + + t.pass() }) -test('update | default columns', async t => { - const { knex } = t.context - const User = createKex(t).createModel('User', { +test.serial('update | default columns', async t => { + const { kex, clock } = t.context + const User = kex.createModel('User', { timestamps: true }) - const expected = knex.table('users').update({ - foo: 1, - updated_at: new Date() - }) + const createdAt = new Date() + const [id] = await User.returning('id') + .insert(userData) + + clock.tick(1000) - equalQueries(t, expected, User.query().update({ foo: 1 })) + const firstName = faker.name.firstName() + + await User.find(id) + .update({ first_name: firstName }) + + await User.query() + .where({ + ...userData, + first_name: firstName, + created_at: createdAt, + updated_at: new Date() + }) + .firstOrFail() + + t.pass() }) -test('update | custom column name', async t => { - const { 
knex } = t.context - const User = createKex(t).createModel('User', { - timestamps: { createdAtColumn: 'createdAt', updatedAtColumn: 'updatedAt' } +test.serial('update | custom column name', async t => { + const { kex } = t.context + const User = kex.createModel('User', { + timestamps: { updatedAtColumn: 'updated' } }) - const expected = knex.table('users').update({ - foo: 1, - updatedAt: new Date() - }) + const jon = await User.where('username', 'jon') + .firstOrFail() + + const firstName = faker.name.firstName() + + await User.find(jon.id) + .update({ first_name: firstName }) + + await User.query() + .where({ + ...jon, + first_name: firstName, + updated: new Date() + }) + .firstOrFail() - equalQueries(t, expected, User.query().update({ foo: 1 })) + t.pass() }) diff --git a/tests/queries.test.js b/tests/queries.test.js index 3e0b015..ec27bd5 100644 --- a/tests/queries.test.js +++ b/tests/queries.test.js @@ -1,4 +1,5 @@ const test = require('ava') +const sinon = require('sinon') const setupDb = require('./setup-db') const { equalQueries } = require('./assertions') const { createKex } = require('./utils') @@ -36,3 +37,25 @@ test('forbid using table()', t => { .table('foo') }) }) + +test('events | add listener', async t => { + const User = createKex(t).createModel('User') + + const listener = sinon.stub() + + await User.query() + .on('fetching', listener) + + t.true(listener.calledOnce) +}) + +test('events | add knex native listener', async t => { + const User = createKex(t).createModel('User') + + const listener = sinon.stub() + + await User.query() + .on('query', listener, { native: true }) + + t.true(listener.calledOnce) +}) diff --git a/tests/utils.js b/tests/utils.js index 2efd620..b2fa4fb 100644 --- a/tests/utils.js +++ b/tests/utils.js @@ -1,4 +1,5 @@ const test = require('ava') +const faker = require('faker') const { Kex } = require('../') const createKex = (t, options = {}) => { @@ -17,4 +18,11 @@ const onlyForClient = (dbClient, name, testFn) => { test(name, 
testFn) } -module.exports = { createKex, onlyForClient } +const userFactory = () => ({ + username: faker.internet.userName(), + first_name: faker.name.firstName(), + last_name: faker.name.lastName(), + active: (process.env.DB_CLIENT === 'mysql') ? 1 : true +}) + +module.exports = { createKex, onlyForClient, userFactory } diff --git a/yarn.lock b/yarn.lock index a663e36..3ea310e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -30,6 +30,11 @@ dependencies: regenerator-runtime "^0.13.4" +"@baethon/promise-duck@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@baethon/promise-duck/-/promise-duck-1.0.1.tgz#6c41a2451d3a8140cb8eaecaa928a1fe4c5fcabe" + integrity sha512-TPVq2hMtBTe9PDGzr2bcNmwAxw5rSWWxNIAkeOpZglS3KHOdJKRWSEjY6a6BSEy9XBKBLw+Z/cmqop22Jw+xQg== + "@concordance/react@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@concordance/react/-/react-2.0.0.tgz#aef913f27474c53731f4fd79cc2f54897de90fde" @@ -1512,6 +1517,11 @@ extsprintf@^1.2.0: resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= +faker@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/faker/-/faker-4.1.0.tgz#1e45bbbecc6774b3c195fad2835109c6d748cc3f" + integrity sha1-HkW7vsxndLPBlfrSg1EJxtdIzD8= + fast-deep-equal@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz#545145077c501491e33b15ec408c294376e94ae4"