✨ post update collision detection (#8328) (#8362)
closes #5599

If two users edit the same post, it can happen that they override each other's content or post settings. With this change, this can no longer happen.

✨ Update collision for posts
- add a new bookshelf plugin to detect these changes
- use Bookshelf's `changed` object -> we don't have to create our own diff
- compare the client and server `updated_at` fields (see the sketch below)
- run post edits in a transaction (see comments in the code base)
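
A minimal sketch of the idea, assuming a helper that runs before an update is applied (the actual Bookshelf plugin is not shown in the diff below; the function name and error message are illustrative):

```js
// Illustrative sketch only, not the actual plugin code.
// Compare the updated_at the client based its edit on with the updated_at
// currently stored on the server; if the server row is newer, someone else
// saved in between, so reject instead of silently overwriting.
var moment = require('moment');

function detectUpdateCollision(serverModel, clientAttrs) {
    // Nothing to protect if the client did not send any changed attributes.
    if (!clientAttrs || !Object.keys(clientAttrs).length) {
        return;
    }

    var clientUpdatedAt = moment(clientAttrs.updated_at),
        serverUpdatedAt = moment(serverModel.get('updated_at'));

    if (clientUpdatedAt.isValid() && serverUpdatedAt.isAfter(clientUpdatedAt)) {
        throw new Error('Saving failed! Someone else is editing this post.');
    }
}
```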

🙀  Update collision for tags
- `updateTags` when adding posts runs in `onCreated` - this happens after the post has been inserted
   --> it's "okay" to attach the tags afterwards on insert
   --> there is no need to add collision detection for inserts
   --> it's very hard to move the `updateTags` call to `onCreating`, because the `updateTags` function queries the database to look up the affected post
- `updateTags` when editing posts runs in `onSaving` - all operations run in a transaction and are rolled back if anything gets rejected

- Post model edit: if a transaction is passed in from outside, reuse it (see the sketch below)
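
For illustration, a caller-side sketch of what "reuse the transaction from outside" means, based on the model API used in this diff (`models.Base.transaction`, `models.Post.edit`); the `postId` variable is hypothetical:

```js
// Hypothetical caller: open our own transaction and hand it to the model via
// options.transacting, so the edit reuses it instead of creating a new one.
models.Base.transaction(function (transacting) {
    return models.Post.edit({title: 'Updated title'}, {
        id: postId,
        context: {internal: true},
        transacting: transacting
    });
});
```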

✨  introduce options.forUpdate
- if two queries happen in one transaction, we have to signal to knex/mysql that we are selecting for an update
- otherwise the following race can happen:
  >> you fetch posts for an update
  >> a user request comes in and updates the post (e.g. sets the title to "X")
  >> you update the fetched posts and the title gets overridden with the old value (see the sketch below)
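
The underlying mechanism, sketched with plain knex (simplified, not Ghost code): selecting the row `forUpdate` inside the transaction locks it, so a concurrent write has to wait until the transaction commits.

```js
// Simplified knex sketch of SELECT ... FOR UPDATE; postId is illustrative.
knex.transaction(function (trx) {
    return trx('posts')
        .where({id: postId})
        .forUpdate()          // row stays locked until the transaction ends
        .first()
        .then(function (post) {
            // A concurrent "set title to X" update now blocks until we commit,
            // so our write cannot be based on stale data.
            return trx('posts')
                .where({id: post.id})
                .update({status: 'published'});
        });
});
```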

use options.forUpdate and protect internal post updates: model listeners
- use a transaction for listener updates
- signal `forUpdate`
- write a complex test

use options.forUpdate and protect internal post updates: scheduling
- publish endpoint runs in a transaction
- add complex test
- @todo: right now the scheduling API uses the posts API, therefore we had to extend the allowed options for the APIs
  >> transactions can be passed through
  >> but they are only allowed when explicitly whitelisted from outside via {opts: [...]} (see the sketch after this list)
  >> so I think this is fine and not dirty
  >> will wait for opinions
  >> alternatively we would have to rewrite the scheduling endpoint to use the models directly
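
A simplified sketch of the whitelisting idea (the real logic lives in api/utils.js and api/posts.js in the diff below; the helper name is made up):

```js
// Made-up helper illustrating how an endpoint's permitted option keys are
// extended by whatever an internal caller passes via options.opts.
function permittedKeys(defaultOpts, options) {
    return defaultOpts.concat(options.opts || []);
}

// The scheduler calling the posts API with its own transaction:
permittedKeys(['id', 'include'], {opts: ['transacting', 'forUpdate']});
// -> ['id', 'include', 'transacting', 'forUpdate']
```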
kirrg001 authored and kevinansfield committed Apr 19, 2017
1 parent 482ea12 commit c93f03b
Showing 11 changed files with 563 additions and 135 deletions.
4 changes: 2 additions & 2 deletions core/server/api/posts.js
@@ -94,7 +94,7 @@ posts = {

// Push all of our tasks into a `tasks` array in the correct order
tasks = [
utils.validate(docName, {attrs: attrs}),
utils.validate(docName, {attrs: attrs, opts: options.opts || []}),
utils.handlePublicPermissions(docName, 'read'),
utils.convertOptions(allowedIncludes),
modelQuery
@@ -135,7 +135,7 @@ posts = {

// Push all of our tasks into a `tasks` array in the correct order
tasks = [
utils.validate(docName, {opts: utils.idDefaultOptions}),
utils.validate(docName, {opts: utils.idDefaultOptions.concat(options.opts || [])}),
utils.handlePermissions(docName, 'edit'),
utils.convertOptions(allowedIncludes),
modelQuery
41 changes: 28 additions & 13 deletions core/server/api/schedules.js
@@ -10,7 +10,11 @@ var _ = require('lodash'),
utils = require('./utils');

/**
* publish a scheduled post
* Publish a scheduled post
*
* `apiPosts.read` and `apiPosts.edit` must happen in one transaction.
* We lock the target row on fetch by using the `forUpdate` option.
* Read more in models/post.js - `onFetching`
*
* object.force: you can force publishing a post in the past (for example if your service was down)
*/
@@ -35,21 +39,32 @@ exports.publishPost = function publishPost(object, options) {
function (cleanOptions) {
cleanOptions.status = 'scheduled';

return apiPosts.read(cleanOptions)
.then(function (result) {
post = result.posts[0];
publishedAtMoment = moment(post.published_at);
return dataProvider.Base.transaction(function (transacting) {
cleanOptions.transacting = transacting;
cleanOptions.forUpdate = true;

if (publishedAtMoment.diff(moment(), 'minutes') > publishAPostBySchedulerToleranceInMinutes) {
return Promise.reject(new errors.NotFoundError({message: i18n.t('errors.api.job.notFound')}));
}
// CASE: extend allowed options, see api/utils.js
cleanOptions.opts = ['forUpdate', 'transacting'];

if (publishedAtMoment.diff(moment(), 'minutes') < publishAPostBySchedulerToleranceInMinutes * -1 && object.force !== true) {
return Promise.reject(new errors.NotFoundError({message: i18n.t('errors.api.job.publishInThePast')}));
}
return apiPosts.read(cleanOptions)
.then(function (result) {
post = result.posts[0];
publishedAtMoment = moment(post.published_at);

return apiPosts.edit({posts: [{status: 'published'}]}, _.pick(cleanOptions, ['context', 'id']));
});
if (publishedAtMoment.diff(moment(), 'minutes') > publishAPostBySchedulerToleranceInMinutes) {
return Promise.reject(new errors.NotFoundError({message: i18n.t('errors.api.job.notFound')}));
}

if (publishedAtMoment.diff(moment(), 'minutes') < publishAPostBySchedulerToleranceInMinutes * -1 && object.force !== true) {
return Promise.reject(new errors.NotFoundError({message: i18n.t('errors.api.job.publishInThePast')}));
}

return apiPosts.edit({
posts: [{status: 'published'}]},
_.pick(cleanOptions, ['context', 'id', 'transacting', 'forUpdate', 'opts'])
);
});
});
}
], options);
};
3 changes: 2 additions & 1 deletion core/server/api/utils.js
@@ -122,7 +122,7 @@ utils = {
name: {}
},
// these values are sanitised/validated separately
noValidation = ['data', 'context', 'include', 'filter'],
noValidation = ['data', 'context', 'include', 'filter', 'forUpdate', 'transacting'],
errors = [];

_.each(options, function (value, key) {
@@ -262,6 +262,7 @@ utils = {
options.columns = utils.prepareFields(options.fields);
delete options.fields;
}

return options;
};
},
90 changes: 57 additions & 33 deletions core/server/models/base/index.js
@@ -5,20 +5,20 @@
// The models are internal to Ghost, only the API and some internal functions such as migration and import/export
// accesses the models directly. All other parts of Ghost, including the blog frontend, admin UI, and apps are only
// allowed to access data via the API.
var _ = require('lodash'),
bookshelf = require('bookshelf'),
moment = require('moment'),
Promise = require('bluebird'),
ObjectId = require('bson-objectid'),
config = require('../../config'),
db = require('../../data/db'),
errors = require('../../errors'),
filters = require('../../filters'),
schema = require('../../data/schema'),
utils = require('../../utils'),
var _ = require('lodash'),
bookshelf = require('bookshelf'),
moment = require('moment'),
Promise = require('bluebird'),
ObjectId = require('bson-objectid'),
config = require('../../config'),
db = require('../../data/db'),
errors = require('../../errors'),
filters = require('../../filters'),
schema = require('../../data/schema'),
utils = require('../../utils'),
validation = require('../../data/validation'),
plugins = require('../plugins'),
i18n = require('../../i18n'),
plugins = require('../plugins'),
i18n = require('../../i18n'),

ghostBookshelf,
proto;
@@ -42,6 +42,9 @@ ghostBookshelf.plugin(plugins.includeCount);
// Load the Ghost pagination plugin, which gives us the `fetchPage` method on Models
ghostBookshelf.plugin(plugins.pagination);

// Update collision plugin
ghostBookshelf.plugin(plugins.collision);

// Cache an instance of the base model prototype
proto = ghostBookshelf.Model.prototype;

@@ -77,18 +80,35 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
this.include = _.clone(options.include);
}

['fetching', 'fetched', 'creating', 'created', 'updating', 'updated', 'destroying', 'destroyed', 'saved']
.forEach(function (eventName) {
var functionName = 'on' + eventName[0].toUpperCase() + eventName.slice(1);
[
'fetching',
'fetching:collection',
'fetched',
'creating',
'created',
'updating',
'updated',
'destroying',
'destroyed',
'saved'
].forEach(function (eventName) {
var functionName = 'on' + eventName[0].toUpperCase() + eventName.slice(1);

if (functionName.indexOf(':') !== -1) {
functionName = functionName.slice(0, functionName.indexOf(':'))
+ functionName[functionName.indexOf(':') + 1].toUpperCase()
+ functionName.slice(functionName.indexOf(':') + 2);
functionName = functionName.replace(':', '');
}

if (!self[functionName]) {
return;
}
if (!self[functionName]) {
return;
}

self.on(eventName, function eventTriggered() {
return this[functionName].apply(this, arguments);
});
self.on(eventName, function eventTriggered() {
return this[functionName].apply(this, arguments);
});
});

this.on('saving', function onSaving() {
var self = this,
@@ -134,8 +154,8 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({

_.each(attrs, function each(value, key) {
if (value !== null
&& schema.tables[self.tableName].hasOwnProperty(key)
&& schema.tables[self.tableName][key].type === 'dateTime') {
&& schema.tables[self.tableName].hasOwnProperty(key)
&& schema.tables[self.tableName][key].type === 'dateTime') {
attrs[key] = moment(value).format('YYYY-MM-DD HH:mm:ss');
}
});
@@ -172,7 +192,7 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
var self = this;
_.each(attrs, function each(value, key) {
if (schema.tables[self.tableName].hasOwnProperty(key)
&& schema.tables[self.tableName][key].type === 'bool') {
&& schema.tables[self.tableName][key].type === 'bool') {
attrs[key] = value ? true : false;
}
});
@@ -360,7 +380,7 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
* @param {Object} options Represents options to filter in order to be passed to the Bookshelf query.
* @param {String} methodName The name of the method to check valid options for.
* @return {Object} The filtered results of `options`.
*/
*/
filterOptions: function filterOptions(options, methodName) {
var permittedOptions = this.permittedOptions(methodName),
filteredOptions = _.pick(options, permittedOptions);
@@ -423,9 +443,9 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
findPage: function findPage(options) {
options = options || {};

var self = this,
itemCollection = this.forge(null, {context: options.context}),
tableName = _.result(this.prototype, 'tableName'),
var self = this,
itemCollection = this.forge(null, {context: options.context}),
tableName = _.result(this.prototype, 'tableName'),
requestedColumns = options.columns;

// Set this to true or pass ?debug=true as an API option to get output
@@ -462,7 +482,7 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
}

return itemCollection.fetchPage(options).then(function formatResponse(response) {
var data = {},
var data = {},
models = [];

options.columns = requestedColumns;
@@ -496,6 +516,10 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
/**
* ### Edit
* Naive edit
*
* We always forward the `method` option to Bookshelf, see http://bookshelfjs.org/#Model-instance-save.
* Based on the `method` option Bookshelf and Ghost can determine if a query is an insert or an update.
*
* @param {Object} data
* @param {Object} options (optional)
* @return {Promise(ghostBookshelf.Model)} Edited Model
@@ -514,7 +538,7 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({

return model.fetch(options).then(function then(object) {
if (object) {
return object.save(data, options);
return object.save(data, _.merge({method: 'update'}, options));
}
});
},
@@ -560,13 +584,13 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
},

/**
* ### Generate Slug
* ### Generate Slug
* Create a string to act as the permalink for an object.
* @param {ghostBookshelf.Model} Model Model type to generate a slug for
* @param {String} base The string for which to generate a slug, usually a title or name
* @param {Object} options Options to pass to findOne
* @return {Promise(String)} Resolves to a unique slug string
*/
*/
generateSlug: function generateSlug(Model, base, options) {
var slug,
slugTryCount = 1,
95 changes: 54 additions & 41 deletions core/server/models/base/listeners.js
@@ -4,7 +4,8 @@ var config = require('../../config'),
errors = require(config.get('paths:corePath') + '/server/errors'),
logging = require(config.get('paths:corePath') + '/server/logging'),
sequence = require(config.get('paths:corePath') + '/server/utils/sequence'),
moment = require('moment-timezone');
moment = require('moment-timezone'),
_ = require('lodash');

/**
* WHEN access token is created we will update last_seen for user.
@@ -43,54 +44,66 @@ events.on('user.deactivated', function (userModel) {
events.on('settings.activeTimezone.edited', function (settingModel) {
var newTimezone = settingModel.attributes.value,
previousTimezone = settingModel._updatedAttributes.value,
timezoneOffsetDiff = moment.tz(previousTimezone).utcOffset() - moment.tz(newTimezone).utcOffset();
timezoneOffsetDiff = moment.tz(previousTimezone).utcOffset() - moment.tz(newTimezone).utcOffset(),
options = {context: {internal: true}};

// CASE: TZ was updated, but did not change
if (previousTimezone === newTimezone) {
return;
}

models.Post.findAll({filter: 'status:scheduled', context: {internal: true}})
.then(function (results) {
if (!results.length) {
return;
}
/**
* CASE:
* `Post.findAll` and the Post.edit` must run in one single transaction.
* We lock the target row on fetch by using the `forUpdate` option.
* Read more in models/post.js - `onFetching`
*/
return models.Base.transaction(function (transacting) {
options.transacting = transacting;
options.forUpdate = true;

return sequence(results.map(function (post) {
return function reschedulePostIfPossible() {
var newPublishedAtMoment = moment(post.get('published_at')).add(timezoneOffsetDiff, 'minutes');
return models.Post.findAll(_.merge({filter: 'status:scheduled'}, options))
.then(function (results) {
if (!results.length) {
return;
}

/**
* CASE:
* - your configured TZ is GMT+01:00
* - now is 10AM +01:00 (9AM UTC)
* - your post should be published 8PM +01:00 (7PM UTC)
* - you reconfigure your blog TZ to GMT+08:00
* - now is 5PM +08:00 (9AM UTC)
* - if we don't change the published_at, 7PM + 8 hours === next day 5AM
* - so we update published_at to 7PM - 480minutes === 11AM UTC
* - 11AM UTC === 7PM +08:00
*/
if (newPublishedAtMoment.isBefore(moment().add(5, 'minutes'))) {
post.set('status', 'draft');
} else {
post.set('published_at', newPublishedAtMoment.toDate());
}
return sequence(results.map(function (post) {
return function reschedulePostIfPossible() {
var newPublishedAtMoment = moment(post.get('published_at')).add(timezoneOffsetDiff, 'minutes');

return models.Post.edit(post.toJSON(), {id: post.id, context: {internal: true}}).reflect();
};
})).each(function (result) {
if (!result.isFulfilled()) {
logging.error(new errors.GhostError({
err: result.reason()
}));
}
/**
* CASE:
* - your configured TZ is GMT+01:00
* - now is 10AM +01:00 (9AM UTC)
* - your post should be published 8PM +01:00 (7PM UTC)
* - you reconfigure your blog TZ to GMT+08:00
* - now is 5PM +08:00 (9AM UTC)
* - if we don't change the published_at, 7PM + 8 hours === next day 5AM
* - so we update published_at to 7PM - 480minutes === 11AM UTC
* - 11AM UTC === 7PM +08:00
*/
if (newPublishedAtMoment.isBefore(moment().add(5, 'minutes'))) {
post.set('status', 'draft');
} else {
post.set('published_at', newPublishedAtMoment.toDate());
}

return models.Post.edit(post.toJSON(), _.merge({id: post.id}, options)).reflect();
};
})).each(function (result) {
if (!result.isFulfilled()) {
logging.error(new errors.GhostError({
err: result.reason()
}));
}
});
})
.catch(function (err) {
logging.error(new errors.GhostError({
err: err,
level: 'critical'
}));
});
})
.catch(function (err) {
logging.error(new errors.GhostError({
err: err,
level: 'critical'
}));
});
});
});