[IMP] account_asset,account: Performance improvements

The generation of the deferred revenues initially took 471s. This
improvement reduces that time to 72s (as measured with cProfile).
The issue was that every message post and log invalidates the cache,
which caused several read requests to be repeated multiple times.

closes #31750

Signed-off-by: Denis Ledoux <beledouxdenis@users.noreply.github.com>
ndeodoo authored and beledouxdenis committed Mar 11, 2019
1 parent c23d118 commit 10964a230ca820271926c6b9e45906841e7b0617
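
A minimal sketch of the pattern described above (illustration only, not taken from the diff; `lines` is a hypothetical recordset of depreciation lines): posting a chatter message per record inside the loop invalidates the ORM cache, so every later field access triggers a new read; collecting the messages first and posting them afterwards keeps the prefetched values usable for the whole loop.

# Illustration only, not part of the commit; assumes an Odoo environment and a
# hypothetical recordset `lines` of depreciation lines.

# Before: message_post() inside the loop invalidates the (whole) cache,
# so line.asset_id and its fields are re-read on every iteration.
for line in lines:
    line.asset_id.message_post(body="Depreciation line posted.")

# After: collect what must be posted first, then post in batch;
# the values prefetched for the first loop stay valid throughout it.
to_post = {}
for line in lines:
    to_post.setdefault(line.asset_id, []).append("Depreciation line posted.")
for asset, bodies in to_post.items():
    for body in bodies:
        asset.message_post(body=body)
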
@@ -140,8 +140,9 @@ def write(self, vals):
     def post(self):
         invoice = self._context.get('invoice', False)
         self._post_validate()
+        # Creating the analytic lines in batch is faster, as it leads to less cache invalidation.
+        self.mapped('line_ids').create_analytic_lines()
         for move in self:
-            move.line_ids.create_analytic_lines()
             if move.name == '/':
                 new_name = False
                 journal = move.journal_id
@@ -18,7 +18,5 @@ def button_cancel(self):
 
     @api.multi
     def post(self):
-        for move in self:
-            for depreciation_line in move.asset_depreciation_ids:
-                depreciation_line.post_lines_and_close_asset()
+        self.mapped('asset_depreciation_ids').post_lines_and_close_asset()
         return super(AccountMove, self).post()
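
For context, `mapped('asset_depreciation_ids')` returns the union of the depreciation lines of every move in the recordset, so `post_lines_and_close_asset()` now runs once over the whole batch instead of once per line. A small usage sketch (the ids are hypothetical):

# Usage sketch with hypothetical ids; requires an Odoo environment `env`.
moves = env['account.move'].browse([1, 2, 3])
lines = moves.mapped('asset_depreciation_ids')   # one combined recordset
lines.post_lines_and_close_asset()               # single batched call
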
@@ -486,8 +486,12 @@ def _get_move_posted_check(self):
     def create_move(self, post_move=True):
         created_moves = self.env['account.move']
         prec = self.env['decimal.precision'].precision_get('Account')
+        # `line.move_id` was invalidated from the cache at each iteration.
+        # To avoid refetching `move_id` of all lines at each iteration just to check a UserError,
+        # we use an intermediary dict which stores the information the UserError check requires.
+        line_moves = {line: line.move_id for line in self}
         for line in self:
-            if line.move_id:
+            if line_moves[line]:
                 raise UserError(_('This depreciation is already linked to a journal entry! Please post or delete it.'))
             category_id = line.asset_id.category_id
             depreciation_date = self.env.context.get('depreciation_date') or line.depreciation_date or fields.Date.context_today(self)
@@ -525,6 +529,7 @@ def create_move(self, post_move=True):
             }
             move = self.env['account.move'].create(move_vals)
             line.write({'move_id': move.id, 'move_check': True})
+            line_moves[line] = move
             created_moves |= move
 
         if post_move and created_moves:
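
The `line_moves` dict in the two hunks above is a plain Python snapshot of `move_id`, filled by one batched read before the loop; since it lives outside the ORM cache, the later `create()` calls cannot invalidate it, so the UserError pre-check no longer triggers extra queries. A stripped-down sketch of the same idea (assumed recordset `lines` and a hypothetical `_create_move_for_line()` helper, not code from the module):

# Snapshot pattern sketch; `lines` is an assumed recordset and
# _create_move_for_line() a hypothetical helper, not module code.
line_moves = {line: line.move_id for line in lines}   # one batched read
for line in lines:
    if line_moves[line]:                               # plain dict lookup, no ORM access
        continue                                       # already linked to a journal entry
    move = line._create_move_for_line()                # create() flushes the ORM cache...
    line_moves[line] = move                            # ...but the snapshot stays in sync
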
@@ -581,12 +586,19 @@ def create_grouped_move(self, post_move=True):
     @api.multi
     def post_lines_and_close_asset(self):
         # we re-evaluate the assets to determine whether we can close them
+        # `message_post` invalidates the (whole) cache.
+        # Collect the assets and lines in which a message should be posted first,
+        # then post in batch, to avoid re-fetching the same data over and over.
+        assets_to_close = self.env['account.asset.asset']
         for line in self:
-            line.log_message_when_posted()
             asset = line.asset_id
             if asset.currency_id.is_zero(asset.value_residual):
-                asset.message_post(body=_("Document closed."))
-                asset.write({'state': 'close'})
+                assets_to_close |= asset
+        self.log_message_when_posted()
+        assets_to_close.write({'state': 'close'})
+        for asset in assets_to_close:
+            asset.message_post(body=_("Document closed."))
+
 
     @api.multi
     def log_message_when_posted(self):
@@ -599,6 +611,10 @@ def _format_message(message_description, tracked_values):
             message += '%s</div>' % values
             return message
 
+        # `message_post` invalidates the (whole) cache.
+        # Collect the assets in which messages should be posted first,
+        # then post in batch, to avoid re-fetching the same data over and over.
+        assets_to_post = {}
         for line in self:
             if line.move_id and line.move_id.state == 'draft':
                 partner_name = line.asset_id.partner_id.name
@@ -607,7 +623,10 @@ def _format_message(message_description, tracked_values):
                 if partner_name:
                     msg_values[_('Partner')] = partner_name
                 msg = _format_message(_('Depreciation line posted.'), msg_values)
-                line.asset_id.message_post(body=msg)
+                assets_to_post.setdefault(line.asset_id, []).append(msg)
+        for asset, messages in assets_to_post.items():
+            for msg in messages:
+                asset.message_post(body=msg)
 
     @api.multi
     def unlink(self):
