
⚡️ Save the resized image to S3 instead of serving the original (big) image

yelizariev committed Nov 8, 2018
1 parent a5cec94 commit 1f374c4f30bb5cf3bc01fcf7e93c95e32653c0a6
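The intended behaviour can be sketched with a minimal client-side example (assumptions: a local Odoo 10 instance on localhost:8069, the standard /web/image routes, and a hypothetical attachment id 42 that is already stored on S3):

import requests

# Ask for a downscaled version of an S3-backed attachment (id 42 is hypothetical).
resp = requests.get(
    "http://localhost:8069/web/image/ir.attachment/42/datas",
    params={"width": 128, "height": 128},
    allow_redirects=False,
)

# Before this commit the 301 pointed at the original full-size object on S3;
# with this change it points at a resized copy uploaded to S3 on first request.
print(resp.status_code)              # expected: 301
print(resp.headers.get("Location"))  # URL of the resized attachment on S3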
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
from . import models
from . import controllers
@@ -4,7 +4,7 @@
"summary": """Upload attachments on Amazon S3""",
"category": "Tools",
"images": [],
"version": "10.0.1.1.2",
"version": "10.0.1.2.0",
"application": False,
"author": "IT-Projects LLC, Ildar Nasyrov",
@@ -19,6 +19,7 @@
"external_dependencies": {"python": ['boto3'], "bin": []},
"data": [
"views/ir_attachment_s3.xml",
"security/ir.model.access.csv",
],
"qweb": [
],
@@ -0,0 +1 @@
from . import main
@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
# Copyright 2018 Ivan Yelizariev <https://it-projects.info/team/yelizariev>
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
import logging
import werkzeug
import odoo
from odoo.http import request, route
from odoo.addons.web.controllers.main import Binary
# TODO some code can be part of ir_attachment_url
_logger = logging.getLogger(__name__)


class BinaryExtended(Binary):
    def redirect_to_url(self, url):
        return werkzeug.utils.redirect(url, code=301)

    @route()
def content_image(self, xmlid=None, model='ir.attachment', id=None, field='datas', filename_field='datas_fname', unique=None, filename=None, mimetype=None, download=None, width=0, height=0):
res = super(BinaryExtended, self).content_image(xmlid, model, id, field, filename_field, unique, filename, mimetype, download, width, height)
if not (res.status_code == 301 and (width or height)):
return res
        # * check that the image is stored on s3
        # * upload a resized image if needed
        # * return the url of the resized image

        # FIND ATTACHMENT. The code is copied from the binary_content method
env = request.env
# get object and content
obj = None
if xmlid:
obj = env.ref(xmlid, False)
elif id and model in env.registry:
obj = env[model].browse(int(id))
attachment = None
if model == 'ir.attachment':
attachment = obj
else:
attachment = env['ir.http'].find_field_attachment(env, model, field, obj)
if not attachment:
            # impossible case?
            _logger.error('Attachment not found')
return res
# FIX SIZES
height = int(height or 0)
width = int(width or 0)
        # cap the size at 500x500
if width > 500:
width = 500
if height > 500:
height = 500
# CHECK FOR CACHE.
        # We may have already uploaded this resized image
cache = env['ir.attachment.resized'].sudo().search([
('attachment_id', '=', attachment.id),
('width', '=', width),
('height', '=', height),
])
if cache:
url = cache.resized_attachment_id.url
return self.redirect_to_url(url)
# PREPARE CACHE
content = attachment.datas
        content = odoo.tools.image_resize_image(
            base64_source=content,
            size=(width or None, height or None),
            encoding='base64',
            filetype='PNG',
        )
resized_attachment = env['ir.attachment'].with_context(force_s3=True).create({
'name': '%sx%s %s' % (width, height, attachment.name),
'datas': content,
})
env['ir.attachment.resized'].sudo().create({
'attachment_id': attachment.id,
'width': width,
'height': height,
'resized_attachment_id': resized_attachment.id,
})
url = resized_attachment.url
return self.redirect_to_url(url)
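Once a few sizes have been requested, the cache rows created by this controller can be inspected from an odoo shell; a short sketch, reusing the hypothetical attachment id 42:

# Hypothetical odoo shell session: list the cached resized copies of one attachment.
attachment = env['ir.attachment'].browse(42)
for cached in attachment.resized_ids:
    print(cached.width, cached.height, cached.resized_attachment_id.url)

# Writing new datas on the original attachment removes these cache rows and
# their resized attachments (see the _inverse_datas change below), so stale
# copies are not served.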
@@ -1,3 +1,8 @@
`1.2.0`
-------
- **Improvement:** Save the resized image to S3 instead of serving the original (big) image
`1.1.2`
-------
@@ -1,9 +1,11 @@
# -*- coding: utf-8 -*-
# Copyright 2016-2018 Ildar Nasyrov <https://it-projects.info/team/iledarn>
# Copyright 2016-2018 Ivan Yelizariev <https://it-projects.info/team/yelizariev>
import os
import hashlib
import logging
from odoo import api, models, _
from odoo import api, models, _, fields
from odoo.tools.safe_eval import safe_eval
_logger = logging.getLogger(__name__)
@@ -15,9 +17,21 @@
found on your installation')
class IrAttachmentResized(models.Model):
_name = 'ir.attachment.resized'
    _description = 'Url to resized image'

    attachment_id = fields.Many2one('ir.attachment')
    width = fields.Integer()
    height = fields.Integer()
    resized_attachment_id = fields.Many2one('ir.attachment', ondelete='cascade')


class IrAttachment(models.Model):
    _inherit = 'ir.attachment'

    resized_ids = fields.One2many('ir.attachment.resized', 'attachment_id')

    def _get_s3_settings(self, param_name, os_var_name):
config_obj = self.env['ir.config_parameter']
res = config_obj.sudo().get_param(param_name)
@@ -62,11 +76,12 @@ def _get_s3_resource(self):
def _inverse_datas(self):
# set s3_records to empty recordset
condition = self._get_s3_settings('s3.condition', 'S3_CONDITION')
if condition:
if condition and not self.env.context.get('force_s3'):
condition = safe_eval(condition, mode="eval")
s3_records = self.sudo().search([('id', 'in', self.ids)] + condition)
else:
# if there is no condition then store all attachments on s3
# if there is no condition or force_s3 in context
# then store all attachments on s3
s3_records = self
if s3_records:
@@ -78,7 +93,9 @@ def _inverse_datas(self):
s3_records = s3_records._filter_protected_attachments()
s3_records = s3_records.filtered(lambda r: r.type != 'url')
resized_to_remove = self.env['ir.attachment.resized'].sudo()
        # The datas field somehow comes back empty from the
        # ``s3_records = self.sudo().search([('id', 'in', self.ids)] + condition)``
        # search for non-superusers, even though it is present in the original
        # recordset. Iterate over the original records (which still carry datas)
        # that intersect with the search result.
        for attach in self & s3_records:
resized_to_remove |= attach.sudo().resized_ids
value = attach.datas
bin_data = value and value.decode('base64') or ''
fname = hashlib.sha1(bin_data).hexdigest()
@@ -102,4 +119,6 @@ def _inverse_datas(self):
}
super(IrAttachment, attach.sudo()).write(vals)
resized_to_remove.mapped('resized_attachment_id').unlink()
resized_to_remove.unlink()
super(IrAttachment, self - s3_records)._inverse_datas()
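The force_s3 context key can also be used on its own; a minimal sketch, assuming S3 credentials are configured and resized_base64 holds base64-encoded image data:

# Create an attachment that skips the s3.condition filter and is always
# pushed to S3 (resized_base64 is a placeholder, not part of this module).
resized = env['ir.attachment'].with_context(force_s3=True).create({
    'name': '128x128 example.png',
    'datas': resized_base64,
})
# With S3 configured, the record is expected to be stored as a url-type
# attachment pointing at the uploaded object.
print(resized.type, resized.url)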
@@ -0,0 +1,2 @@
id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
access_ir_attachment_resized,access_ir_attachment_resized,model_ir_attachment_resized,base.group_user,1,0,0,0
