
Merge branch '10.0' into 10.0-project_timelog-port

yelizariev committed Nov 21, 2018
2 parents fe05947 + bc375d9 commit e9533dfae81ce6c59b5b042d3f2d507ebace6695
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-

from . import models
from . import controllers
@@ -4,7 +4,7 @@
"summary": """Upload attachments on Amazon S3""",
"category": "Tools",
"images": [],
"version": "10.0.1.1.2",
"version": "10.0.1.2.0",
"application": False,

"author": "IT-Projects LLC, Ildar Nasyrov",
@@ -19,6 +19,7 @@
"external_dependencies": {"python": ['boto3'], "bin": []},
"data": [
"views/ir_attachment_s3.xml",
"security/ir.model.access.csv",
],
"qweb": [
],
@@ -0,0 +1 @@
from . import main
@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
# Copyright 2018 Ivan Yelizariev <https://it-projects.info/team/yelizariev>
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
import logging
import werkzeug

import odoo
from odoo.http import request, route
from odoo.addons.web.controllers.main import Binary
# TODO: some of this code could be moved to ir_attachment_url

_logger = logging.getLogger(__name__)


class BinaryExtended(Binary):

    def redirect_to_url(self, url):
        return werkzeug.utils.redirect(url, code=301)

    @route()
    def content_image(self, xmlid=None, model='ir.attachment', id=None, field='datas', filename_field='datas_fname', unique=None, filename=None, mimetype=None, download=None, width=0, height=0):

        res = super(BinaryExtended, self).content_image(xmlid, model, id, field, filename_field, unique, filename, mimetype, download, width, height)

        if not (res.status_code == 301 and (width or height)):
            return res

        # * check that the image is stored on s3
        # * upload a resized image if needed
        # * return the url of the resized image

        # FIND ATTACHMENT. The code is copied from the binary_content method
        env = request.env
        # get object and content
        obj = None
        if xmlid:
            obj = env.ref(xmlid, False)
        elif id and model in env.registry:
            obj = env[model].browse(int(id))

        attachment = None
        if model == 'ir.attachment':
            attachment = obj
        else:
            attachment = env['ir.http'].find_field_attachment(env, model, field, obj)

        if not attachment:
            # impossible case?
            _logger.error('Attachment is not found')
            return res

        # FIX SIZES
        height = int(height or 0)
        width = int(width or 0)
        # resize to at most 500x500
        if width > 500:
            width = 500
        if height > 500:
            height = 500

        # CHECK FOR CACHE.
        # We may have already uploaded this resized image
        cache = env['ir.attachment.resized'].sudo().search([
            ('attachment_id', '=', attachment.id),
            ('width', '=', width),
            ('height', '=', height),
        ])
        if cache:
            url = cache.resized_attachment_id.url
            return self.redirect_to_url(url)

        # PREPARE CACHE
        content = attachment.datas
        content = odoo.tools.image_resize_image(base64_source=content, size=(width or None, height or None), encoding='base64', filetype='PNG')
        resized_attachment = env['ir.attachment'].with_context(force_s3=True).create({
            'name': '%sx%s %s' % (width, height, attachment.name),
            'datas': content,
        })

        env['ir.attachment.resized'].sudo().create({
            'attachment_id': attachment.id,
            'width': width,
            'height': height,
            'resized_attachment_id': resized_attachment.id,
        })

        url = resized_attachment.url
        return self.redirect_to_url(url)
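
# For illustration only (the model, record id and field below are hypothetical):
# a request such as
#
#     GET /web/image/product.product/42/image_medium?width=128&height=128
#
# reaches this override. When the parent controller has already answered with a
# 301 redirect (i.e. the attachment is stored by URL, e.g. on S3) and a width or
# height is requested, the browser is redirected to a resized copy uploaded to
# the bucket instead of to the original image.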
@@ -1,3 +1,8 @@
`1.2.0`
-------

- **Improvement:** Save the resized image to S3 instead of serving the original (big) image

`1.1.2`
-------

@@ -5,40 +5,16 @@
Installation
============

* `Install <https://odoo-development.readthedocs.io/en/latest/odoo/usage/install-module.html>`__ this module in the usual way
* `Using this quickstart instruction <https://boto3.readthedocs.io/en/latest/guide/quickstart.html>`__ install the boto3 library and obtain credentials for it (a short verification snippet follows this list)
* `Using this instruction <http://mikeferrier.com/2011/10/27/granting-access-to-a-single-s3-bucket-using-amazon-iam>`__ grant access to your S3 bucket
* Set your S3 bucket as public
* Optionally, add the following parameter to prevent excessive logging from the boto3 library::

    --log-handler=boto3.resources.action:WARNING
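
To check that the credentials and the bucket are reachable before configuring Odoo, you can run a short boto3 snippet; the key values and bucket name below are placeholders::

    import boto3

    s3 = boto3.resource(
        's3',
        aws_access_key_id='YOUR_ACCESS_KEY_ID',
        aws_secret_access_key='YOUR_SECRET_KEY',
    )
    # list a few object keys to confirm access to the bucket
    print([obj.key for obj in s3.Bucket('mybucket').objects.limit(5)])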

Configuration
=============

* To enable the feature of linking existing urls to binary fields:

* Start Odoo with ``--load=web,ir_attachment_url`` or set the ``server_wide_modules`` option in the Odoo configuration file:

::

[options]
# (...)
server_wide_modules = web,ir_attachment_url
# (...)

* `Enable technical features <https://odoo-development.readthedocs.io/en/latest/odoo/usage/technical-features.html>`__
* Open menu ``Settings >> Parameters >> System Parameters`` and specify the following parameters there

* ``s3.bucket``: the name of your bucket (e.g. ``mybucket``)
* ``s3.condition``: only attachments that match this domain are sent to S3 (e.g. ``[('res_model', 'in', ['product.image'])]``). This is the way to specify which models with ``fields.Binary`` fields should be stored on S3 instead of the local file storage or the database. Leave it empty to store all ``fields.Binary`` data and ordinary attachments on S3.
* ``s3.access_key_id``: S3 access key ID
* ``s3.secret_key``: S3 secret access key

The settings are also available from the ``Settings >> Technical >> Database Structure >> S3 Settings``.

S3
--
IAM
---

The minimal access policy for the S3 credentials is as follows::

@@ -63,7 +39,31 @@ Minimal access policy for s3 credentials are as following::
}
You can also remove ``"s3:CreateBucket"`` if the bucket already exists.

Configuration
=============

* To enable the feature of linking existing urls to binary fields:

* Start Odoo with ``--load=web,ir_attachment_url`` or set the ``server_wide_modules`` option in the Odoo configuration file:

::

[options]
# (...)
server_wide_modules = web,ir_attachment_url
# (...)

* `Enable technical features <https://odoo-development.readthedocs.io/en/latest/odoo/usage/technical-features.html>`__
* Open menu ``Settings >> Parameters >> System Parameters`` and specify the following parameters there

* ``s3.bucket``: the name of your bucket (e.g. ``mybucket``)
* ``s3.condition``: only attachments that match this domain are sent to S3 (e.g. ``[('res_model', 'in', ['product.image'])]``). This is the way to specify which models with ``fields.Binary`` fields should be stored on S3 instead of the local file storage or the database. Leave it empty to store all ``fields.Binary`` data and ordinary attachments on S3.
* ``s3.access_key_id``: S3 access key ID
* ``s3.secret_key``: S3 secret access key

The settings are also available from the ``Settings >> Technical >> Database Structure >> S3 Settings``.
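
The same parameters can also be set from an Odoo shell; this is just a convenience sketch using the standard ``ir.config_parameter`` API, with placeholder values::

    env['ir.config_parameter'].sudo().set_param('s3.bucket', 'mybucket')
    env['ir.config_parameter'].sudo().set_param('s3.condition', "[('res_model', 'in', ['product.image'])]")
    env['ir.config_parameter'].sudo().set_param('s3.access_key_id', 'YOUR_ACCESS_KEY_ID')
    env['ir.config_parameter'].sudo().set_param('s3.secret_key', 'YOUR_SECRET_KEY')
    env.cr.commit()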

Usage
=====
@@ -1,9 +1,11 @@
# -*- coding: utf-8 -*-
# Copyright 2016-2018 Ildar Nasyrov <https://it-projects.info/team/iledarn>
# Copyright 2016-2018 Ivan Yelizariev <https://it-projects.info/team/yelizariev>
import os
import hashlib
import logging

from odoo import api, models, _
from odoo import api, models, _, fields
from odoo.tools.safe_eval import safe_eval

_logger = logging.getLogger(__name__)
@@ -15,9 +17,21 @@
found on your installation')


class IrAttachmentResized(models.Model):
    _name = 'ir.attachment.resized'
    _description = 'Url to resized image'

    attachment_id = fields.Many2one('ir.attachment')
    width = fields.Integer()
    height = fields.Integer()
    resized_attachment_id = fields.Many2one('ir.attachment', ondelete='cascade')


class IrAttachment(models.Model):
    _inherit = 'ir.attachment'

    resized_ids = fields.One2many('ir.attachment.resized', 'attachment_id')

    def _get_s3_settings(self, param_name, os_var_name):
        config_obj = self.env['ir.config_parameter']
        res = config_obj.sudo().get_param(param_name)
@@ -62,11 +76,12 @@ def _get_s3_resource(self):
    def _inverse_datas(self):
        # set s3_records to empty recordset
        condition = self._get_s3_settings('s3.condition', 'S3_CONDITION')
        if condition:
        if condition and not self.env.context.get('force_s3'):
            condition = safe_eval(condition, mode="eval")
            s3_records = self.sudo().search([('id', 'in', self.ids)] + condition)
        else:
            # if there is no condition then store all attachments on s3
            # if there is no condition or force_s3 in context
            # then store all attachments on s3
            s3_records = self

        if s3_records:
@@ -78,7 +93,9 @@ def _inverse_datas(self):
            s3_records = s3_records._filter_protected_attachments()
            s3_records = s3_records.filtered(lambda r: r.type != 'url')

            resized_to_remove = self.env['ir.attachment.resized'].sudo()
            # The ``datas`` field somehow comes back empty from the
            # ``s3_records = self.sudo().search([('id', 'in', self.ids)] + condition)``
            # search for non-superusers, while it is still present in the original
            # recordset. So iterate over the original recordset (with ``datas``)
            # where it intersects with the search result.
            for attach in self & s3_records:
                resized_to_remove |= attach.sudo().resized_ids
                value = attach.datas
                bin_data = value and value.decode('base64') or ''
                fname = hashlib.sha1(bin_data).hexdigest()
@@ -102,4 +119,6 @@ def _inverse_datas(self):
                }
                super(IrAttachment, attach.sudo()).write(vals)

            resized_to_remove.mapped('resized_attachment_id').unlink()
            resized_to_remove.unlink()
        super(IrAttachment, self - s3_records)._inverse_datas()
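
# Usage sketch (hypothetical values): the ``force_s3`` context key checked in
# ``_inverse_datas`` above bypasses the ``s3.condition`` filter, so an
# attachment can be pushed to S3 explicitly, e.g.:
#
#     env['ir.attachment'].with_context(force_s3=True).create({
#         'name': 'thumbnail.png',
#         'datas': some_base64_png,  # base64-encoded image data
#     })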
@@ -0,0 +1,2 @@
id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
access_ir_attachment_resized,access_ir_attachment_resized,model_ir_attachment_resized,base.group_user,1,0,0,0
@@ -4,7 +4,7 @@
"summary": """Use attachment URL and upload data to external storage""",
"category": "Tools",
"images": [],
"version": "10.0.1.1.4",
"version": "10.0.1.1.6",
"application": False,

"author": "IT-Projects LLC, Ildar Nasyrov",
@@ -1,3 +1,13 @@
`1.1.6`
-------

- **Fix:** Calling the ``image_resize_image`` function raised ``binascii.Error: decoding with base64 codec failed (Error: Incorrect padding)``, because the value of the binary field is a URL, not a base64 string.

`1.1.5`
-------

- **Fix:** Product Variant images were downloaded to the server instead of passing the URL

`1.1.4`
-------

@@ -29,4 +29,55 @@ def is_url(value):
    return re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', value)


super_image_resize_image = tools.image_resize_image


def updated_image_resize_image(base64_source, size=(1024, 1024), encoding='base64', filetype=None, avoid_if_small=False):
    if is_url(base64_source):
        return base64_source
    return super_image_resize_image(base64_source, size=size, encoding=encoding, filetype=filetype, avoid_if_small=avoid_if_small)


def updated_image_resize_image_big(base64_source, size=(1024, 1024), encoding='base64', filetype=None, avoid_if_small=True):
    """ copy-pasted from odoo/tools/image.py::image_resize_image_big
    because we rewrite image_resize_image function.
    """
    return updated_image_resize_image(base64_source, size, encoding, filetype, avoid_if_small)


def updated_image_resize_image_medium(base64_source, size=(128, 128), encoding='base64', filetype=None, avoid_if_small=False):
    """ copy-pasted from odoo/tools/image.py::image_resize_image_medium
    because we rewrite image_resize_image function.
    """
    return updated_image_resize_image(base64_source, size, encoding, filetype, avoid_if_small)


def updated_image_resize_image_small(base64_source, size=(64, 64), encoding='base64', filetype=None, avoid_if_small=False):
    """ copy-pasted from odoo/tools/image.py::image_resize_image_small
    because we rewrite image_resize_image function.
    """
    return updated_image_resize_image(base64_source, size, encoding, filetype, avoid_if_small)


def updated_image_get_resized_images(base64_source, return_big=False, return_medium=True, return_small=True,
                                     big_name='image', medium_name='image_medium', small_name='image_small',
                                     avoid_resize_big=True, avoid_resize_medium=False, avoid_resize_small=False):
    """ copy-pasted from odoo/tools/image.py::image_get_resized_images
    because we rewrite image_resize_image function.
    """
    return_dict = dict()
    if return_big:
        return_dict[big_name] = updated_image_resize_image_big(base64_source, avoid_if_small=avoid_resize_big)
    if return_medium:
        return_dict[medium_name] = updated_image_resize_image_medium(base64_source, avoid_if_small=avoid_resize_medium)
    if return_small:
        return_dict[small_name] = updated_image_resize_image_small(base64_source, avoid_if_small=avoid_resize_small)
    return return_dict


tools.image_resize_images = updated_image_resize_images
tools.image_resize_image = updated_image_resize_image
tools.image_resize_image_big = updated_image_resize_image_big
tools.image_resize_image_medium = updated_image_resize_image_medium
tools.image_resize_image_small = updated_image_resize_image_small
tools.image_get_resized_images = updated_image_get_resized_images
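
# Illustration (hypothetical URL): after these assignments the resize helpers
# leave URL values untouched and only resize real base64 image data, e.g.
# ``tools.image_resize_image('https://mybucket.s3.amazonaws.com/abc.png')``
# simply returns the URL unchanged, while base64-encoded images are resized
# as before via ``super_image_resize_image``.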