New flush strategies => Metadata on SQL #144747

Closed · wants to merge 5 commits
80 changes: 30 additions & 50 deletions addons/account/models/account_analytic_account.py
@@ -18,81 +18,61 @@ class AccountAnalyticAccount(models.Model):
@api.depends('line_ids')
def _compute_invoice_count(self):
sale_types = self.env['account.move'].get_sale_types(include_receipts=True)

query = self.env['account.move.line']._search([
('parent_state', '=', 'posted'),
('move_id.move_type', 'in', sale_types),
])
query.add_where(
'account_move_line.analytic_distribution ?| %s',
[[str(account_id) for account_id in self.ids]],
)

query_string, query_param = query.select(
'jsonb_object_keys(account_move_line.analytic_distribution) as account_id',
'COUNT(DISTINCT(account_move_line.move_id)) as move_count',
data = self.env['account.move.line']._read_group(
[
('parent_state', '=', 'posted'),
('move_id.move_type', 'in', sale_types),
('analytic_distribution', 'in', self.ids),
],
['analytic_distribution'],
['move_id:count_distinct'],
)
query_string = f"{query_string} GROUP BY jsonb_object_keys(account_move_line.analytic_distribution)"

self._cr.execute(query_string, query_param)
data = {int(record.get('account_id')): record.get('move_count') for record in self._cr.dictfetchall()}
data = {int(account_id): move_count for account_id, move_count in data}
for account in self:
account.invoice_count = data.get(account.id, 0)

@api.depends('line_ids')
def _compute_vendor_bill_count(self):
purchase_types = self.env['account.move'].get_purchase_types(include_receipts=True)

query = self.env['account.move.line']._search([
('parent_state', '=', 'posted'),
('move_id.move_type', 'in', purchase_types),
])
query.add_where(
'account_move_line.analytic_distribution ?| %s',
[[str(account_id) for account_id in self.ids]],
)

query_string, query_param = query.select(
'jsonb_object_keys(account_move_line.analytic_distribution) as account_id',
'COUNT(DISTINCT(account_move_line.move_id)) as move_count',
data = self.env['account.move.line']._read_group(
[
('parent_state', '=', 'posted'),
('move_id.move_type', 'in', purchase_types),
('analytic_distribution', 'in', self.ids),
],
['analytic_distribution'],
['move_id:count_distinct'],
)
query_string = f"{query_string} GROUP BY jsonb_object_keys(account_move_line.analytic_distribution)"

self._cr.execute(query_string, query_param)
data = {int(record.get('account_id')): record.get('move_count') for record in self._cr.dictfetchall()}
data = {int(account_id): move_count for account_id, move_count in data}
for account in self:
account.vendor_bill_count = data.get(account.id, 0)

def action_view_invoice(self):
self.ensure_one()
query = self.env['account.move.line']._search([('move_id.move_type', 'in', self.env['account.move'].get_sale_types())])
query.add_where('analytic_distribution ? %s', [str(self.id)])
query_string, query_param = query.select('DISTINCT account_move_line.move_id')
self._cr.execute(query_string, query_param)
move_ids = [line.get('move_id') for line in self._cr.dictfetchall()]
result = {
account_move_lines = self.env['account.move.line'].search_fetch([
('move_id.move_type', 'in', self.env['account.move'].get_sale_types()),
('analytic_distribution', 'in', self.ids),
], ['move_id'])
return {
"type": "ir.actions.act_window",
"res_model": "account.move",
"domain": [('id', 'in', move_ids)],
"domain": [('id', 'in', account_move_lines.move_id.ids)],
"context": {"create": False, 'default_move_type': 'out_invoice'},
"name": _("Customer Invoices"),
'view_mode': 'tree,form',
}
return result

def action_view_vendor_bill(self):
self.ensure_one()
query = self.env['account.move.line']._search([('move_id.move_type', 'in', self.env['account.move'].get_purchase_types())])
query.add_where('analytic_distribution ? %s', [str(self.id)])
query_string, query_param = query.select('DISTINCT account_move_line.move_id')
self._cr.execute(query_string, query_param)
move_ids = [line.get('move_id') for line in self._cr.dictfetchall()]
result = {
account_move_lines = self.env['account.move.line'].search_fetch([
('move_id.move_type', 'in', self.env['account.move'].get_purchase_types()),
('analytic_distribution', 'in', self.ids),
], ['move_id'])
return {
"type": "ir.actions.act_window",
"res_model": "account.move",
"domain": [('id', 'in', move_ids)],
"domain": [('id', 'in', account_move_lines.move_id.ids)],
"context": {"create": False, 'default_move_type': 'in_invoice'},
"name": _("Vendor Bills"),
'view_mode': 'tree,form',
}
return result
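Both compute methods above trade the hand-built `Query`/`add_where`/`select` SQL for a single `_read_group` call; the `('analytic_distribution', 'in', ids)` leaf and the group-by on `analytic_distribution` only work because of the `analytic_mixin.py` overrides further down in this diff. A minimal sketch of the resulting pattern, not part of the PR, with `env` and `analytic_account_ids` as assumed stand-ins:

```python
# Aggregate posted move lines per analytic account (illustrative sketch).
data = env['account.move.line']._read_group(
    [
        ('parent_state', '=', 'posted'),
        ('analytic_distribution', 'in', analytic_account_ids),  # JSONB key membership
    ],
    ['analytic_distribution'],   # compiled to jsonb_object_keys(...), see analytic_mixin.py
    ['move_id:count_distinct'],
)
# _read_group returns one tuple per group: (groupby value, aggregate value).
# The JSONB object keys come back as strings, hence the int() cast.
move_count_per_account = {int(account_id): count for account_id, count in data}
```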
7 changes: 4 additions & 3 deletions addons/account/models/account_bank_statement_line.py
@@ -181,17 +181,15 @@ def _compute_running_balance(self):
# the user can split those lines, but their balance should be the same as the previous posted line
# we do the same for the canceled lines, in order to keep using them as anchor points

self.statement_id.flush_model(['balance_start', 'first_line_index'])
self.flush_model(['internal_index', 'date', 'journal_id', 'statement_id', 'amount', 'state'])
record_by_id = {x.id: x for x in self}

for journal in self.journal_id:
journal_lines_indexes = self.filtered(lambda line: line.journal_id == journal)\
.sorted('internal_index')\
.mapped('internal_index')
min_index, max_index = journal_lines_indexes[0], journal_lines_indexes[-1]

# Find the oldest index for each journal.
self.env['account.bank.statement'].flush_model(['first_line_index', 'journal_id', 'balance_start'])
self._cr.execute(
"""
SELECT first_line_index, COALESCE(balance_start, 0.0)
@@ -213,6 +211,9 @@ def _compute_running_balance(self):
extra_clause = "AND st_line.internal_index >= %s"
extra_params.append(starting_index)

self.flush_model(['amount', 'move_id', 'statement_id', 'internal_index'])
self.env['account.bank.statement'].flush_model(['first_line_index', 'balance_start'])
self.env['account.move'].flush_model(['state', 'journal_id'])
self._cr.execute(
f"""
SELECT
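The rewritten compute keeps its raw cursor queries but moves the `flush_model` calls next to them and narrows them to exactly the columns each query reads. A rough sketch of that flushing strategy, assuming the surrounding method context; the SELECT stands in for the statement truncated above:

```python
# Flush only what the upcoming raw SQL reads, right before executing it.
self.flush_model(['amount', 'move_id', 'statement_id', 'internal_index'])
self.env['account.bank.statement'].flush_model(['first_line_index', 'balance_start'])
self.env['account.move'].flush_model(['state', 'journal_id'])
self._cr.execute(
    """
    SELECT st_line.id, st_line.amount   -- placeholder for the running-balance query
      FROM account_bank_statement_line st_line
     WHERE st_line.internal_index >= %s
    """,
    [starting_index],
)
```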
24 changes: 13 additions & 11 deletions addons/account/models/account_move_line.py
@@ -1689,15 +1689,18 @@ def copy_data(self, default=None):
line._copy_data_extend_business_fields(values)
return data_list

def _field_to_sql(self, alias: str, fname: str, query: (Query | None) = None) -> SQL:
def _field_to_sql(self, alias: str, fname: str, query: (Query | None) = None, flush: bool = True) -> SQL:
if fname != 'payment_date':
return super()._field_to_sql(alias, fname, query)
return super()._field_to_sql(alias, fname, query, flush)
return SQL("""
CASE
WHEN discount_date >= %(today)s THEN discount_date
ELSE date_maturity
END
""", today=fields.Date.context_today(self))
WHEN %(discount_date)s >= %(today)s THEN %(discount_date)s
ELSE %(date_maturity)s
END""",
today=fields.Date.context_today(self),
discount_date=super()._field_to_sql(alias, "discount_date", query, flush),
date_maturity=super()._field_to_sql(alias, "date_maturity", query, flush),
)

def _order_field_to_sql(self, alias: str, field_name: str, direction: SQL, nulls: SQL, query: Query) -> SQL:
if field_name != 'payment_date':
@@ -1715,16 +1718,15 @@ def _search_panel_domain_image(self, field_name, domain, set_count=False, limit=
# Override in order to not read the complete move line table and use the index instead
query = self._search(domain, limit=1)
query.add_where('account.id = account_move_line.account_id')
query_str, query_param = query.select()
self.env.cr.execute(f"""
id_rows = self.env.execute_query(SQL("""
SELECT account.root_id
FROM account_account account,
LATERAL ({query_str}) line
LATERAL (%s) line
WHERE account.company_id IN %s
""", query_param + [tuple(self.env.companies.ids)])
""", query.select(), tuple(self.env.companies.ids)))
return {
root.id: {'id': root.id, 'display_name': root.display_name}
for root in self.env['account.root'].browse(id for [id] in self.env.cr.fetchall())
for root in self.env['account.root'].browse(id_ for [id_] in id_rows)
}

# -------------------------------------------------------------------------
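Both hunks in this file lean on the `SQL` wrapper from `odoo.tools`: a `%s` placeholder accepts either a plain value, bound as a query parameter, or another `SQL` object, spliced in together with its own parameters. That is what lets `_field_to_sql` build its CASE expression from the `super()` columns and lets `_search_panel_domain_image` embed `query.select()` in a LATERAL join. A small sketch under assumed table and column names:

```python
from odoo.tools import SQL

# Compose a condition and a query; parameters travel with each SQL fragment.
condition = SQL("move.state = %s", 'posted')
query = SQL("SELECT move.id FROM account_move move WHERE %s", condition)

# env.execute_query() runs the composed query and returns the fetched rows.
rows = self.env.execute_query(query)      # e.g. [(42,), (43,)]
posted_move_ids = [move_id for (move_id,) in rows]
```

Per the PR title, the intent seems to be that such `SQL` fragments also carry the metadata (tables and fields to flush) needed to flush the right records before execution, instead of relying on broad manual flushes.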
16 changes: 10 additions & 6 deletions addons/account/models/partner.py
@@ -12,6 +12,7 @@
from odoo.tools import DEFAULT_SERVER_DATETIME_FORMAT, mute_logger
from odoo.exceptions import ValidationError, UserError
from odoo.addons.base.models.res_partner import WARNING_MESSAGE, WARNING_HELP
from odoo.tools import SQL

_logger = logging.getLogger(__name__)

@@ -816,13 +817,16 @@ def _retrieve_partner_with_vat(self, vat, extra_domain):
vat_prefix_regex = f'({country_prefix})?'
else:
vat_prefix_regex = '([A-z]{2})?'
query = self.env['res.partner']._search(extra_domain + [('active', '=', True)], limit=1)
query.add_where("res_partner.vat ~ %s", ['^%s0*%s$' % (vat_prefix_regex, vat_only_numeric)])
query_str, params = query.select()
self._cr.execute(query_str, params)
partner_row = self._cr.fetchone()
Partner = self.env['res.partner']
query = Partner._search(extra_domain + [('active', '=', True)], limit=1)
query.add_where(SQL(
"%s ~ %s",
Partner._field_to_sql(Partner._table, 'vat'),
f'^{vat_prefix_regex}0*{vat_only_numeric}$',
))
partner_row = list(query)
if partner_row:
partner = self.env['res.partner'].browse(partner_row[0])
partner = Partner.browse(partner_row[0])

return partner

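The regex lookup above now attaches its raw condition to the ORM query as an `SQL` object and reads the matching ids by iterating the `Query` directly, instead of calling `cr.execute()` on the rendered string. A condensed sketch, assuming a model method context; the VAT regex value is illustrative:

```python
from odoo.tools import SQL

Partner = self.env['res.partner']
query = Partner._search([('active', '=', True)], limit=1)
query.add_where(SQL(
    "%s ~ %s",
    Partner._field_to_sql(Partner._table, 'vat'),  # column reference as an SQL fragment
    '^(BE)?0*123456789$',                          # illustrative VAT regex
))
matching_ids = list(query)                         # iterating the Query yields matching ids
partner = Partner.browse(matching_ids[0]) if matching_ids else Partner
```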
78 changes: 40 additions & 38 deletions addons/analytic/models/analytic_mixin.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields, api, _
from odoo.tools import SQL, Query
from odoo.tools.float_utils import float_round, float_compare
from odoo.exceptions import UserError, ValidationError

@@ -12,11 +13,6 @@ class AnalyticMixin(models.AbstractModel):
'Analytic Distribution',
compute="_compute_analytic_distribution", store=True, copy=True, readonly=False,
)
# Json non stored to be able to search on analytic_distribution.
analytic_distribution_search = fields.Json(
store=False,
search="_search_analytic_distribution"
)
analytic_precision = fields.Integer(
store=False,
default=lambda self: self.env['decimal.precision'].precision_get("Percentage Analytic"),
@@ -36,40 +32,52 @@ def init(self):
self.env.cr.execute(query)
super().init()

@api.model
def fields_get(self, allfields=None, attributes=None):
""" Hide analytic_distribution_search from filterable/searchable fields"""
res = super().fields_get(allfields, attributes)
if res.get('analytic_distribution_search'):
res['analytic_distribution_search']['searchable'] = False
return res

def _compute_analytic_distribution(self):
pass

def _search_analytic_distribution(self, operator, value):
if operator not in ['=', '!=', 'ilike', 'not ilike'] or not isinstance(value, (str, bool)):
def _condition_to_sql(self, alias: str, fname: str, operator: str, value, query: Query) -> SQL:
# Don't use this override when account_report_analytic_groupby is set in the context.
# In that case `analytic_distribution` does not have the same format and the table is a temporary one,
# see _prepare_lines_for_analytic_groupby.
if fname != 'analytic_distribution' or self.env.context.get('account_report_analytic_groupby'):
return super()._condition_to_sql(alias, fname, operator, value, query)

if operator not in ('=', '!=', 'ilike', 'not ilike', 'in', 'not in'):
raise UserError(_('Operation not supported'))
operator_name_search = '=' if operator in ('=', '!=') else 'ilike'
account_ids = list(self.env['account.analytic.account']._name_search(name=value, operator=operator_name_search))

query = f"""
SELECT id
FROM {self._table}
WHERE analytic_distribution ?| array[%s]
"""
operator_inselect = 'inselect' if operator in ('=', 'ilike') else 'not inselect'
return [('id', operator_inselect, (query, [[str(account_id) for account_id in account_ids]]))]
if operator in ('=', '!=') and isinstance(value, bool):
return super()._condition_to_sql(alias, fname, operator, value, query)

if isinstance(value, str) and operator in ('=', '!=', 'ilike', 'not ilike'):
value = list(self.env['account.analytic.account']._name_search(
name=value, operator=('=' if operator in ('=', '!=') else 'ilike'),
))
operator = 'in' if operator in ('=', 'ilike') else 'not in'

@api.model
def _search(self, domain, offset=0, limit=None, order=None, access_rights_uid=None):
domain = self._apply_analytic_distribution_domain(domain)
return super()._search(domain, offset, limit, order, access_rights_uid)
analytic_distribution_sql = self._field_to_sql(alias, 'analytic_distribution', query)
value = [str(id_) for id_ in value if id_] # list of ids -> list of string
if operator == 'in': # 'in' -> ?|
return SQL(
"%s ?| ARRAY[%s]",
analytic_distribution_sql,
value,
)
if operator == 'not in':
return SQL(
"(NOT %s ?| ARRAY[%s] OR %s IS NULL)",
analytic_distribution_sql,
value,
analytic_distribution_sql,
)
raise UserError(_('Operation not supported'))

@api.model
def read_group(self, domain, fields, groupby, offset=0, limit=None, orderby=False, lazy=True):
domain = self._apply_analytic_distribution_domain(domain)
return super().read_group(domain, fields, groupby, offset, limit, orderby, lazy)
def _read_group_groupby(self, groupby_spec: str, query: Query) -> SQL:
if groupby_spec == 'analytic_distribution':
return SQL(
'jsonb_object_keys(%s)',
self._field_to_sql(self._table, 'analytic_distribution', query),
)
return super()._read_group_groupby(groupby_spec, query)

def write(self, vals):
""" Format the analytic_distribution float value, so equality on analytic_distribution can be done """
@@ -106,9 +114,3 @@ def _sanitize_values(self, vals, decimal_precision):
vals['analytic_distribution'] = vals.get('analytic_distribution') and {
account_id: float_round(distribution, decimal_precision) for account_id, distribution in vals['analytic_distribution'].items()}
return vals

def _apply_analytic_distribution_domain(self, domain):
return [
('analytic_distribution_search', leaf[1], leaf[2]) if len(leaf) == 3 and leaf[0] == 'analytic_distribution' and isinstance(leaf[2], str) else leaf
for leaf in domain
]
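With the `analytic_distribution_search` proxy field and the `_search`/`read_group` domain rewriting removed, searching and grouping on `analytic_distribution` goes through `_condition_to_sql` and `_read_group_groupby` directly. A sketch of what a caller now sees, with the roughly generated SQL shown as comments; the account ids are illustrative:

```python
# Searching by analytic account ids uses the JSONB key-existence operator:
lines = self.env['account.move.line'].search([
    ('analytic_distribution', 'in', [42, 43]),
])
# condition generated by _condition_to_sql (roughly):
#   "account_move_line"."analytic_distribution" ?| ARRAY['42', '43']
# and for 'not in', NULL distributions are kept:
#   (NOT "account_move_line"."analytic_distribution" ?| ARRAY['42', '43']
#    OR "account_move_line"."analytic_distribution" IS NULL)

# Grouping on the field yields one group per analytic account key:
counts = self.env['account.move.line']._read_group(
    [('analytic_distribution', 'in', [42, 43])],
    ['analytic_distribution'],    # jsonb_object_keys(...) via _read_group_groupby
    ['__count'],
)
```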
14 changes: 7 additions & 7 deletions addons/calendar/models/res_partner.py
@@ -4,6 +4,7 @@
from datetime import datetime

from odoo import api, fields, models
from odoo.tools import SQL


class Partner(models.Model):
@@ -29,16 +30,15 @@ def _compute_meeting(self):
)

query = self.env['calendar.event']._search([]) # ir.rules will be applied
query_str, params = query.subselect()

self.env.cr.execute(f"""
meeting_data = self.env.execute_query(SQL("""
SELECT res_partner_id, calendar_event_id, count(1)
FROM calendar_event_res_partner_rel
WHERE res_partner_id IN %s AND calendar_event_id IN {query_str}
WHERE res_partner_id IN %s AND calendar_event_id IN %s
GROUP BY res_partner_id, calendar_event_id
""", [tuple(all_partners.ids)] + params)

meeting_data = self.env.cr.fetchall()
""",
all_partners._ids,
query.subselect(),
))

# Create a dict {partner_id: event_ids} and fill with events linked to the partner
meetings = {}
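The meeting query keeps its raw SELECT but now receives the ir.rule-filtered event ids as a nested subselect: `query.subselect()` returns an `SQL` fragment that is spliced into the outer statement together with its own parameters. A condensed sketch, assuming a model method context:

```python
from odoo.tools import SQL

# query.subselect() yields "(SELECT ... )" as an SQL fragment carrying its parameters,
# so it can sit inside a larger hand-written statement.
query = self.env['calendar.event']._search([])   # ir.rules applied here
sql = SQL(
    "SELECT rel.res_partner_id FROM calendar_event_res_partner_rel rel"
    " WHERE rel.calendar_event_id IN %s",
    query.subselect(),
)
partner_ids_with_meetings = [pid for (pid,) in self.env.execute_query(sql)]
```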
1 change: 1 addition & 0 deletions addons/hr_timesheet/models/project_task.py
@@ -9,6 +9,7 @@
PROJECT_TASK_READABLE_FIELDS = {
'allow_timesheets',
'analytic_account_active',
'analytic_account_id', # To compute `analytic_account_active`
'effective_hours',
'encode_uom_in_days',
'allocated_hours',
8 changes: 5 additions & 3 deletions addons/mail/models/discuss/res_partner.py
@@ -2,6 +2,7 @@

from odoo import api, fields, models
from odoo.osv import expression
from odoo.tools import SQL


class ResPartner(models.Model):
@@ -41,9 +42,10 @@ def search_for_channel_invite(self, search_term, channel_id=None, limit=30):
domain = expression.AND([domain, [("channel_ids", "not in", channel.id)]])
if channel.group_public_id:
domain = expression.AND([domain, [("user_ids.groups_id", "in", channel.group_public_id.id)]])
query = self.env["res.partner"]._search(domain, order="name, id")
query.order = 'LOWER("res_partner"."name"), "res_partner"."id"' # bypass lack of support for case insensitive order in search()
query.limit = int(limit)

query = self._search(domain, limit=limit)
# bypass lack of support for case insensitive order in search()
query.order = SQL('LOWER(%s), "res_partner"."id"', self._field_to_sql(self._table, 'name'))
return {
"count": self.env["res.partner"].search_count(domain),
"partners": list(self.env["res.partner"].browse(query).mail_partner_format().values()),
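The invite search now builds one ORM `Query` (including the limit) and only overrides its ORDER BY with an `SQL` expression for the case-insensitive sort, then feeds the query straight to `browse()`. A condensed sketch, with `domain` as built in the method above:

```python
from odoo.tools import SQL

query = self._search(domain, limit=30)
# Replace the default ordering with a case-insensitive one; _field_to_sql() gives the
# properly qualified column reference for the current table.
query.order = SQL('LOWER(%s), "res_partner"."id"', self._field_to_sql(self._table, 'name'))
partners = self.env['res.partner'].browse(query)   # browse() accepts a Query and resolves its ids
```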