Coverage for adhoc-cicd-odoo-odoo / odoo / orm / fields.py: 84%
942 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-09 18:22 +0000
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-09 18:22 +0000
1# Part of Odoo. See LICENSE file for full copyright and licensing details.
3""" High-level objects for fields. """
4from __future__ import annotations
6import functools
7import collections
8import itertools
9import logging
10import operator as pyoperator
11import re
12import typing
13import warnings
14from collections.abc import Set as AbstractSet
15from operator import attrgetter
17from psycopg2.extras import Json as PsycopgJson
19from odoo.exceptions import AccessError, MissingError
20from odoo.tools import Query, SQL, reset_cached_properties, sql
21from odoo.tools.constants import PREFETCH_MAX
22from odoo.tools.misc import SENTINEL, ReadonlyDict, Sentinel, unique
24from .domains import Domain
25from .utils import COLLECTION_TYPES, SQL_OPERATORS, SUPERUSER_ID, expand_ids
27if typing.TYPE_CHECKING:
28 from collections.abc import Callable, Collection, Iterable, Iterator, MutableMapping
30 from .environments import Environment
31 from .identifiers import IdType
32 from .registry import Registry
33 from .types import BaseModel, DomainType, ModelType, Self, ValuesType
34 M = typing.TypeVar("M", bound=BaseModel)
35T = typing.TypeVar("T")
37IR_MODELS = (
38 'ir.model', 'ir.model.data', 'ir.model.fields', 'ir.model.fields.selection',
39 'ir.model.relation', 'ir.model.constraint', 'ir.module.module',
40)
42COMPANY_DEPENDENT_FIELDS = (
43 'char', 'float', 'boolean', 'integer', 'text', 'many2one', 'date', 'datetime', 'selection', 'html'
44)
45PYTHON_INEQUALITY_OPERATOR = {'<': pyoperator.lt, '>': pyoperator.gt, '<=': pyoperator.le, '>=': pyoperator.ge}
47_logger = logging.getLogger('odoo.fields')
def resolve_mro(model: BaseModel, name: str, predicate) -> list[typing.Any]:
    """ Return the list of successively overridden values of attribute ``name``
    in mro order on ``model`` that satisfy ``predicate``. Model registry
    classes are ignored.
    """
    matches = []
    for klass in model._model_classes__:
        attr_value = klass.__dict__.get(name, SENTINEL)
        if attr_value is SENTINEL:
            # attribute not defined on this particular class: keep looking
            continue
        if not predicate(attr_value):
            # stop at the first override that fails the predicate
            break
        matches.append(attr_value)
    return matches
def determine(needle, records: BaseModel, *args):
    """ Simple helper for calling a method given as a string or a function.

    :param needle: callable or name of method to call on ``records``
    :param BaseModel records: recordset to call ``needle`` on or with
    :params args: additional arguments to pass to the determinant
    :returns: the determined value if the determinant is a method name or callable
    :raise TypeError: if ``records`` is not a recordset, or ``needle`` is not
        a callable or valid method name
    """
    if not isinstance(records, _models.BaseModel):
        raise TypeError("Determination requires a subject recordset")
    if isinstance(needle, str):
        needle = getattr(records, needle)
        # reject dunder-prefixed names so special methods cannot be invoked
        # by name (was `needle.__name__.find('__')`, falsy only at index 0)
        if not needle.__name__.startswith('__'):
            return needle(*args)
    elif callable(needle):
        if not needle.__name__.startswith('__'):
            return needle(records, *args)
    raise TypeError("Determination requires a callable or method name")
# module-wide monotonic counter: each Field draws its creation rank from it
# in __init__ (stored as field._sequence, the absolute field ordering)
_global_seq = itertools.count()
class Field(typing.Generic[T]):
    """The field descriptor contains the field definition, and manages accesses
    and assignments of the corresponding field on records. The following
    attributes may be provided when instantiating a field:

    :param str string: the label of the field seen by users; if not
        set, the ORM takes the field name in the class (capitalized).

    :param str help: the tooltip of the field seen by users

    :param bool readonly: whether the field is readonly (default: ``False``)

        This only has an impact on the UI. Any field assignation in code will work
        (if the field is a stored field or an inversable one).

    :param bool required: whether the value of the field is required (default: ``False``)

    :param str index: whether the field is indexed in database, and the kind of index.
        Note: this has no effect on non-stored and virtual fields.
        The possible values are:

        * ``"btree"`` or ``True``: standard index, good for many2one
        * ``"btree_not_null"``: BTREE index without NULL values (useful when most
          values are NULL, or when NULL is never searched for)
        * ``"trigram"``: Generalized Inverted Index (GIN) with trigrams (good for full-text search)
        * ``None`` or ``False``: no index (default)

    :param default: the default value for the field; this is either a static
        value, or a function taking a recordset and returning a value; use
        ``default=None`` to discard default values for the field
    :type default: value or callable

    :param str groups: comma-separated list of group xml ids (string); this
        restricts the field access to the users of the given groups only

    :param bool company_dependent: whether the field value is dependent of the current company;

        The value is stored on the model table as jsonb dict with the company id as the key.

        The field's default values stored in model ir.default are used as fallbacks for
        unspecified values in the jsonb dict.

    :param bool copy: whether the field value should be copied when the record
        is duplicated (default: ``True`` for normal fields, ``False`` for
        ``one2many`` and computed fields, including property fields and
        related fields)

    :param bool store: whether the field is stored in database
        (default:``True``, ``False`` for computed fields)

    :param bool default_export_compatible: whether the field must be exported
        by default in an import-compatible export

    :param str search: name of a method that implements search on the field.
        The method takes an operator and value. Basic domain optimizations are
        ran before calling this function.
        For instance, all ``'='`` are transformed to ``'in'``, and boolean
        fields conditions are made such that operator is ``'in'``/``'not in'``
        and value is ``[True]``.

        The method should ``return NotImplemented`` if it does not support the
        operator.
        In that case, the ORM can try to call it with other, semantically
        equivalent, operators. For instance, try with the positive operator if
        its corresponding negative operator is not implemented.
        The method must return a :ref:`reference/orm/domains` that replaces
        ``(field, operator, value)`` in its domain.

        Note that a stored field can actually have a search method. The search
        method will be invoked to rewrite the condition. This may be useful for
        sanitizing the values used in the condition, for instance.

        .. code-block:: python

            def _search_partner_ref(self, operator, value):
                if operator not in ('in', 'like'):
                    return NotImplemented
                ...  # add your logic here, example
                return Domain('partner_id.ref', operator, value)

    .. rubric:: Aggregation

    :param str aggregator: default aggregate function used by the webclient
        on this field when using "Group By" feature.

        Supported aggregators are:

        * ``count`` : number of rows
        * ``count_distinct`` : number of distinct rows
        * ``bool_and`` : true if all values are true, otherwise false
        * ``bool_or`` : true if at least one value is true, otherwise false
        * ``max`` : maximum value of all values
        * ``min`` : minimum value of all values
        * ``avg`` : the average (arithmetic mean) of all values
        * ``sum`` : sum of all values

    :param str group_expand: function used to expand results when grouping on the
        current field for kanban/list/gantt views. For selection fields,
        ``group_expand=True`` automatically expands groups for all selection keys.

        .. code-block:: python

            @api.model
            def _read_group_selection_field(self, values, domain):
                return ['choice1', 'choice2', ...]  # available selection choices.

            @api.model
            def _read_group_many2one_field(self, records, domain):
                return records + self.search([custom_domain])

    .. rubric:: Computed Fields

    :param str compute: name of a method that computes the field

        .. seealso:: :ref:`Advanced Fields/Compute fields <reference/fields/compute>`

    :param bool precompute: whether the field should be computed before record insertion
        in database. Should be used to specify manually some fields as precompute=True
        when the field can be computed before record insertion.
        (e.g. avoid statistics fields based on search/_read_group), many2one
        linking to the previous record, ... (default: `False`)

        .. warning::

            Precomputation only happens when no explicit value and no default
            value is provided to create(). This means that a default value
            disables the precomputation, even if the field is specified as
            precompute=True.

            Precomputing a field can be counterproductive if the records of the
            given model are not created in batch. Consider the situation were
            many records are created one by one. If the field is not
            precomputed, it will normally be computed in batch at the flush(),
            and the prefetching mechanism will help making the computation
            efficient. On the other hand, if the field is precomputed, the
            computation will be made one by one, and will therefore not be able
            to take advantage of the prefetching mechanism.

            Following the remark above, precomputed fields can be interesting on
            the lines of a one2many, which are usually created in batch by the
            ORM itself, provided that they are created by writing on the record
            that contains them.

    :param bool compute_sudo: whether the field should be recomputed as superuser
        to bypass access rights (by default ``True`` for stored fields, ``False``
        for non stored fields)

    :param bool recursive: whether the field has recursive dependencies (the field
        ``X`` has a dependency like ``parent_id.X``); declaring a field recursive
        must be explicit to guarantee that recomputation is correct

    :param str inverse: name of a method that inverses the field (optional)

    :param str related: sequence of field names

        .. seealso:: :ref:`Advanced fields/Related fields <reference/fields/related>`
    """

    type: str                            # type of the field (string)
    relational: bool = False             # whether the field is a relational one
    translate: bool = False              # whether the field is translated
    is_text: bool = False                # whether the field is a text type in the database
    falsy_value: T | None = None         # falsy value for comparisons (optional)

    write_sequence: int = 0              # field ordering for write()
    # Database column type (ident, spec) for non-company-dependent fields.
    # Company-dependent fields are stored as jsonb (see column_type).
    _column_type: tuple[str, str] | None = None

    _args__: dict[str, typing.Any] | None = None  # the parameters given to __init__()
    _module: str | None = None           # the field's module name
    _modules: tuple[str, ...] = ()       # modules that define this field
    _setup_done = True                   # whether the field is completely set up
    _sequence: int                       # absolute ordering of the field
    _base_fields__: tuple[Self, ...] = ()  # the fields defining self, in override order
    _extra_keys__: tuple[str, ...] = ()  # unknown attributes set on the field
    _direct: bool = False                # whether self may be used directly (shared)
    _toplevel: bool = False              # whether self is on the model's registry class

    inherited: bool = False              # whether the field is inherited (_inherits)
    inherited_field: Field | None = None  # the corresponding inherited field

    name: str = ''                       # name of the field
    model_name: str = ''                 # name of the model of this field
    comodel_name: str | None = None      # name of the model of values (if relational)

    store: bool = True                   # whether the field is stored in database
    index: str | None = None             # how the field is indexed in database
    manual: bool = False                 # whether the field is a custom field
    copy: bool = True                    # whether the field is copied over by BaseModel.copy()
    _depends: Collection[str] | None = None  # collection of field dependencies
    _depends_context: Collection[str] | None = None  # collection of context key dependencies
    recursive: bool = False              # whether self depends on itself
    compute: str | Callable[[BaseModel], None] | None = None  # compute(recs) computes field on recs
    compute_sudo: bool = False           # whether field should be recomputed as superuser
    precompute: bool = False             # whether field has to be computed before creation
    inverse: str | Callable[[BaseModel], None] | None = None  # inverse(recs) inverses field on recs
    search: str | Callable[[BaseModel, str, typing.Any], DomainType] | None = None  # search(recs, operator, value) searches on self
    related: str | None = None           # sequence of field names, for related fields
    company_dependent: bool = False      # whether ``self`` is company-dependent (property field)
    default: Callable[[BaseModel], T] | T | None = None  # default(recs) returns the default value

    string: str | None = None            # field label
    export_string_translation: bool = True  # whether the field label translations are exported
    help: str | None = None              # field tooltip
    readonly: bool = False               # whether the field is readonly
    required: bool = False               # whether the field is required (NOT NULL in database)
    groups: str | None = None            # csv list of group xml ids
    change_default = False               # whether the field may trigger a "user-onchange"

    related_field: Field | None = None   # corresponding related field
    aggregator: str | None = None        # operator for aggregating values
    group_expand: str | Callable[[BaseModel, ModelType, DomainType], ModelType] | None = None  # name of method to expand groups in formatted_read_group()
    falsy_value_label: str | None = None  # value to display when the field is not set (webclient attr)
    prefetch: bool | str = True          # the prefetch group (False means no group)

    default_export_compatible: bool = False  # whether the field must be exported by default in an import-compatible export
    exportable: bool = True              # whether the field may be exported at all

    # mapping from type name to field type (populated by __init_subclass__)
    _by_type__: dict[str, Field] = {}
314 def __init__(self, string: str | Sentinel = SENTINEL, **kwargs):
315 kwargs['string'] = string
316 self._sequence = next(_global_seq)
317 self._args__ = ReadonlyDict({key: val for key, val in kwargs.items() if val is not SENTINEL})
319 def __str__(self):
320 if not self.name: 320 ↛ 321line 320 didn't jump to line 321 because the condition on line 320 was never true
321 return "<%s.%s>" % (__name__, type(self).__name__)
322 return "%s.%s" % (self.model_name, self.name)
324 def __repr__(self):
325 if not self.name:
326 return f"{'<%s.%s>'!r}" % (__name__, type(self).__name__)
327 return f"{'%s.%s'!r}" % (self.model_name, self.name)
329 def __init_subclass__(cls):
330 super().__init_subclass__()
331 if not hasattr(cls, 'type'):
332 return
334 if cls.type: 334 ↛ 338line 334 didn't jump to line 338 because the condition on line 334 was always true
335 cls._by_type__.setdefault(cls.type, cls)
337 # compute class attributes to avoid calling dir() on fields
338 cls.related_attrs = []
339 cls.description_attrs = []
340 for attr in dir(cls):
341 if attr.startswith('_related_'):
342 cls.related_attrs.append((attr[9:], attr))
343 elif attr.startswith('_description_'):
344 cls.description_attrs.append((attr[13:], attr))
345 cls.related_attrs = tuple(cls.related_attrs)
346 cls.description_attrs = tuple(cls.description_attrs)
348 ############################################################################
349 #
350 # Base field setup: things that do not depend on other models/fields
351 #
352 # The base field setup is done by field.__set_name__(), which determines the
353 # field's name, model name, module and its parameters.
354 #
355 # The dictionary field._args__ gives the parameters passed to the field's
356 # constructor. Most parameters have an attribute of the same name on the
357 # field. The parameters as attributes are assigned by the field setup.
358 #
359 # When several definition classes of the same model redefine a given field,
360 # the field occurrences are "merged" into one new field instantiated at
361 # runtime on the registry class of the model. The occurrences of the field
362 # are given to the new field as the parameter '_base_fields__'; it is a list
363 # of fields in override order (or reverse MRO).
364 #
365 # In order to save memory, a field should avoid having field._args__ and/or
366 # many attributes when possible. We call "direct" a field that can be set
367 # up directly from its definition class. Direct fields are non-related
368 # fields defined on models, and can be shared across registries. We call
369 # "toplevel" a field that is put on the model's registry class, and is
370 # therefore specific to the registry.
371 #
372 # Toplevel field are set up once, and are no longer set up from scratch
373 # after that. Those fields can save memory by discarding field._args__ and
374 # field._base_fields__ once set up, because those are no longer necessary.
375 #
376 # Non-toplevel non-direct fields are the fields on definition classes that
377 # may not be shared. In other words, those fields are never used directly,
378 # and are always recreated as toplevel fields. On those fields, the base
379 # setup is useless, because only field._args__ is used for setting up other
380 # fields. We therefore skip the base setup for those fields. The only
381 # attributes of those fields are: '_sequence', '_args__', 'model_name', 'name'
382 # and '_module', which makes their __dict__'s size minimal.
    def __set_name__(self, owner: type[BaseModel], name: str) -> None:
        """ Perform the base setup of a field.

        :param owner: the owner class of the field (the model's definition or registry class)
        :param name: the name of the field
        """
        # during initialization, when importing `_models` at the end of this
        # file, it is not yet available and we already declare fields:
        # id and display_name
        assert '_models' not in globals() or isinstance(owner, _models.MetaModel)
        self.model_name = owner._name
        self.name = name
        if getattr(owner, 'pool', None) is None:  # models.is_model_definition(owner)
            # only for fields on definition classes, not registry classes
            self._module = owner._module
            owner._field_definitions.append(self)

        # a non-related field declared on a definition class can be shared
        # across registries ("direct"), see the setup overview comment above
        if not self._args__.get('related'):
            self._direct = True
        if self._direct or self._toplevel:
            self._setup_attrs__(owner, name)
            if self._toplevel:
                # free memory from stuff that is no longer useful
                self.__dict__.pop('_args__', None)
                if not self.related:
                    # keep _base_fields__ on related fields for incremental model setup
                    self.__dict__.pop('_base_fields__', None)
412 #
413 # Setup field parameter attributes
414 #
    def _get_attrs(self, model_class: type[BaseModel], name: str) -> dict[str, typing.Any]:
        """ Return the field parameter attributes as a dictionary.

        Merges the constructor parameters of all base fields (override order)
        with ``self``'s own parameters, then derives the defaults that depend
        on other parameters (store/copy/readonly for computed and related
        fields, company-dependent settings, etc.).
        """
        # determine all inherited field attributes
        attrs = {}
        modules: list[str] = []
        for field in self._args__.get('_base_fields__', ()):
            if not isinstance(self, type(field)):
                # 'self' overrides 'field' and their types are not compatible;
                # so we ignore all the parameters collected so far
                attrs.clear()
                modules.clear()
                continue
            attrs.update(field._args__)
            if field._module:
                modules.append(field._module)
        # self's own parameters take precedence over all base fields
        attrs.update(self._args__)
        if self._module:
            modules.append(self._module)

        attrs['model_name'] = model_class._name
        attrs['name'] = name
        attrs['_module'] = modules[-1] if modules else None
        # the following is faster than calling unique or using OrderedSet
        attrs['_modules'] = tuple(unique(modules) if len(modules) > 1 else modules)

        # initialize ``self`` with ``attrs``
        if name == 'state':
            # by default, `state` fields should be reset on copy
            attrs['copy'] = attrs.get('copy', False)
        if attrs.get('compute'):
            # by default, computed fields are not stored, computed in superuser
            # mode if stored, not copied (unless stored and explicitly not
            # readonly), and readonly (unless inversible)
            attrs['store'] = store = attrs.get('store', False)
            attrs['compute_sudo'] = attrs.get('compute_sudo', store)
            if not (attrs['store'] and not attrs.get('readonly', True)):
                attrs['copy'] = attrs.get('copy', False)
            attrs['readonly'] = attrs.get('readonly', not attrs.get('inverse'))
        if attrs.get('related'):
            # by default, related fields are not stored, computed in superuser
            # mode, not copied and readonly
            attrs['store'] = store = attrs.get('store', False)
            attrs['compute_sudo'] = attrs.get('compute_sudo', attrs.get('related_sudo', True))
            attrs['copy'] = attrs.get('copy', False)
            attrs['readonly'] = attrs.get('readonly', True)
        if attrs.get('precompute'):
            # precompute only makes sense for stored computed/related fields
            if not attrs.get('compute') and not attrs.get('related'):
                warnings.warn(f"precompute attribute doesn't make any sense on non computed field {self}", stacklevel=1)
                attrs['precompute'] = False
            elif not attrs.get('store'):
                warnings.warn(f"precompute attribute has no impact on non stored field {self}", stacklevel=1)
                attrs['precompute'] = False
        if attrs.get('company_dependent'):
            if attrs.get('required'):
                warnings.warn(f"company_dependent field {self} cannot be required", stacklevel=1)
            if attrs.get('translate'):
                warnings.warn(f"company_dependent field {self} cannot be translated", stacklevel=1)
            if self.type not in COMPANY_DEPENDENT_FIELDS:
                warnings.warn(f"company_dependent field {self} is not one of the allowed types {COMPANY_DEPENDENT_FIELDS}", stacklevel=1)
            attrs['copy'] = attrs.get('copy', False)
            # speed up search and on delete
            attrs['index'] = attrs.get('index', 'btree_not_null')
            attrs['prefetch'] = attrs.get('prefetch', 'company_dependent')
            attrs['_depends_context'] = ('company',)
        # parameters 'depends' and 'depends_context' are stored in attributes
        # '_depends' and '_depends_context', respectively
        if 'depends' in attrs:
            attrs['_depends'] = tuple(attrs.pop('depends'))
        if 'depends_context' in attrs:
            attrs['_depends_context'] = tuple(attrs.pop('depends_context'))

        if 'group_operator' in attrs:
            warnings.warn("Since Odoo 18, 'group_operator' is deprecated, use 'aggregator' instead", DeprecationWarning, stacklevel=2)
            attrs['aggregator'] = attrs.pop('group_operator')

        return attrs
    def _setup_attrs__(self, model_class: type[BaseModel], name: str) -> None:
        """ Initialize the field parameter attributes.

        Applies the merged attributes from :meth:`_get_attrs` onto ``self``,
        then derives ``prefetch``, the default label ``string``, and wraps a
        non-callable ``default`` value into a callable.
        """
        attrs = self._get_attrs(model_class, name)

        # determine parameters that must be validated
        extra_keys = tuple(key for key in attrs if not hasattr(self, key))
        if extra_keys:
            attrs['_extra_keys__'] = extra_keys

        self.__dict__.update(attrs)

        # prefetch only stored, column, non-manual fields
        if not self.store or not self.column_type or self.manual:
            self.prefetch = False

        if not self.string and not self.related:
            # related fields get their string from their parent field;
            # otherwise derive a label from the name, dropping _id/_ids suffix
            self.string = (
                name[:-4] if name.endswith('_ids') else
                name[:-3] if name.endswith('_id') else name
            ).replace('_', ' ').title()

        # self.default must be either None or a callable
        if self.default is not None and not callable(self.default):
            # bind the static value in a closure (captured once, on purpose)
            value = self.default
            self.default = lambda model: value
520 ############################################################################
521 #
522 # Complete field setup: everything else
523 #
    def prepare_setup(self) -> None:
        """ Invalidate the field's setup so that :meth:`setup` runs again. """
        self._setup_done = False
    def setup(self, model: BaseModel) -> None:
        """ Perform the complete setup of a field.

        Validates unknown parameters, dispatches to :meth:`setup_related` or
        :meth:`setup_nonrelated`, sanity-checks boolean attributes, and marks
        the field as set up. Idempotent once ``_setup_done`` is set.
        """
        if not self._setup_done:
            # validate field params
            for key in self._extra_keys__:
                if not model._valid_field_parameter(self, key):
                    _logger.warning(
                        "Field %s: unknown parameter %r, if this is an actual"
                        " parameter you may want to override the method"
                        " _valid_field_parameter on the relevant model in order to"
                        " allow it",
                        self, key
                    )
            if self.related:
                self.setup_related(model)
            else:
                self.setup_nonrelated(model)

            if not isinstance(self.required, bool):
                warnings.warn(f'Property {self}.required should be a boolean ({self.required}).', stacklevel=1)

            if not isinstance(self.readonly, bool):
                warnings.warn(f'Property {self}.readonly should be a boolean ({self.readonly}).', stacklevel=1)

            self._setup_done = True
        # column_type might be changed during Field.setup
        reset_cached_properties(self)
555 #
556 # Setup of non-related fields
557 #
    def setup_nonrelated(self, model: BaseModel) -> None:
        """ Determine the dependencies and inverse field(s) of ``self``.

        No-op here; subclasses may override.
        """
        pass
    def get_depends(self, model: BaseModel) -> tuple[Iterable[str], Iterable[str]]:
        """ Return the field's dependencies and cache dependencies.

        :return: a pair ``(depends, depends_context)``: field dependency paths
            and context keys the field's cached value depends on
        """
        if self._depends is not None:
            # the parameter 'depends' has priority over 'depends' on compute
            return self._depends, self._depends_context or ()

        if self.related:
            if self._depends_context is not None:
                depends_context = self._depends_context
            else:
                # aggregate the context dependencies of every field along the
                # related chain (recursing through each field's own depends)
                depends_context = []
                field_model_name = model._name
                for field_name in self.related.split('.'):
                    field_model = model.env[field_model_name]
                    field = field_model._fields[field_name]
                    depends_context.extend(field.get_depends(field_model)[1])
                    field_model_name = field.comodel_name
                depends_context = tuple(unique(depends_context))
            return [self.related], depends_context

        if not self.compute:
            return (), self._depends_context or ()

        # determine the functions implementing self.compute
        if isinstance(self.compute, str):
            funcs = resolve_mro(model, self.compute, callable)
        else:
            funcs = [self.compute]

        # collect depends and depends_context from all compute overrides
        depends = []
        depends_context = list(self._depends_context or ())
        for func in funcs:
            deps = getattr(func, '_depends', ())
            # _depends may itself be a callable returning the dependency list
            depends.extend(deps(model) if callable(deps) else deps)
            depends_context.extend(getattr(func, '_depends_context', ()))

        return depends, depends_context
602 #
603 # Setup of related fields
604 #
    def setup_related(self, model: BaseModel) -> None:
        """ Setup the attributes of a related field.

        Resolves the chain of fields named by ``self.related``, checks type
        consistency, wires up compute/inverse/search implementations, and
        copies descriptive attributes from the target field.
        """
        assert isinstance(self.related, str), self.related

        # determine the chain of fields, and make sure they are all set up
        field_seq = []
        model_name = self.model_name
        for name in self.related.split('.'):
            field = model.pool[model_name]._fields.get(name)
            if field is None:
                raise KeyError(
                    f"Field {name} referenced in related field definition {self} does not exist."
                )
            if not field._setup_done:
                field.setup(model.env[model_name])
            field_seq.append(field)
            model_name = field.comodel_name

        # check type consistency (field is the last field of the chain here)
        if self.type != field.type:
            raise TypeError("Type of related field %s is inconsistent with %s" % (self, field))

        self.related_field = field

        # if field's setup is invalidated, then self's setup must be invalidated, too
        model.pool.field_setup_dependents.add(field, self)

        # determine dependencies, compute, inverse, and search
        self.compute = self._compute_related
        if self.inherited or not (self.readonly or field.readonly):
            self.inverse = self._inverse_related
        if not self.store and all(f._description_searchable for f in field_seq):
            # allow searching on self only if the related field is searchable
            self.search = self._search_related

        # A readonly related field without an inverse method should not have a
        # default value, as it does not make sense.
        if self.default and self.readonly and not self.inverse:
            _logger.warning("Redundant default on %s", self)

        # copy attributes from field to self (string, help, etc.)
        for attr, prop in self.related_attrs:
            # check whether 'attr' is explicitly set on self (from its field
            # definition), and ignore its class-level value (only a default)
            if attr not in self.__dict__ and prop.startswith('_related_'):
                setattr(self, attr, getattr(field, prop))

        # propagate valid extra parameters from the target field
        for attr in field._extra_keys__:
            if not hasattr(self, attr) and model._valid_field_parameter(self, attr):
                setattr(self, attr, getattr(field, attr))

        # special cases of inherited fields
        if self.inherited:
            self.inherited_field = field
            if field.required:
                self.required = True
            # add modules from delegate and target fields; the first one ensures
            # that inherited fields introduced via an abstract model (_inherits
            # being on the abstract model) are assigned an XML id
            delegate_field = model._fields[self.related.split('.')[0]]
            self._modules = tuple({*self._modules, *delegate_field._modules, *field._modules})
668 def traverse_related(self, record: BaseModel) -> tuple[BaseModel, Field]:
669 """ Traverse the fields of the related field `self` except for the last
670 one, and return it as a pair `(last_record, last_field)`. """
671 for name in self.related.split('.')[:-1]:
672 # take the first record when traversing
673 corecord = record[name]
674 record = next(iter(corecord), corecord)
675 return record, self.related_field
    def _compute_related(self, records: BaseModel) -> None:
        """ Compute the related field ``self`` on ``records``. """
        #
        # Traverse fields one by one for all records, in order to take advantage
        # of prefetching for each field access. In order to clarify the impact
        # of the algorithm, consider traversing 'foo.bar' for records a1 and a2,
        # where 'foo' is already present in cache for a1, a2. Initially, both a1
        # and a2 are marked for prefetching. As the commented code below shows,
        # traversing all fields one record at a time will fetch 'bar' one record
        # at a time.
        #
        #       b1 = a1.foo         # mark b1 for prefetching
        #       v1 = b1.bar         # fetch/compute bar for b1
        #       b2 = a2.foo         # mark b2 for prefetching
        #       v2 = b2.bar         # fetch/compute bar for b2
        #
        # On the other hand, traversing all records one field at a time ensures
        # maximal prefetching for each field access.
        #
        #       b1 = a1.foo         # mark b1 for prefetching
        #       b2 = a2.foo         # mark b2 for prefetching
        #       v1 = b1.bar         # fetch/compute bar for b1, b2
        #       v2 = b2.bar         # value already in cache
        #
        # This difference has a major impact on performance, in particular in
        # the case where 'bar' is a computed field that takes advantage of batch
        # computation.
        #
        values = list(records)
        for name in self.related.split('.')[:-1]:
            try:
                # take the first record of each step (next(iter(...))), falling
                # back to the empty recordset itself when there is none
                values = [next(iter(val := value[name]), val) for value in values]
            except AccessError as e:
                # re-raise with context about the implicit traversal
                description = records.env['ir.model']._get(records._name).name
                env = records.env
                raise AccessError(env._(
                    "%(previous_message)s\n\nImplicitly accessed through '%(document_kind)s' (%(document_model)s).",
                    previous_message=e.args[0],
                    document_kind=description,
                    document_model=records._name,
                ))
        # assign final values to records
        for record, value in zip(records, values):
            record[self.name] = self._process_related(value[self.related_field.name], record.env)
722 def _process_related(self, value, env: Environment):
723 """No transformation by default, but allows override."""
724 return value
    def _inverse_related(self, records: BaseModel) -> None:
        """ Inverse the related field ``self`` on ``records``: write each
        record's value back onto the target record at the end of the related
        path. """
        # store record values, otherwise they may be lost by cache invalidation!
        record_value = {record: record[self.name] for record in records}
        for record in records:
            target, field = self.traverse_related(record)
            # update 'target' only if 'record' and 'target' are both real or
            # both new (see `test_base_objects.py`, `test_basic`)
            if target and bool(target.id) == bool(record.id):
                target[field.name] = record_value[record]
    def _search_related(self, records: BaseModel, operator: str, value) -> DomainType:
        """ Determine the domain to search on field ``self``.

        Returns ``NotImplemented`` for negative operators with a non-null
        value, so that the caller retries with the positive counterpart.
        """

        # Compute the new domain for ('x.y.z', op, value)
        # as ('x', 'any', [('y', 'any', [('z', op, value)])])
        # If the followed relation is a nullable many2one, we accept null
        # for that path as well.

        # determine whether the related field can be null
        falsy_value = self.falsy_value
        if isinstance(value, COLLECTION_TYPES):
            value_is_null = any(val is False or val is None or val == falsy_value for val in value)
        else:
            value_is_null = value is False or value is None or value == falsy_value
        can_be_null = (  # (..., '=', False) or (..., 'not in', [truthy vals])
            (operator not in Domain.NEGATIVE_OPERATORS) == value_is_null
        )
        if operator in Domain.NEGATIVE_OPERATORS and not value_is_null:
            # we have a condition like 'not in' ['a']
            # let's call back with a positive operator
            return NotImplemented

        # parse the path into the sequence of traversed fields
        field_seq = []
        model_name = self.model_name
        for fname in self.related.split('.'):
            field = records.env[model_name]._fields[fname]
            field_seq.append(field)
            model_name = field.comodel_name

        # build the domain backwards with the any operator; 'any!' bypasses
        # record rules when the related field is computed in sudo mode
        domain = Domain(field_seq[-1].name, operator, value)
        for field in reversed(field_seq[:-1]):
            domain = Domain(field.name, 'any!' if self.compute_sudo else 'any', domain)
            if can_be_null and field.type == 'many2one' and not field.required:
                # a null many2one in the path also matches a "null" condition
                domain |= Domain(field.name, '=', False)
        return domain
    # properties used by setup_related() to copy values from related field;
    # each one simply reads the corresponding attribute on the target field
    _related_comodel_name = property(attrgetter('comodel_name'))
    _related_string = property(attrgetter('string'))
    _related_help = property(attrgetter('help'))
    _related_groups = property(attrgetter('groups'))
    _related_aggregator = property(attrgetter('aggregator'))
782 @functools.cached_property
783 def column_type(self) -> tuple[str, str] | None:
784 """ Return the actual column type for this field, if stored as a column. """
785 return ('jsonb', 'jsonb') if self.company_dependent or self.translate else self._column_type
787 @property
788 def base_field(self) -> Self:
789 """ Return the base field of an inherited field, or ``self``. """
790 return self.inherited_field.base_field if self.inherited_field else self
    #
    # Company-dependent fields
    #

    def get_company_dependent_fallback(self, records):
        """ Return the fallback value (in record format) used for records that
        have no company-specific value stored for this company-dependent
        field. """
        assert self.company_dependent
        # fallbacks live in 'ir.default'; read them as superuser for the
        # current company of the records' environment
        fallback = records.env['ir.default'] \
            .with_user(SUPERUSER_ID) \
            .with_company(records.env.company) \
            ._get_model_defaults(records._name).get(self.name)
        # round-trip through the cache format to normalize the value
        fallback = self.convert_to_cache(fallback, records, validate=False)
        return self.convert_to_record(fallback, records)
    #
    # Setup of field triggers
    #

    def resolve_depends(self, registry: Registry) -> Iterator[tuple[Field, ...]]:
        """ Return the dependencies of `self` as a collection of field tuples.

        Each yielded tuple is a chain of fields to traverse (from the model of
        ``self``) to reach a field whose modification must trigger the
        recomputation of ``self``.  Side effects: may set ``self.recursive``
        and clear ``self.precompute`` (with a warning) when the declared
        dependencies make those flags inconsistent.
        """
        Model0 = registry[self.model_name]

        for dotnames in registry.field_depends[self]:
            field_seq: list[Field] = []
            model_name = self.model_name
            check_precompute = self.precompute

            for index, fname in enumerate(dotnames.split('.')):
                Model = registry[model_name]
                if Model0._transient and not Model._transient:
                    # modifying fields on regular models should not trigger
                    # recomputations of fields on transient models
                    break

                try:
                    field = Model._fields[fname]
                except KeyError:
                    raise ValueError(
                        f"Wrong @depends on '{self.compute}' (compute method of field {self}). "
                        f"Dependency field '{fname}' not found in model {model_name}."
                    ) from None
                if field is self and index and not self.recursive:
                    # self appears later in its own dependency path: the field
                    # is recursive even though it was not declared as such
                    self.recursive = True
                    warnings.warn(f"Field {self} should be declared with recursive=True", stacklevel=1)

                # precomputed fields can depend on non-precomputed ones, as long
                # as they are reachable through at least one many2one field
                if check_precompute and field.store and field.compute and not field.precompute:
                    warnings.warn(f"Field {self} cannot be precomputed as it depends on non-precomputed field {field}", stacklevel=1)
                    self.precompute = False

                if field_seq and not field_seq[-1]._description_searchable:
                    # the field before this one is not searchable, so there is
                    # no way to know which on records to recompute self
                    warnings.warn(
                        f"Field {field_seq[-1]!r} in dependency of {self} should be searchable. "
                        f"This is necessary to determine which records to recompute when {field} is modified. "
                        f"You should either make the field searchable, or simplify the field dependency.",
                        stacklevel=1,
                    )

                field_seq.append(field)

                # do not make self trigger itself: for instance, a one2many
                # field line_ids with domain [('foo', ...)] will have
                # 'line_ids.foo' as a dependency
                if not (field is self and not index):
                    yield tuple(field_seq)

                if field.type == 'one2many':
                    # changes on the inverse field also affect the one2many
                    for inv_field in Model.pool.field_inverses[field]:
                        yield tuple(field_seq) + (inv_field,)

                if check_precompute and field.type == 'many2one':
                    # beyond a many2one step, non-precomputed dependencies are fine
                    check_precompute = False

                model_name = field.comodel_name
869 ############################################################################
870 #
871 # Field description
872 #
874 def get_description(self, env: Environment, attributes: Collection[str] | None = None) -> ValuesType:
875 """ Return a dictionary that describes the field ``self``. """
876 desc = {}
877 for attr, prop in self.description_attrs:
878 if attributes is not None and attr not in attributes:
879 continue
880 if not prop.startswith('_description_'): 880 ↛ 881line 880 didn't jump to line 881 because the condition on line 880 was never true
881 continue
882 value = getattr(self, prop)
883 if callable(value):
884 value = value(env)
885 if value is not None:
886 desc[attr] = value
888 return desc
    # properties used by get_description(); each one mirrors the attribute of
    # the same name on the field
    _description_name = property(attrgetter('name'))
    _description_type = property(attrgetter('type'))
    _description_store = property(attrgetter('store'))
    _description_manual = property(attrgetter('manual'))
    _description_related = property(attrgetter('related'))
    _description_company_dependent = property(attrgetter('company_dependent'))
    _description_readonly = property(attrgetter('readonly'))
    _description_required = property(attrgetter('required'))
    _description_groups = property(attrgetter('groups'))
    _description_change_default = property(attrgetter('change_default'))
    _description_default_export_compatible = property(attrgetter('default_export_compatible'))
    _description_exportable = property(attrgetter('exportable'))
    def _description_depends(self, env: Environment):
        """ Return the dependency dotnames of this field, as registered in the registry. """
        return env.registry.field_depends[self]
907 @property
908 def _description_searchable(self) -> bool:
909 return bool(self.store or self.search)
    def _description_sortable(self, env: Environment):
        """ Whether the field can be used in an ``order`` clause. """
        if self.column_type and self.store:  # shortcut
            return True
        if self.inherited_field and self.inherited_field._description_sortable(env):
            # avoid computation for inherited field
            return True

        # probe: try to build the ORDER BY expression; failure means not sortable
        model = env[self.model_name]
        query = model._as_query(ordered=False)
        try:
            model._order_field_to_sql(model._table, self.name, SQL(), SQL(), query)
            return True
        except (ValueError, AccessError):
            return False
    def _description_groupable(self, env: Environment):
        """ Whether the field can be used as a ``groupby`` in ``read_group``. """
        if self.column_type and self.store:  # shortcut
            return True
        if self.inherited_field and self.inherited_field._description_groupable(env):
            # avoid computation for inherited field
            return True

        # probe: try to build the GROUP BY expression; failure means not groupable
        model = env[self.model_name]
        query = model._as_query(ordered=False)
        # date/datetime fields must be grouped with a granularity
        groupby = self.name if self.type not in ('date', 'datetime') else f"{self.name}:month"
        try:
            model._read_group_groupby(model._table, groupby, query)
            return True
        except (ValueError, AccessError):
            return False
    def _description_aggregator(self, env: Environment):
        """ Return the aggregator usable for this field in ``read_group``, or
        ``None`` when the field cannot be aggregated. """
        if not self.aggregator or (self.column_type and self.store):  # shortcut
            return self.aggregator
        if self.inherited_field and self.inherited_field._description_aggregator(env):
            # avoid computation for inherited field
            # NOTE(review): this returns the inherited field's *declared*
            # aggregator, not the value returned by its _description_aggregator()
            # above — confirm the two cannot diverge
            return self.inherited_field.aggregator

        # probe: try to build the aggregate expression; failure means no aggregator
        model = env[self.model_name]
        query = model._as_query(ordered=False)
        try:
            model._read_group_select(f"{self.name}:{self.aggregator}", query)
            return self.aggregator
        except (ValueError, AccessError):
            return None
957 def _description_string(self, env: Environment) -> str:
958 if self.string and env.lang:
959 model_name = self.base_field.model_name
960 field_string = env['ir.model.fields'].get_field_string(model_name)
961 return field_string.get(self.name) or self.string
962 return self.string
964 def _description_help(self, env: Environment):
965 if self.help and env.lang:
966 model_name = self.base_field.model_name
967 field_help = env['ir.model.fields'].get_field_help(model_name)
968 return field_help.get(self.name) or self.help
969 return self.help
    def _description_falsy_value_label(self, env) -> str | None:
        """ Return the translated label displayed for falsy values, if one is declared. """
        return env._(self.falsy_value_label) if self.falsy_value_label else None  # pylint: disable=gettext-variable
974 def is_editable(self) -> bool:
975 """ Return whether the field can be editable in a view. """
976 return not self.readonly
978 ############################################################################
979 #
980 # Conversion of values
981 #
983 def convert_to_column(self, value, record, values=None, validate=True):
984 """ Convert ``value`` from the ``write`` format to the SQL parameter
985 format for SQL conditions. This is used to compare a field's value when
986 the field actually stores multiple values (translated or company-dependent).
987 """
988 if value is None or value is False:
989 return None
990 if isinstance(value, str):
991 return value
992 elif isinstance(value, bytes): 992 ↛ 993line 992 didn't jump to line 993 because the condition on line 992 was never true
993 return value.decode()
994 else:
995 return str(value)
    def convert_to_column_insert(self, value, record, values=None, validate=True):
        """ Convert ``value`` from the ``write`` format to the SQL parameter
        format for INSERT queries. This method handles the case of fields that
        store multiple values (translated or company-dependent).
        """
        value = self.convert_to_column(value, record, values, validate)
        if not self.company_dependent:
            return value
        # a company-dependent value equal to the 'ir.default' fallback is not
        # stored at all (NULL column), so the fallback applies on read
        fallback = record.env['ir.default']._get_model_defaults(record._name).get(self.name)
        if value == self.convert_to_column(fallback, record):
            return None
        # otherwise store as a JSON mapping {company_id: value}
        return PsycopgJson({record.env.company.id: value})
    def get_column_update(self, record: BaseModel):
        """ Return the value of record in cache as an SQL parameter formatted
        for UPDATE queries.
        """
        field_cache = record.env.transaction.field_data[self]
        record_id = record.id
        if self.company_dependent:
            # collect one value per cache context; ctx_key[0] is presumably the
            # company id of the cache context — TODO confirm against cache layout
            values = {}
            for ctx_key, cache in field_cache.items():
                if (value := cache.get(record_id, SENTINEL)) is not SENTINEL:
                    values[ctx_key[0]] = self.convert_to_column(value, record)
            return PsycopgJson(values) if values else None
        if self in record.env._field_depends_context:
            # field that will be written to the database depends on context;
            # find the first value that is set
            # If we have more than one value, it is a logical error in the
            # design of the model. In that case, we pick one at random because
            # a stored field can have only one value.
            for ctx_key, cache in field_cache.items():
                if (value := cache.get(record_id, SENTINEL)) is not SENTINEL:
                    break
            else:
                raise AssertionError(f"Value not in cache for field {self} and id={record_id}")
        else:
            value = field_cache[record_id]
        return self.convert_to_column_insert(value, record, validate=False)
1037 def convert_to_cache(self, value, record, validate=True):
1038 """ Convert ``value`` to the cache format; ``value`` may come from an
1039 assignment, or have the format of methods :meth:`BaseModel.read` or
1040 :meth:`BaseModel.write`. If the value represents a recordset, it should
1041 be added for prefetching on ``record``.
1043 :param value:
1044 :param record:
1045 :param bool validate: when True, field-specific validation of ``value``
1046 will be performed
1047 """
1048 return value
1050 def convert_to_record(self, value, record):
1051 """ Convert ``value`` from the cache format to the record format.
1052 If the value represents a recordset, it should share the prefetching of
1053 ``record``.
1054 """
1055 return False if value is None else value
1057 def convert_to_read(self, value, record, use_display_name=True):
1058 """ Convert ``value`` from the record format to the format returned by
1059 method :meth:`BaseModel.read`.
1061 :param value:
1062 :param record:
1063 :param bool use_display_name: when True, the value's display name will be
1064 computed using `display_name`, if relevant for the field
1065 """
1066 return False if value is None else value
1068 def convert_to_write(self, value, record):
1069 """ Convert ``value`` from any format to the format of method
1070 :meth:`BaseModel.write`.
1071 """
1072 cache_value = self.convert_to_cache(value, record, validate=False)
1073 record_value = self.convert_to_record(cache_value, record)
1074 return self.convert_to_read(record_value, record)
1076 def convert_to_export(self, value, record):
1077 """ Convert ``value`` from the record format to the export format. """
1078 if not value:
1079 return ''
1080 return value
1082 def convert_to_display_name(self, value, record):
1083 """ Convert ``value`` from the record format to a suitable display name. """
1084 return str(value) if value else False
1086 ############################################################################
1087 #
1088 # Update database schema
1089 #
1091 @property
1092 def column_order(self) -> int:
1093 """ Prescribed column order in table. """
1094 return 0 if self.column_type is None else sql.SQL_ORDER_BY_TYPE[self.column_type[0]]
    def update_db(self, model: BaseModel, columns: dict[str, dict[str, typing.Any]]) -> bool:
        """ Update the database schema to implement this field.

        :param model: an instance of the field's model
        :param columns: a dict mapping column names to their configuration in database
        :return: ``True`` if the field must be recomputed on existing rows
        """
        if not self.column_type:
            # field has no column: nothing to do at the schema level
            return False

        column = columns.get(self.name)

        # create/update the column, not null constraint; the index will be
        # managed by registry.check_indexes()
        self.update_db_column(model, column)
        self.update_db_notnull(model, column)

        # optimization for computing simple related fields like 'foo_id.bar'
        if (
            not column
            and self.related and self.related.count('.') == 1
            and self.related_field.store and not self.related_field.compute
            and not (self.related_field.type == 'binary' and self.related_field.attachment)
            and self.related_field.type not in ('one2many', 'many2many')
        ):
            join_field = model._fields[self.related.split('.')[0]]
            if (
                join_field.type == 'many2one'
                and join_field.store and not join_field.compute
            ):
                # fill the new column with a single SQL UPDATE after init
                model.pool.post_init(self.update_db_related, model)
                # discard the "classical" computation
                return False

        # a brand-new column must be recomputed on existing rows
        return not column
    def update_db_column(self, model: BaseModel, column: dict[str, typing.Any]):
        """ Create/update the column corresponding to ``self``.

        :param model: an instance of the field's model
        :param column: the column's configuration (dict) if it exists, or ``None``
        """
        if not column:
            # the column does not exist, create it
            sql.create_column(model.env.cr, model._table, self.name, self.column_type[1], self.string)
            return
        if column['udt_name'] == self.column_type[0]:
            # existing column already has the expected type
            return
        # type mismatch: migrate the column to the field's type
        self._convert_db_column(model, column)
    def _convert_db_column(self, model: BaseModel, column: dict[str, typing.Any]):
        """ Convert the given database column to the type of the field. """
        sql.convert_column(model.env.cr, model._table, self.name, self.column_type[1])
    def update_db_notnull(self, model: BaseModel, column: dict[str, typing.Any]):
        """ Add or remove the NOT NULL constraint on ``self``.

        :param model: an instance of the field's model
        :param column: the column's configuration (dict) if it exists, or ``None``
        """
        has_notnull = column and column['is_nullable'] == 'NO'

        if not column or (self.required and not has_notnull):
            # the column is new or it becomes required; initialize its values
            if model._table_has_rows():
                model._init_column(self.name)

        if self.required and not has_notnull:
            # _init_column may delay computations in post-init phase
            @model.pool.post_init
            def add_not_null():
                # At the time this function is called, the model's _fields may have been reset, although
                # the model's class is still the same. Retrieve the field to see whether the NOT NULL
                # constraint still applies.
                field = model._fields[self.name]
                if not field.required or not field.store:
                    return
                if field.compute:
                    # schedule recomputation for rows whose value is still NULL
                    records = model.browse(id_ for id_, in model.env.execute_query(SQL(
                        "SELECT id FROM %s AS t WHERE %s IS NULL",
                        SQL.identifier(model._table), model._field_to_sql('t', field.name),
                    )))
                    model.env.add_to_compute(field, records)
                # Flush values before adding NOT NULL constraint.
                model.flush_model([field.name])
                model.pool.post_constraint(
                    model.env.cr,
                    lambda cr: sql.set_not_null(cr, model._table, field.name),
                    key=f"add_not_null:{model._table}:{field.name}",
                )

        elif not self.required and has_notnull:
            # the field is no longer required: drop the constraint
            sql.drop_not_null(model.env.cr, model._table, self.name)
    def update_db_related(self, model: BaseModel) -> None:
        """ Compute a stored related field directly in SQL.

        Only valid for a one-hop related path ``join_field.comodel_field``
        where the join field is a stored many2one (see :meth:`update_db`).
        """
        comodel = model.env[self.related_field.model_name]
        join_field, comodel_field = self.related.split('.')
        # single UPDATE ... FROM join instead of the ORM-level computation
        model.env.cr.execute(SQL(
            """ UPDATE %(model_table)s AS x
                SET %(model_field)s = y.%(comodel_field)s
                FROM %(comodel_table)s AS y
                WHERE x.%(join_field)s = y.id """,
            model_table=SQL.identifier(model._table),
            model_field=SQL.identifier(self.name),
            comodel_table=SQL.identifier(comodel._table),
            comodel_field=SQL.identifier(comodel_field),
            join_field=SQL.identifier(join_field),
        ))
    ############################################################################
    #
    # SQL generation methods
    #

    def to_sql(self, model: BaseModel, alias: str) -> SQL:
        """ Return an :class:`SQL` object that represents the value of the given
        field from the given table alias.

        The query object is necessary for fields that need to add tables to the query.

        :raises ValueError: if the field is not stored as a column
        """
        if not self.store or not self.column_type:
            raise ValueError(f"Cannot convert {self} to SQL because it is not stored")
        sql_field = SQL.identifier(alias, self.name, to_flush=self)
        if self.company_dependent:
            # company-dependent columns are jsonb mappings {company_id: value};
            # extract the current company's value, falling back to the default
            fallback = self.get_company_dependent_fallback(model)
            fallback = self.convert_to_column(self.convert_to_write(fallback, model), model)
            # in _read_group_orderby the result of field to sql will be mogrified and split to
            # e.g SQL('COALESCE(%s->%s') and SQL('to_jsonb(%s))::boolean') as 2 orderby values
            # and concatenated by SQL(',') in the final result, which works in an unexpected way
            sql_field = SQL(
                "COALESCE(%(column)s->%(company_id)s,to_jsonb(%(fallback)s::%(column_type)s))",
                column=sql_field,
                company_id=str(model.env.company.id),
                fallback=fallback,
                column_type=SQL(self._column_type[1]),
            )
            if self.type in ('boolean', 'integer', 'float', 'monetary'):
                # numeric jsonb values cast directly to the column type
                return SQL('(%s)::%s', sql_field, SQL(self._column_type[1]))
            # here the specified value for a company might be NULL e.g. '{"1": null}'::jsonb
            # the result of current sql_field might be 'null'::jsonb
            # ('null'::jsonb)::text == 'null'
            # ('null'::jsonb->>0)::text IS NULL
            return SQL('(%s->>0)::%s', sql_field, SQL(self._column_type[1]))
        return sql_field
    def property_to_sql(self, field_sql: SQL, property_name: str, model: BaseModel, alias: str, query: Query) -> SQL:
        """ Return an :class:`SQL` object that represents the value of the given
        expression from the given table alias.

        The query object is necessary for fields that need to add tables to the query.

        The base implementation supports no properties; field types that do
        (e.g. date parts) override this method.

        :raises ValueError: always, on the base field class
        """
        raise ValueError(f"Invalid field property {property_name!r} on {self}")
    def condition_to_sql(self, field_expr: str, operator: str, value, model: BaseModel, alias: str, query: Query) -> SQL:
        """ Return an :class:`SQL` object that represents the domain condition
        given by the triple ``(field_expr, operator, value)`` with the given
        table alias, and in the context of the given query.

        This method should use the model to resolve the SQL and check access
        of the field.
        """
        sql_expr = self._condition_to_sql(field_expr, operator, value, model, alias, query)
        if self.company_dependent:
            # wrap the condition so it can use the partial btree index
            sql_expr = self._condition_to_sql_company(sql_expr, field_expr, operator, value, model, alias, query)
        return sql_expr
    def _condition_to_sql(self, field_expr: str, operator: str, value, model: BaseModel, alias: str, query: Query) -> SQL:
        """ Build the SQL for a domain condition on this field.

        Dispatches on ``operator``: 'in'/'not in' (equality with NULL
        handling), the 'like' family, inequalities, and 'any!'/'not any!'
        subselects.  ``falsy_value`` (when defined) is treated as equivalent
        to NULL throughout.
        """
        sql_field = model._field_to_sql(alias, field_expr, query)

        if field_expr == self.name:
            # condition on the field itself: normalize values to column format
            def _value_to_column(v):
                return self.convert_to_column(v, model, validate=False)
        else:
            # reading a property, keep value as-is
            def _value_to_column(v):
                return v

        # support for SQL value
        if operator in SQL_OPERATORS and isinstance(value, SQL):
            warnings.warn("Since 19.0, use Domain.custom(to_sql=lambda model, alias, query: SQL(...))", DeprecationWarning)
            return SQL("%s%s%s", sql_field, SQL_OPERATORS[operator], value)

        # nullability
        can_be_null = self not in model.env.registry.not_null_fields

        # operator: in (equality)
        if operator in ('in', 'not in'):
            assert isinstance(value, COLLECTION_TYPES), \
                f"condition_to_sql() 'in' operator expects a collection, not a {value!r}"
            # drop null-like values from the IN list, but remember they were there
            params = tuple(_value_to_column(v) for v in value if v is not False and v is not None)
            null_in_condition = len(params) < len(value)
            # if we have a value treated as null
            if (null_value := self.falsy_value) is not None:
                null_value = _value_to_column(null_value)
                if null_value in params:
                    null_in_condition = True
                elif null_in_condition:
                    # NULL requested: the falsy value must match too
                    params = (*params, null_value)

            sql = None
            if params:
                sql = SQL("%s%s%s", sql_field, SQL_OPERATORS[operator], params)

            if (operator == 'in') == null_in_condition:
                # field in {val, False}  =>  field IN vals OR field IS NULL
                # field not in {val}     =>  field NOT IN vals OR field IS NULL
                if not can_be_null:
                    return sql or SQL("FALSE")
                sql_null = SQL("%s IS NULL", sql_field)
                return SQL("(%s OR %s)", sql, sql_null) if sql else sql_null

            elif operator == 'not in' and null_in_condition and not sql:
                # if we have a base query, null values are already excluded
                return SQL("%s IS NOT NULL", sql_field) if can_be_null else SQL("TRUE")

            assert sql, f"Missing sql query for {operator} {value!r}"
            return sql

        # operator: like
        if operator.endswith('like'):
            # cast value to text for any like comparison
            sql_left = sql_field if self.is_text else SQL("%s::text", sql_field)

            # add wildcard and unaccent depending on the operator
            need_wildcard = '=' not in operator
            if need_wildcard:
                sql_value = SQL("%s", f"%{value}%")
            else:
                sql_value = SQL("%s", str(value))
            if operator.endswith('ilike'):
                # case/accent-insensitive comparison on both sides
                sql_left = model.env.registry.unaccent(sql_left)
                sql_value = model.env.registry.unaccent(sql_value)

            sql = SQL("%s%s%s", sql_left, SQL_OPERATORS[operator], sql_value)
            if operator in Domain.NEGATIVE_OPERATORS and can_be_null:
                # NULL never matches LIKE; a negative condition must accept it
                sql = SQL("(%s OR %s IS NULL)", sql, sql_field)
            return sql

        # operator: inequality
        if operator in ('>', '<', '>=', '<='):
            accept_null_value = False
            if (null_value := self.falsy_value) is not None:
                value = self.convert_to_cache(value, model) or null_value
                # NULL stands for falsy_value: accept it when falsy_value satisfies
                # the comparison
                accept_null_value = can_be_null and (
                    null_value < value if operator == '<' else
                    null_value > value if operator == '>' else
                    null_value <= value if operator == '<=' else
                    null_value >= value  # operator == '>='
                )
            sql_value = SQL("%s", _value_to_column(value))

            sql = SQL("%s%s%s", sql_field, SQL_OPERATORS[operator], sql_value)
            if accept_null_value:
                sql = SQL("(%s OR %s IS NULL)", sql, sql_field)
            return sql

        # operator: any
        # Note: relational operators overwrite this function for a more specific
        # behaviour, here we check just the field against the subselect.
        # Example usage: ('id', 'any!', Query | SQL)
        if operator in ('any!', 'not any!'):
            if isinstance(value, Query):
                subselect = value.subselect()
            elif isinstance(value, SQL):
                subselect = SQL("(%s)", value)
            else:
                raise TypeError(f"condition_to_sql() operator 'any!' accepts SQL or Query, got {value}")
            sql_operator = SQL_OPERATORS["in" if operator == "any!" else "not in"]
            return SQL("%s%s%s", sql_field, sql_operator, subselect)

        raise NotImplementedError(f"Invalid operator {operator!r} for SQL in domain term {(field_expr, operator, value)!r}")
    def _condition_to_sql_company(self, sql_expr: SQL, field_expr: str, operator: str, value, model: BaseModel, alias: str, query: Query) -> SQL:
        """ Add a not null condition on the field for company-dependent fields to use an existing index for better performance."""
        if (
            self.company_dependent
            and self.index == 'btree_not_null'
            and not (self.type in ('datetime', 'date') and field_expr != self.name)  # READ_GROUP_NUMBER_GRANULARITY is not supported
            # the extra IS NOT NULL is only correct when rows with a NULL
            # column (i.e. using the fallback value) cannot match the condition
            and model.env['ir.default']._evaluate_condition_with_fallback(model._name, field_expr, operator, value) is False
        ):
            return SQL('(%s IS NOT NULL AND %s)', SQL.identifier(alias, self.name), sql_expr)
        return sql_expr
    ############################################################################
    #
    # Expressions and filtering of records
    #

    def expression_getter(self, field_expr: str) -> Callable[[BaseModel], typing.Any]:
        """ Given some field expression (what you find in domain conditions),
        return a function that returns the corresponding expression for a record::

            field = record._fields['create_date']
            get_value = field.expression_getter('create_date.month_number')
            month_number = get_value(record)

        The base implementation only supports the bare field name; field types
        with properties override this method.

        :raises ValueError: when ``field_expr`` is not supported by this field
        """
        if field_expr == self.name:
            # plain field access: the descriptor's __get__ is the getter
            return self.__get__
        raise ValueError(f"Expression not supported on {self}: {field_expr!r}")
def filter_function(self, records: M, field_expr: str, operator: str, value) -> Callable[[M], M]:
    """ Return a predicate implementing the (positive) domain condition
    ``(field_expr, operator, value)`` in Python, for filtering records in
    memory.

    :param records: recordset giving access to the environment (used for
        ``ilike`` unaccenting); the returned callable is applied per record
    :param field_expr: field expression, resolved via :meth:`expression_getter`
    :param operator: a positive domain operator: ``in``, a ``like`` variant,
        or an inequality (``<``, ``>``, ``<=``, ``>=``)
    :param value: the comparison value, in the operator's expected format
    :raises NotImplementedError: for any other operator

    NOTE(review): the callables built below return truthy/falsy values, not
    recordsets — the ``Callable[[M], M]`` return annotation looks off; confirm.
    """
    assert operator not in Domain.NEGATIVE_OPERATORS, "only positive operators are implemented"
    getter = self.expression_getter(field_expr)
    # assert not isinstance(value, (SQL, Query))

    # -------------------------------------------------
    # operator: in (equality)
    if operator == 'in':
        assert isinstance(value, COLLECTION_TYPES) and value, \
            f"filter_function() 'in' operator expects a collection, not a {type(value)}"
        if not isinstance(value, AbstractSet):
            value = set(value)
        if False in value or self.falsy_value in value:
            # falsy record values (False, None, '', 0, ...) must match too
            if len(value) == 1:
                return lambda rec: not getter(rec)
            return lambda rec: (val := getter(rec)) in value or not val
        return lambda rec: getter(rec) in value

    # -------------------------------------------------
    # operator: like
    if operator.endswith('like'):
        # we may get a value which is not a string
        if operator.endswith('ilike'):
            # ilike uses unaccent and lower-case comparison
            unaccent_python = records.env.registry.unaccent_python

            def unaccent(x):
                return unaccent_python(str(x).lower()) if x else ''
        else:
            def unaccent(x):
                return str(x) if x else ''

        # build a regex that matches the SQL-like expression
        # note that '\' is used for escaping in SQL
        def build_like_regex(value: str, exact: bool):
            yield '^' if exact else '.*'
            escaped = False
            for char in value:
                if escaped:
                    escaped = False
                    yield re.escape(char)
                elif char == '\\':
                    escaped = True
                elif char == '%':
                    # SQL '%' matches any sequence of characters
                    yield '.*'
                elif char == '_':
                    # SQL '_' matches exactly one character
                    yield '.'
                else:
                    yield re.escape(char)
            if exact:
                yield '$'
            # no need to match r'.*' in else because we only use .match()

        # '=' in the operator ("=like"/"=ilike") means an exact match
        like_regex = re.compile("".join(build_like_regex(unaccent(value), "=" in operator)), flags=re.DOTALL)
        return lambda rec: like_regex.match(unaccent(getter(rec)))

    # -------------------------------------------------
    # operator: inequality
    if pyop := PYTHON_INEQUALITY_OPERATOR.get(operator):
        can_be_null = False
        if (null_value := self.falsy_value) is not None:
            # a falsy record value compares as the field's falsy value
            value = value or null_value
            can_be_null = (
                null_value < value if operator == '<' else
                null_value > value if operator == '>' else
                null_value <= value if operator == '<=' else
                null_value >= value  # operator == '>='
            )

        def check_inequality(rec):
            rec_value = getter(rec)
            try:
                if rec_value is False or rec_value is None:
                    return can_be_null
                return pyop(rec_value, value)
            except (ValueError, TypeError):
                # ignoring error, type mismatch
                return False
        return check_inequality

    # -------------------------------------------------
    raise NotImplementedError(f"Invalid simple operator {operator!r}")
1481 ############################################################################
1482 #
1483 # Alternatively stored fields: if fields don't have a `column_type` (not
1484 # stored as regular db columns) they go through a read/create/write
1485 # protocol instead
1486 #
def read(self, records: BaseModel) -> None:
    """ Read the value of ``self`` on ``records``, and store it in cache.

    The base implementation only supports fields backed by a database
    column (fetched elsewhere); alternatively-stored fields must override
    this method.

    :raises NotImplementedError: if the field has no ``column_type``
    """
    if not self.column_type:
        # f-string instead of dated %-formatting, consistent with the
        # other error messages in this file
        raise NotImplementedError(f"Method read() undefined on {self}")
def create(self, record_values: Collection[tuple[BaseModel, typing.Any]]) -> None:
    """ Assign the value of ``self`` on freshly created records.

    :param record_values: pairs ``(record, value)``, where ``value`` is in
        the format of method :meth:`BaseModel.write`; each pair is handed
        to :meth:`write`
    """
    for pair in record_values:
        self.write(*pair)
def write(self, records: BaseModel, value: typing.Any) -> None:
    """ Assign ``value`` to ``self`` on ``records``: cancel their pending
    recomputations, skip records whose cached value already matches, and
    mark the remaining ones dirty in cache for a later database update.

    :param records: the records to update
    :param value: a value in any input format
    """
    env = records.env
    # a scheduled recomputation of self would overwrite this assignment
    env.remove_to_compute(self, records)

    cache_value = self.convert_to_cache(value, records)
    # only touch the records whose cache value actually changes
    changed = self._filter_not_equal(records, cache_value)
    if changed:
        self._update_cache(changed, cache_value, dirty=True)
1522 ############################################################################
1523 #
1524 # Cache management methods
1525 #
1527 def _get_cache(self, env: Environment) -> MutableMapping[IdType, typing.Any]:
1528 """ Return the field's cache, i.e., a mutable mapping from record id to
1529 a cache value. The cache may be environment-specific. This mapping is
1530 the way to retrieve a field's value for a given record.
1532 Calling this function multiple times, always returns the same mapping
1533 instance for a given environment, unless the transaction was entirely
1534 invalidated.
1535 """
1536 try:
1537 return env._field_cache_memo[self]
1538 except KeyError:
1539 field_cache = self._get_cache_impl(env)
1540 env._field_cache_memo[self] = field_cache
1541 return field_cache
1543 def _get_cache_impl(self, env: Environment) -> MutableMapping[IdType, typing.Any]:
1544 """ Implementation of :meth:`_get_cache`. This method may provide a
1545 view to the actual cache, depending on the needs of the field.
1546 """
1547 cache = env.transaction.field_data[self]
1548 if self in env._field_depends_context:
1549 cache = cache.setdefault(env.cache_key(self), {})
1550 return cache
1552 def _invalidate_cache(self, env: Environment, ids: Collection[IdType] | None = None) -> None:
1553 """ Invalidate the field's cache for the given ids, or all record ids if ``None``. """
1554 cache = env.transaction.field_data.get(self)
1555 if not cache:
1556 return
1558 caches = cache.values() if self in env._field_depends_context else (cache,)
1559 for field_cache in caches:
1560 if ids is None:
1561 field_cache.clear()
1562 continue
1563 for id_ in ids:
1564 field_cache.pop(id_, None)
1566 def _get_all_cache_ids(self, env: Environment) -> Collection[IdType]:
1567 """ Return all the record ids that have a value in cache in any environment. """
1568 cache = env.transaction.field_data[self]
1569 if self in env._field_depends_context:
1570 # trick to cheaply "merge" the keys of the environment-specific dicts
1571 return collections.ChainMap(*cache.values())
1572 return cache
1574 def _cache_missing_ids(self, records: BaseModel) -> Iterator[IdType]:
1575 """ Generator of ids that have no value in cache. """
1576 field_cache = self._get_cache(records.env)
1577 return (id_ for id_ in records._ids if id_ not in field_cache)
def _filter_not_equal(self, records: ModelType, cache_value: typing.Any) -> ModelType:
    """ Keep only the ``records`` whose cached value for ``self`` is either
    absent or different from ``cache_value``.
    """
    cached = self._get_cache(records.env)
    # SENTINEL marks "not in cache"; it never compares equal to a real value
    kept_ids = [
        record_id
        for record_id in records._ids
        if cached.get(record_id, SENTINEL) != cache_value
    ]
    return records.browse(kept_ids)
def _to_prefetch(self, record: ModelType) -> ModelType:
    """ Return a recordset containing ``record`` plus up to ``PREFETCH_MAX``
    prefetchable records that have no cached value for ``self``.
    """
    candidate_ids = expand_ids(record.id, record._prefetch_ids)
    cached = self._get_cache(record.env)
    uncached_ids = (id_ for id_ in candidate_ids if id_ not in cached)
    return record.browse(itertools.islice(uncached_ids, PREFETCH_MAX))
def _insert_cache(self, records: BaseModel, values: Iterable) -> None:
    """ Update the cache of the given records with the corresponding values,
    skipping the records that already have a value in cache (``setdefault``
    semantics). This keeps pending (dirty) cache updates intact, so they
    can still be flushed later.

    :param records: the records to fill in cache
    :param values: cache values, aligned with ``records._ids``
    """
    field_cache = self._get_cache(records.env)
    # call setdefault for all ids, values (looping in C)
    # this is ~15% faster than the equivalent:
    # ```
    # for record, value in zip(records._ids, values):
    #     field_cache.setdefault(record, value)
    # ```
    # deque(..., maxlen=0) consumes the map iterator without storing results
    collections.deque(map(field_cache.setdefault, records._ids, values), maxlen=0)
1611 def _update_cache(self, records: BaseModel, cache_value: typing.Any, dirty: bool = False) -> None:
1612 """ Update the value in the cache for the given records, and optionally
1613 make the field dirty for those records (for stored column fields only).
1615 One can normally make a clean field dirty but not the other way around.
1616 Updating a dirty field without ``dirty=True`` is a programming error and
1617 logs an error.
1619 :param dirty: whether ``field`` must be made dirty on ``record`` after
1620 the update
1621 """
1622 env = records.env
1623 field_cache = self._get_cache(env)
1624 for id_ in records._ids:
1625 field_cache[id_] = cache_value
1627 # dirty only makes sense for stored column fields
1628 if self.column_type and self.store:
1629 if dirty:
1630 env._field_dirty[self].update(id_ for id_ in records._ids if id_)
1631 else:
1632 dirty_ids = env._field_dirty.get(self)
1633 if dirty_ids and not dirty_ids.isdisjoint(records._ids): 1633 ↛ 1634line 1633 didn't jump to line 1634 because the condition on line 1633 was never true
1634 _logger.error(
1635 "Field._update_cache() updating the value on %s.%s where dirty flag is already set",
1636 records, self.name, stack_info=True,
1637 )
1639 ############################################################################
1640 #
1641 # Descriptor methods
1642 #
def __get__(self, record: BaseModel, owner=None) -> T:
    """ return the value of field ``self`` on ``record``

    Descriptor protocol entry point: ``record.field_name`` lands here.
    Handles field access rights, the empty recordset, pending stored
    computations, and the full cache-miss fallback cascade (database
    fetch, origin copy, compute, parent delegate, default value).
    """
    if record is None:
        return self  # the field is accessed through the owner class

    env = record.env
    if not (env.su or record._has_field_access(self, 'read')):
        # optimization: we called _has_field_access() to avoid an extra
        # function call in _check_field_access()
        record._check_field_access(self, 'read')

    record_len = len(record._ids)
    if record_len != 1:
        if record_len:
            # let ensure_one() raise the proper exception
            record.ensure_one()
            assert False, "unreachable"
        # null record -> return the null value for this field
        value = self.convert_to_cache(False, record, validate=False)
        return self.convert_to_record(value, record)

    if self.compute and self.store:
        # process pending computations
        self.recompute(record)

    record_id = record._ids[0]
    field_cache = self._get_cache(env)
    try:
        value = field_cache[record_id]
        # convert to record may also throw a KeyError if the value is not
        # in cache, in that case, the fallbacks should be implemented to
        # read it correctly
        return self.convert_to_record(value, record)
    except KeyError:
        pass
    # behavior in case of cache miss:
    #
    # on a real record:
    #    stored -> fetch from database (computation done above)
    #    not stored and computed -> compute
    #    not stored and not computed -> default
    #
    # on a new record w/ origin:
    #    stored and not (computed and readonly) -> fetch from origin
    #    stored and computed and readonly -> compute
    #    not stored and computed -> compute
    #    not stored and not computed -> default
    #
    # on a new record w/o origin:
    #    stored and computed -> compute
    #    stored and not computed -> new delegate or default
    #    not stored and computed -> compute
    #    not stored and not computed -> default
    #
    if self.store and record_id:
        # real record: fetch from database
        recs = self._to_prefetch(record)
        try:
            recs._fetch_field(self)
            fallback_single = False
        except AccessError:
            # the failure may come from another prefetched record;
            # retry on the single record below in that case
            if len(recs) == 1:
                raise
            fallback_single = True
        if fallback_single:
            record._fetch_field(self)
        value = field_cache.get(record_id, SENTINEL)
        if value is SENTINEL:
            # the fetch did not fill the cache: the record is gone
            raise MissingError("\n".join([
                env._("Record does not exist or has been deleted."),
                env._("(Record: %(record)s, User: %(user)s)", record=record, user=env.uid),
            ])) from None

    elif self.store and record._origin and not (self.compute and self.readonly):
        # new record with origin: fetch from origin, and assign the
        # records to prefetch in cache (which is necessary for
        # relational fields to "map" prefetching ids to their value)
        recs = self._to_prefetch(record)
        try:
            for rec in recs:
                if (rec_origin := rec._origin):
                    value = self.convert_to_cache(rec_origin[self.name], rec, validate=False)
                    self._update_cache(rec, value)
            fallback_single = False
        except (AccessError, KeyError, MissingError):
            # the failure may come from another prefetched record;
            # retry on the single record below in that case
            if len(recs) == 1:
                raise
            fallback_single = True
        if fallback_single:
            value = self.convert_to_cache(record._origin[self.name], record, validate=False)
            self._update_cache(record, value)
        # get the final value (see patches in x2many fields)
        value = field_cache[record_id]

    elif self.compute:
        # non-stored field or new record without origin: compute
        if env.is_protected(self, record):
            # the field is being computed on this record right now:
            # break the recursion with the null value
            value = self.convert_to_cache(False, record, validate=False)
            self._update_cache(record, value)
        else:
            recs = record if self.recursive else self._to_prefetch(record)
            try:
                self.compute_value(recs)
                fallback_single = False
            except (AccessError, MissingError):
                fallback_single = True
            if fallback_single:
                # retry on the single record only
                self.compute_value(record)
                recs = record

            missing_recs_ids = tuple(self._cache_missing_ids(recs))
            if missing_recs_ids:
                missing_recs = record.browse(missing_recs_ids)
                if self.readonly and not self.store:
                    raise ValueError(f"Compute method failed to assign {missing_recs}.{self.name}")
                # fallback to null value if compute gives nothing, do it for every unset record
                false_value = self.convert_to_cache(False, record, validate=False)
                self._update_cache(missing_recs, false_value)

            # cache could have been entirely invalidated by compute
            # as some compute methods call indirectly env.invalidate_all()
            field_cache = self._get_cache(env)
        value = field_cache[record_id]

    elif self.type == 'many2one' and self.delegate and not record_id:
        # parent record of a new record: new record, with the same
        # values as record for the corresponding inherited fields
        def is_inherited_field(name):
            field = record._fields[name]
            return field.inherited and field.related.split('.')[0] == self.name

        parent = record.env[self.comodel_name].new({
            name: value
            for name, value in record._cache.items()
            if is_inherited_field(name)
        })
        # in case the delegate field has inverse one2many fields, this
        # updates the inverse fields as well
        value = self.convert_to_cache(parent, record, validate=False)
        self._update_cache(record, value)
        # set inverse fields on new records in the comodel
        # TODO move this logic to _update_cache?
        if inv_recs := parent.filtered(lambda r: not r.id):
            for invf in env.registry.field_inverses[self]:
                invf._update_inverse(inv_recs, record)

    else:
        # non-stored field or stored field on new record: default value
        value = self.convert_to_cache(False, record, validate=False)
        self._update_cache(record, value)
        defaults = record.default_get([self.name])
        if self.name in defaults:
            # The null value above is necessary to convert x2many field
            # values. For instance, converting [(Command.LINK, id)]
            # accesses the field's current value, then adds the given
            # id. Without an initial value, the conversion ends up here
            # to determine the field's value, and generates an infinite
            # recursion.
            value = self.convert_to_cache(defaults[self.name], record)
            self._update_cache(record, value)
        # get the final value (see patches in x2many fields)
        value = field_cache[record_id]

    return self.convert_to_record(value, record)
def __set__(self, records: BaseModel, value) -> None:
    """ set the value of field ``self`` on ``records``

    Records are dispatched into three buckets, each with a different
    amount of business logic: protected records (currently being
    computed), new records (no database), and regular records (full
    ``write()`` protocol).
    """
    protected_ids = []
    new_ids = []
    other_ids = []
    for record_id in records._ids:
        if record_id in records.env._protected.get(self, ()):
            protected_ids.append(record_id)
        elif not record_id:
            # falsy ids denote new (not yet saved) records
            new_ids.append(record_id)
        else:
            other_ids.append(record_id)

    if protected_ids:
        # records being computed: no business logic, no recomputation
        protected_records = records.__class__(records.env, tuple(protected_ids), records._prefetch_ids)
        self.write(protected_records, value)

    if new_ids:
        # new records: no business logic
        new_records = records.__class__(records.env, tuple(new_ids), records._prefetch_ids)
        # protect the computed fields of self (or self itself) so the
        # assignment does not trigger their recomputation
        with records.env.protecting(records.pool.field_computed.get(self, [self]), new_records):
            if self.relational:
                new_records.modified([self.name], before=True)
            self.write(new_records, value)
            new_records.modified([self.name])

        if self.inherited:
            # special case: also assign parent records if they are new
            parents = new_records[self.related.split('.')[0]]
            parents.filtered(lambda r: not r.id)[self.name] = value

    if other_ids:
        # base case: full business logic
        records = records.__class__(records.env, tuple(other_ids), records._prefetch_ids)
        write_value = self.convert_to_write(value, records)
        records.write({self.name: write_value})
1847 ############################################################################
1848 #
1849 # Computation of field values
1850 #
def recompute(self, records: BaseModel) -> None:
    """ Process the pending computations of ``self`` on ``records``. This
    should be called only if ``self`` is computed and stored.

    :param records: records among which the to-compute ones are searched;
        records without a pending computation are left untouched
    """
    to_compute_ids = records.env.transaction.tocompute.get(self)
    if not to_compute_ids:
        # nothing pending for this field
        return

    def apply_except_missing(func, records):
        """ Apply `func` on `records`, with a fallback ignoring non-existent records. """
        try:
            func(records)
            return
        except MissingError:
            pass

        # retry on the records that still exist in database
        existing = records.exists()
        if existing:
            func(existing)
        # mark the field as computed on missing records, otherwise they
        # remain to compute forever, which may lead to an infinite loop
        missing = records - existing
        for f in records.pool.field_computed[self]:
            records.env.remove_to_compute(f, missing)

    if self.recursive:
        # recursive computed fields are computed record by record, in order
        # to recursively handle dependencies inside records
        def recursive_compute(records):
            for record in records:
                if record.id in to_compute_ids:
                    self.compute_value(record)

        apply_except_missing(recursive_compute, records)
        return

    for record in records:
        if record.id in to_compute_ids:
            # compute the record together with a batch of other pending
            # records, capped at PREFETCH_MAX
            ids = expand_ids(record.id, to_compute_ids)
            recs = record.browse(itertools.islice(ids, PREFETCH_MAX))
            try:
                apply_except_missing(self.compute_value, recs)
                continue
            except AccessError:
                pass
            # the batch failed for access reasons: retry the single record
            self.compute_value(record)
def compute_value(self, records: BaseModel) -> None:
    """ Invoke the compute method on ``records``; the results are in cache.

    If the compute method raises, the stored computed fields are put back
    into the to-compute queue before re-raising.
    """
    env = records.env
    if self.compute_sudo:
        records = records.sudo()
    # all the fields computed by the same method as self
    fields = records.pool.field_computed[self]

    # Just in case the compute method does not assign a value, we already
    # mark the computation as done. This is also necessary if the compute
    # method accesses the old value of the field: the field will be fetched
    # with _read(), which will flush() it. If the field is still to compute,
    # the latter flush() will recursively compute this field!
    for field in fields:
        if field.store:
            env.remove_to_compute(field, records)

    try:
        # protect the computed fields so assignments inside the compute
        # method do not retrigger their own computation
        with records.env.protecting(fields, records):
            records._compute_field_value(self)
    except Exception:
        # restore the to-compute marks so the computation can be retried
        for field in fields:
            if field.store:
                env.add_to_compute(field, records)
        raise
def determine_inverse(self, records):
    """ Run the field's ``inverse`` handler on ``records``, propagating the
    computed value of ``self`` back to its source fields.
    """
    inverse_handler = self.inverse
    determine(inverse_handler, records)
def determine_domain(self, records: BaseModel, operator: str, value) -> typing.Any:
    """ Build a domain for the condition ``(self, operator, value)`` by
    delegating to the field's ``search`` handler.
    """
    search_handler = self.search
    return determine(search_handler, records, operator, value)
def determine_group_expand(self, records, values, domain):
    """ Invoke the field's ``group_expand`` handler with the current group
    ``values`` and search ``domain``, and return its result (presumably
    the expanded set of groups — the previous docstring, "Return a
    domain...", was copy-pasted from :meth:`determine_domain`).
    """
    return determine(self.group_expand, records, values, domain)
1937# forward-reference to models because we have this last cyclic dependency
1938# it is used in this file only for asserts
1939from . import models as _models # noqa: E402