"""
-import copy
import datetime
import functools
import itertools
from . import api
from . import tools
from .api import Environment
-from .exceptions import except_orm, AccessError, MissingError
+from .exceptions import except_orm, AccessError, MissingError, ValidationError
from .osv import fields
from .osv.query import Query
-from .tools import lazy_property
+from .tools import lazy_property, ormcache
from .tools.config import config
from .tools.misc import CountingStream, DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT
from .tools.safe_eval import safe_eval as eval
if field_type in FIELDS_TO_PGTYPES:
pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
elif issubclass(field_type, fields.float):
- if f.digits:
+ # Explicit support for "falsy" digits (0, False) to indicate a
+ # NUMERIC field with no fixed precision. The values will be saved
+ # in the database with all significant digits.
+ # FLOAT8 type is still the default when there is no precision because
+ # it is faster for most operations (sums, etc.)
+ if f.digits is not None:
pg_type = ('numeric', 'NUMERIC')
else:
pg_type = ('float8', 'DOUBLE PRECISION')
if not self._custom:
self.module_to_models.setdefault(self._module, []).append(self)
+ # check for new-api conversion error: leave comma after field definition
+ for key, val in attrs.iteritems():
+ if type(val) is tuple and len(val) == 1 and isinstance(val[0], Field):
+ _logger.error("Trailing comma after field definition: %s.%s", self, key)
+
+ # transform columns into new-style fields (enables field inheritance)
+ for name, column in self._columns.iteritems():
+ if name in self.__dict__:
+ _logger.warning("In class %s, field %r overriding an existing value", self, name)
+ setattr(self, name, column.to_field())
+
class NewId(object):
""" Pseudo-ids for new records. """
IdType = (int, long, basestring, NewId)
+# maximum number of prefetched records
+PREFETCH_MAX = 200
+
# special columns automatically created by the ORM
LOG_ACCESS_COLUMNS = ['create_uid', 'create_date', 'write_uid', 'write_date']
MAGIC_COLUMNS = ['id'] + LOG_ACCESS_COLUMNS
# This is similar to _inherit_fields but:
# 1. includes self fields,
# 2. uses column_info instead of a triple.
+ # Warning: _all_columns is deprecated, use _fields instead
_all_columns = {}
_table = None
@classmethod
def _add_field(cls, name, field):
    """ Add the given `field` under the given `name` in the class.

    The field is installed both in ``cls._fields`` (for reflection) and as
    a class attribute; when the attribute slot is already occupied by a
    non-field value, a warning is logged and the value is overridden.
    """
    # add field as an attribute and in cls._fields (for reflection)
    if not isinstance(getattr(cls, name, field), Field):
        _logger.warning("In model %r, field %r overriding existing value", cls._name, name)
    setattr(cls, name, field)
    cls._fields[name] = field

    # basic setup of field; must happen after the field is registered on
    # the class, since set_class_name() binds it to (cls, name)
    field.set_class_name(cls, name)

    if field.store or field.column:
        # stored/column-backed fields also get an old-style column
        cls._columns[name] = field.to_column()
    else:
        # remove potential column that may be overridden by field
        cls._columns.pop(name, None)
@classmethod
def _pop_field(cls, name):
    """ Remove the field with the given `name` from the model.
        This method should only be used for manual fields.

        :param name: name of the field to remove
        :return: the removed field object
        :raise KeyError: if no such field exists in ``cls._fields``
    """
    field = cls._fields.pop(name)
    cls._columns.pop(name, None)
    cls._all_columns.pop(name, None)
    # Only delete the attribute when it lives on this very class:
    # hasattr() is also true for attributes inherited from base classes,
    # and delattr() would then raise AttributeError.
    if name in cls.__dict__:
        delattr(cls, name)
    return field
+ @classmethod
def _add_magic_fields(cls):
""" Introduce magic fields on the current class
"""
def add(name, field):
""" add `field` with the given `name` if it does not exist yet """
- if name not in cls._columns and name not in cls._fields:
+ if name not in cls._fields:
cls._add_field(name, field)
# cyclic import
)
columns.update(cls._columns)
- defaults = dict(parent_class._defaults)
- defaults.update(cls._defaults)
-
inherits = dict(parent_class._inherits)
inherits.update(cls._inherits)
'_name': name,
'_register': False,
'_columns': columns,
- '_defaults': defaults,
'_inherits': inherits,
'_depends': depends,
'_constraints': constraints,
'_name': name,
'_register': False,
'_columns': dict(cls._columns),
- '_defaults': dict(cls._defaults),
+ '_defaults': {}, # filled by Field._determine_default()
'_inherits': dict(cls._inherits),
'_depends': dict(cls._depends),
'_constraints': list(cls._constraints),
}
cls = type(cls._name, (cls,), attrs)
- # float fields are registry-dependent (digit attribute); duplicate them
- # to avoid issues
- for key, col in cls._columns.items():
- if col._type == 'float':
- cls._columns[key] = copy.copy(col)
-
# instantiate the model, and initialize it
model = object.__new__(cls)
model.__init__(pool, cr)
pool._store_function[model].sort(key=lambda x: x[4])
@classmethod
def _init_manual_fields(cls, cr):
    """ Add manual fields (``state='manual'`` in ``ir_model_fields``) to
    the model class.

    Field definitions are taken from the registry cache when available,
    otherwise read directly from the database.

    :param cr: database cursor
    """
    # Check whether the query is already done
    if cls.pool.fields_by_model is not None:
        manual_fields = cls.pool.fields_by_model.get(cls._name, [])
    else:
        cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (cls._name, 'manual'))
        manual_fields = cr.dictfetchall()

    for field in manual_fields:
        # never override a field that already exists on the model
        if field['name'] in cls._fields:
            continue
        attrs = {
            'manual': True,
            'string': field['field_description'],
            'required': bool(field['required']),
            'readonly': bool(field['readonly']),
        }
        # FIXME: ignore field['serialization_field_id']
        # NOTE: eval below is safe_eval (aliased at module import time),
        # applied to selection/domain strings stored in ir_model_fields
        if field['ttype'] in ('char', 'text', 'html'):
            attrs['translate'] = bool(field['translate'])
            attrs['size'] = field['size'] or None
        elif field['ttype'] in ('selection', 'reference'):
            attrs['selection'] = eval(field['selection'])
        elif field['ttype'] == 'many2one':
            attrs['comodel_name'] = field['relation']
            attrs['ondelete'] = field['on_delete']
            attrs['domain'] = eval(field['domain']) if field['domain'] else None
        elif field['ttype'] == 'one2many':
            attrs['comodel_name'] = field['relation']
            attrs['inverse_name'] = field['relation_field']
            attrs['domain'] = eval(field['domain']) if field['domain'] else None
        elif field['ttype'] == 'many2many':
            attrs['comodel_name'] = field['relation']
            # default relation table/column names for custom m2m fields
            _rel1 = field['relation'].replace('.', '_')
            _rel2 = field['model'].replace('.', '_')
            attrs['relation'] = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
            attrs['column1'] = 'id1'
            attrs['column2'] = 'id2'
            attrs['domain'] = eval(field['domain']) if field['domain'] else None
        # instantiate the new-style field class matching the ttype
        cls._add_field(field['name'], Field.by_type[field['ttype']](**attrs))
@classmethod
def _init_constraints_onchanges(cls):
cls._onchange_methods = defaultdict(list)
for attr, func in getmembers(cls, callable):
if hasattr(func, '_constrains'):
- if not all(name in cls._fields for name in func._constrains):
- _logger.warning("@constrains%r parameters must be field names", func._constrains)
cls._constraint_methods.append(func)
if hasattr(func, '_onchange'):
- if not all(name in cls._fields for name in func._onchange):
- _logger.warning("@onchange%r parameters must be field names", func._onchange)
for name in func._onchange:
cls._onchange_methods[name].append(func)
"TransientModels must have log_access turned on, " \
"in order to implement their access rights policy"
- # retrieve new-style fields and duplicate them (to avoid clashes with
- # inheritance between different models)
+ # retrieve new-style fields (from above registry class) and duplicate
+ # them (to avoid clashes with inheritance between different models)
cls._fields = {}
- for attr, field in getmembers(cls, Field.__instancecheck__):
- if not field._origin:
- cls._add_field(attr, field.copy())
+ above = cls.__bases__[0]
+ for attr, field in getmembers(above, Field.__instancecheck__):
+ cls._add_field(attr, field.new())
# introduce magic fields
cls._add_magic_fields()
# register stuff about low-level function fields and custom fields
cls._init_function_fields(pool, cr)
- cls._init_manual_fields(pool, cr)
-
- # process _inherits
- cls._inherits_check()
- cls._inherits_reload()
# register constraints and onchange methods
cls._init_constraints_onchanges()
- # check defaults
- for k in cls._defaults:
- assert k in cls._fields, \
- "Model %s has a default for nonexiting field %s" % (cls._name, k)
-
- # restart columns
- for column in cls._columns.itervalues():
- column.restart()
-
- # validate rec_name
- if cls._rec_name:
- assert cls._rec_name in cls._fields, \
- "Invalid rec_name %s for model %s" % (cls._rec_name, cls._name)
- elif 'name' in cls._fields:
- cls._rec_name = 'name'
-
# prepare ormcache, which must be shared by all instances of the model
cls._ormcache = {}
@api.model
@ormcache()
def _is_an_ordinary_table(self):
    """ Return whether the model's backing table is a plain table in
        ``pg_class`` (``relkind = 'r'``), as opposed to e.g. a view.
        The result is cached per model (ormcache).
    """
    cr = self.env.cr
    cr.execute("""\
        SELECT 1
        FROM pg_class
        WHERE relname = %s
        AND relkind = %s""", [self._table, 'r'])
    return cr.fetchone() is not None
def __export_xml_id(self):
""" Return a valid xml_id for the record `self`. """
+ if not self._is_an_ordinary_table():
+ raise Exception(
+ "You can not export the column ID of model %s, because the "
+ "table %s is not an ordinary table."
+ % (self._name, self._table))
ir_model_data = self.sudo().env['ir.model.data']
data = ir_model_data.search([('model', '=', self._name), ('res_id', '=', self.id)])
if data:
* "id" is the External ID for the record
* ".id" is the Database ID for the record
"""
- columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
- # Fake columns to avoid special cases in extractor
- columns[None] = fields.char('rec_name')
- columns['id'] = fields.char('External ID')
- columns['.id'] = fields.integer('Database ID')
+ from openerp.fields import Char, Integer
+ fields = dict(self._fields)
+ # Fake fields to avoid special cases in extractor
+ fields[None] = Char('rec_name')
+ fields['id'] = Char('External ID')
+ fields['.id'] = Integer('Database ID')
# m2o fields can't be on multiple lines so exclude them from the
# is_relational field rows filter, but special-case it later on to
# be handled with relational fields (as it can have subfields)
- is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
+ is_relational = lambda field: fields[field].relational
get_o2m_values = itemgetter_tuple(
[index for index, field in enumerate(fields_)
- if columns[field[0]]._type == 'one2many'])
+ if fields[field[0]].type == 'one2many'])
get_nono2m_values = itemgetter_tuple(
[index for index, field in enumerate(fields_)
- if columns[field[0]]._type != 'one2many'])
+ if fields[field[0]].type != 'one2many'])
# Checks if the provided row has any non-empty non-relational field
def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
return any(g(row)) and not any(f(row))
for relfield in set(
field[0] for field in fields_
if is_relational(field[0])):
- column = columns[relfield]
# FIXME: how to not use _obj without relying on fields_get?
- Model = self.pool[column._obj]
+ Model = self.pool[fields[relfield].comodel_name]
# get only cells for this sub-field, should be strictly
- # non-empty, field path [None] is for name_get column
+ # non-empty, field path [None] is for name_get field
indices, subfields = zip(*((index, field[1:] or [None])
for index, field in enumerate(fields_)
if field[0] == relfield))
"""
if context is None: context = {}
Converter = self.pool['ir.fields.converter']
- columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
Translation = self.pool['ir.translation']
+ fields = dict(self._fields)
field_names = dict(
(f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
context.get('lang'))
- or column.string))
- for f, column in columns.iteritems())
+ or field.string))
+ for f, field in fields.iteritems())
convert = Converter.for_model(cr, uid, self, context=context)
(', '.join(names), res_msg)
)
if errors:
- raise except_orm('ValidateError', '\n'.join(errors))
+ raise ValidationError('\n'.join(errors))
# new-style constraint methods
for check in self._constraint_methods:
if set(check._constrains) & field_names:
- check(self)
+ try:
+ check(self)
+ except ValidationError, e:
+ raise
+ except Exception, e:
+ raise ValidationError("Error while validating constraint\n\n%s" % tools.ustr(e))
+
@api.model
def default_get(self, fields_list):
    """ default_get(fields) -> default_values

    Return default values for the fields in `fields_list`. Default
    values are determined by the context, user defaults, and the model
    itself.

    :param fields_list: a list of field names
    :return: a dictionary mapping each field name to its corresponding
        default value, if it has one.
    """
    # trigger view init hook
    self.view_init(fields_list)

    defaults = {}
    parent_fields = defaultdict(list)       # model name -> inherited field names

    # user-defined defaults; get_defaults_dict is cached, but there is
    # still no point in looking it up once per requested field
    ir_values_dict = self.env['ir.values'].get_defaults_dict(self._name)

    for name in fields_list:
        # 1. look up context
        key = 'default_' + name
        if key in self._context:
            defaults[name] = self._context[key]
            continue

        # 2. look up ir_values
        if name in ir_values_dict:
            defaults[name] = ir_values_dict[name]
            continue

        field = self._fields.get(name)

        # 3. look up property fields
        # TODO: get rid of this one
        if field and field.company_dependent:
            defaults[name] = self.env['ir.property'].get(name, self._name)
            continue

        # 4. look up field.default
        if field and field.default:
            defaults[name] = field.default(self)
            continue

        # 5. delegate to parent model
        if field and field.inherited:
            field = field.related_field
            parent_fields[field.model_name].append(field.name)

    # convert default values to the right format
    defaults = self._convert_to_cache(defaults, validate=False)
    defaults = self._convert_to_write(defaults)

    # add default values for inherited fields
    for model, names in parent_fields.iteritems():
        defaults.update(self.env[model].default_get(names))

    return defaults
def fields_get_keys(self, cr, user, context=None):
res = self._columns.keys()
return view
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
- """
+ """ fields_view_get([view_id | view_type='form'])
+
Get the detailed composition of the requested view like fields, model, view architecture
:param view_id: id of the view or None
cr, uid, self._name, node, view_id, context=context)
def search_count(self, cr, user, args, context=None):
+ """ search_count(args) -> int
+
+ Returns the number of records in the current model matching :ref:`the
+ provided domain <reference/orm/domains>`.
+ """
res = self.search(cr, user, args, context=context, count=True)
if isinstance(res, list):
return len(res)
@api.returns('self')
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
    """ search(args[, offset=0][, limit=None][, order=None])

    Searches for records based on the ``args``
    :ref:`search domain <reference/orm/domains>`.

    :param args: :ref:`A search domain <reference/orm/domains>`. Use an empty
        list to match all records.
    :param int offset: number of results to ignore (default: none)
    :param int limit: maximum number of records to return (default: all)
    :param str order: sort string
    :param bool count: if True, only the number of matching records is
        returned instead of the records themselves (default: False)
    :returns: at most ``limit`` records matching the search criteria
    :raise AccessError: * if user tries to bypass access rules for read on the requested object.
    """
    return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
@api.depends(lambda self: (self._rec_name,) if self._rec_name else ())
def _compute_display_name(self):
    """ Fill in ``display_name`` for every record from ``name_get()``;
        records without a textual representation get ``False``.
    """
    id_to_name = dict(self.name_get())
    for rec in self:
        rec.display_name = id_to_name.get(rec.id, False)
@api.multi
def name_get(self):
- """ Return a textual representation for the records in `self`.
- By default this is the value of field ``display_name``.
+ """ name_get() -> [(id, name), ...]
- :rtype: list(tuple)
- :return: list of pairs ``(id, text_repr)`` for all records
+ Returns a textual representation for the records in ``self``.
+ By default this is the value of the ``display_name`` field.
+
+ :return: list of pairs ``(id, text_repr)`` for each records
+ :rtype: list(tuple)
"""
result = []
name = self._rec_name
@api.model
def name_create(self, name):
- """ Create a new record by calling :meth:`~.create` with only one value
- provided: the display name of the new record.
+ """ name_create(name) -> record
+
+ Create a new record by calling :meth:`~.create` with only one value
+ provided: the display name of the new record.
- The new record will be initialized with any default values
- applicable to this model, or provided through the context. The usual
- behavior of :meth:`~.create` applies.
+ The new record will be initialized with any default values
+ applicable to this model, or provided through the context. The usual
+ behavior of :meth:`~.create` applies.
- :param name: display name of the record to create
- :rtype: tuple
- :return: the :meth:`~.name_get` pair value of the created record
+ :param name: display name of the record to create
+ :rtype: tuple
+ :return: the :meth:`~.name_get` pair value of the created record
"""
if self._rec_name:
record = self.create({self._rec_name: name})
@api.model
def name_search(self, name='', args=None, operator='ilike', limit=100):
    """ name_search(name='', args=None, operator='ilike', limit=100) -> records

    Search for records that have a display name matching the given
    `name` pattern when compared with the given `operator`, while also
    matching the optional search domain (`args`).

    This is used for example to provide suggestions based on a partial
    value for a relational field. It may sometimes be seen as the inverse
    function of :meth:`~.name_get`, but it is not guaranteed to be.

    This method is equivalent to calling :meth:`~.search` with a search
    domain based on ``display_name`` and then :meth:`~.name_get` on the
    result of the search.

    :param str name: the name pattern to match
    :param list args: optional search domain (see :meth:`~.search` for
        syntax), specifying further restrictions
    :param str operator: domain operator for matching `name`, such as
        ``'like'`` or ``'='``.
    :param int limit: optional max number of records to return
    :rtype: list
    :return: list of pairs ``(id, text_repr)`` for all matching records.
    """
    return self._name_search(name, args, operator, limit=limit)
pass
- def _read_group_fill_results(self, cr, uid, domain, groupby, remaining_groupbys, aggregated_fields,
+ def _read_group_fill_results(self, cr, uid, domain, groupby, remaining_groupbys,
+ aggregated_fields, count_field,
read_group_result, read_group_order=None, context=None):
"""Helper method for filling in empty groups for all possible values of
the field being grouped by"""
result.append(left_side)
known_values[grouped_value] = left_side
else:
- count_attr = groupby + '_count'
- known_values[grouped_value].update({count_attr: left_side[count_attr]})
+ known_values[grouped_value].update({count_field: left_side[count_field]})
def append_right(right_side):
grouped_value = right_side[0]
if not grouped_value in known_values:
order_field = order_split[0]
if order_field in groupby_fields:
- if self._all_columns[order_field.split(':')[0]].column._type == 'many2one':
+ if self._fields[order_field.split(':')[0]].type == 'many2one':
order_clause = self._generate_order_by(order_part, query).replace('ORDER BY ', '')
if order_clause:
orderby_terms.append(order_clause)
field name, type, time informations, qualified name, ...
"""
split = gb.split(':')
- field_type = self._all_columns[split[0]].column._type
+ field_type = self._fields[split[0]].type
gb_function = split[1] if len(split) == 2 else None
temporal = field_type in ('date', 'datetime')
tz_convert = field_type == 'datetime' and context.get('tz') in pytz.all_timezones
qualified_field = self._inherits_join_calc(split[0], query)
if temporal:
display_formats = {
- 'day': 'dd MMM YYYY',
- 'week': "'W'w YYYY",
- 'month': 'MMMM YYYY',
- 'quarter': 'QQQ YYYY',
- 'year': 'YYYY'
+ # Careful with week/year formats:
+ # - yyyy (lower) must always be used, *except* for week+year formats
+ # - YYYY (upper) must always be used for week+year format
+ # e.g. 2006-01-01 is W52 2005 in some locales (de_DE),
+ # and W1 2006 for others
+ #
+ # Mixing both formats, e.g. 'MMM YYYY' would yield wrong results,
+ # such as 2006-01-01 being formatted as "January 2005" in some locales.
+ # Cfr: http://babel.pocoo.org/docs/dates/#date-fields
+ 'day': 'dd MMM yyyy', # yyyy = normal year
+ 'week': "'W'w YYYY", # w YYYY = ISO week-year
+ 'month': 'MMMM yyyy',
+ 'quarter': 'QQQ yyyy',
+ 'year': 'yyyy',
}
time_intervals = {
'day': dateutil.relativedelta.relativedelta(days=1),
assert gb in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
groupby_def = self._columns.get(gb) or (self._inherit_fields.get(gb) and self._inherit_fields.get(gb)[2])
assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
- if not (gb in self._all_columns):
+ if not (gb in self._fields):
# Don't allow arbitrary values, as this would be a SQL injection vector!
raise except_orm(_('Invalid group_by'),
_('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(gb,))
f for f in fields
if f not in ('id', 'sequence')
if f not in groupby_fields
- if f in self._all_columns
- if self._all_columns[f].column._type in ('integer', 'float')
- if getattr(self._all_columns[f].column, '_classic_write')]
+ if f in self._fields
+ if self._fields[f].type in ('integer', 'float')
+ if getattr(self._fields[f].base_field.column, '_classic_write')
+ ]
- field_formatter = lambda f: (self._all_columns[f].column.group_operator or 'sum', self._inherits_join_calc(f, query), f)
+ field_formatter = lambda f: (self._fields[f].group_operator or 'sum', self._inherits_join_calc(f, query), f)
select_terms = ["%s(%s) AS %s" % field_formatter(f) for f in aggregated_fields]
for gb in annotated_groupbys:
count_field = groupby_fields[0] if len(groupby_fields) >= 1 else '_'
else:
count_field = '_'
+ count_field += '_count'
prefix_terms = lambda prefix, terms: (prefix + " " + ",".join(terms)) if terms else ''
prefix_term = lambda prefix, term: ('%s %s' % (prefix, term)) if term else ''
query = """
- SELECT min(%(table)s.id) AS id, count(%(table)s.id) AS %(count_field)s_count %(extra_fields)s
+ SELECT min(%(table)s.id) AS id, count(%(table)s.id) AS %(count_field)s %(extra_fields)s
FROM %(from)s
%(where)s
%(groupby)s
# method _read_group_fill_results need to be completely reimplemented
# in a sane way
result = self._read_group_fill_results(cr, uid, domain, groupby_fields[0], groupby[len(annotated_groupbys):],
- aggregated_fields, result, read_group_order=order,
+ aggregated_fields, count_field, result, read_group_order=order,
context=context)
return result
if val is not False:
cr.execute(update_query, (ss[1](val), key))
@api.model
def _check_selection_field_value(self, field, value):
    """ Check whether value is among the valid values for the given
        selection/reference field, and raise an exception if not.
    """
    # Delegate the validation to the field object itself:
    # convert_to_cache() raises when `value` is not acceptable.
    fld = self._fields[field]
    fld.convert_to_cache(value, self)
def _check_removed_columns(self, cr, log=False):
# iterate on the database columns to drop the NOT NULL constraints
_schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
self._table, column['attname'])
- def _save_constraint(self, cr, constraint_name, type):
+ def _save_constraint(self, cr, constraint_name, type, definition):
"""
Record the creation of a constraint for this model, to make it possible
to delete it later when the module is uninstalled. Type can be either
return
assert type in ('f', 'u')
cr.execute("""
- SELECT 1 FROM ir_model_constraint, ir_module_module
+ SELECT type, definition FROM ir_model_constraint, ir_module_module
WHERE ir_model_constraint.module=ir_module_module.id
AND ir_model_constraint.name=%s
AND ir_module_module.name=%s
""", (constraint_name, self._module))
- if not cr.rowcount:
+ constraints = cr.dictfetchone()
+ if not constraints:
cr.execute("""
INSERT INTO ir_model_constraint
- (name, date_init, date_update, module, model, type)
+ (name, date_init, date_update, module, model, type, definition)
VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
(SELECT id FROM ir_module_module WHERE name=%s),
- (SELECT id FROM ir_model WHERE model=%s), %s)""",
- (constraint_name, self._module, self._name, type))
+ (SELECT id FROM ir_model WHERE model=%s), %s, %s)""",
+ (constraint_name, self._module, self._name, type, definition))
+ elif constraints['type'] != type or (definition and constraints['definition'] != definition):
+ cr.execute("""
+ UPDATE ir_model_constraint
+ SET date_update=now() AT TIME ZONE 'UTC', type=%s, definition=%s
+ WHERE name=%s AND module = (SELECT id FROM ir_module_module WHERE name=%s)""",
+ (type, definition, constraint_name, self._module))
def _save_relation_table(self, cr, relation_table):
"""
def _set_default_value_on_column(self, cr, column_name, context=None):
- # ideally should use add_default_value but fails
- # due to ir.values not being ready
+ # ideally, we should use default_get(), but it fails due to ir.values
+ # not being ready
- # get old-style default
+ # get default value
default = self._defaults.get(column_name)
if callable(default):
default = default(self, cr, SUPERUSER_ID, context)
- # get new_style default if no old-style
- if default is None:
- record = self.new(cr, SUPERUSER_ID, context=context)
- field = self._fields[column_name]
- field.determine_default(record)
- defaults = dict(record._cache)
- if column_name in defaults:
- default = field.convert_to_write(defaults[column_name])
-
column = self._columns[column_name]
ss = column._symbol_set
db_default = ss[1](default)
self._create_table(cr)
has_rows = False
else:
- cr.execute('SELECT COUNT(1) FROM "%s"' % (self._table,))
- has_rows = cr.fetchone()[0]
+ cr.execute('SELECT 1 FROM "%s" LIMIT 1' % self._table)
+ has_rows = cr.rowcount
cr.commit()
if self._parent_store:
dest_model = self.pool[f._obj]
ref = dest_model._table
# ir_actions is inherited so foreign key doesn't work on it
- if ref != 'ir_actions':
+ if dest_model._auto and ref != 'ir_actions':
self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
if f.select:
cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
""" Create the foreign keys recorded by _auto_init. """
for t, k, r, d in self._foreign_keys:
cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
- self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
+ self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f', False)
cr.commit()
del self._foreign_keys
for (key, con, _) in self._sql_constraints:
conname = '%s_%s' % (self._table, key)
- self._save_constraint(cr, conname, 'u')
- cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
- existing_constraints = cr.dictfetchall()
+ # select a constant 1 so a row is still returned when a pg_constraint
+ # exists but no matching ir_model_constraint (imc) record does
+ cr.execute("""SELECT definition, 1
+ FROM ir_model_constraint imc
+ RIGHT JOIN pg_constraint pgc
+ ON (pgc.conname = imc.name)
+ WHERE pgc.conname=%s
+ """, (conname, ))
+ existing_constraints = cr.dictfetchone()
sql_actions = {
'drop': {
'execute': False,
# constraint does not exist:
sql_actions['add']['execute'] = True
sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
- elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
+ elif unify_cons_text(con) != existing_constraints['definition']:
# constraint exists but its definition has changed:
sql_actions['drop']['execute'] = True
- sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
+ sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints['definition'] or '', )
sql_actions['add']['execute'] = True
sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
# we need to add the constraint:
+ self._save_constraint(cr, conname, 'u', unify_cons_text(con))
sql_actions = [item for item in sql_actions.values()]
sql_actions.sort(key=lambda x: x['order'])
for sql_action in [action for action in sql_actions if action['execute']]:
#
@classmethod
- def _inherits_reload_src(cls):
- """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
- for model in cls.pool.values():
- if cls._name in model._inherits:
- model._inherits_reload()
-
- @classmethod
def _inherits_reload(cls):
- """ Recompute the _inherit_fields mapping.
-
- This will also call itself on each inherits'd child model.
+ """ Recompute the _inherit_fields mapping, and inherited fields. """
+ struct = {}
+ fields = {}
+ for parent_model, parent_field in cls._inherits.iteritems():
+ parent = cls.pool[parent_model]
+ # old-api struct for _inherit_fields
+ for name, column in parent._columns.iteritems():
+ struct[name] = (parent_model, parent_field, column, parent_model)
+ for name, source in parent._inherit_fields.iteritems():
+ struct[name] = (parent_model, parent_field, source[2], source[3])
+ # new-api fields for _fields
+ for name, field in parent._fields.iteritems():
+ fields[name] = field.new(
+ inherited=True,
+ related=(parent_field, name),
+ related_sudo=False,
+ )
- """
- res = {}
- for table in cls._inherits:
- other = cls.pool[table]
- for col in other._columns.keys():
- res[col] = (table, cls._inherits[table], other._columns[col], table)
- for col in other._inherit_fields.keys():
- res[col] = (table, cls._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
- cls._inherit_fields = res
+ # old-api stuff
+ cls._inherit_fields = struct
cls._all_columns = cls._get_column_infos()
- # interface columns with new-style fields
- for attr, column in cls._columns.items():
- if attr not in cls._fields:
- cls._add_field(attr, column.to_field())
-
- # interface inherited fields with new-style fields (note that the
- # reverse order is for being consistent with _all_columns above)
- for parent_model, parent_field in reversed(cls._inherits.items()):
- for attr, field in cls.pool[parent_model]._fields.iteritems():
- if attr not in cls._fields:
- new_field = field.copy(related=(parent_field, attr), _origin=field)
- cls._add_field(attr, new_field)
-
- cls._inherits_reload_src()
+ # add inherited fields that are not redefined locally
+ for name, field in fields.iteritems():
+ if name not in cls._fields:
+ cls._add_field(name, field)
@classmethod
def _get_column_infos(cls):
@api.model
def _prepare_setup_fields(self):
""" Prepare the setup of fields once the models have been loaded. """
- for field in self._fields.itervalues():
- field.reset()
+ type(self)._setup_done = False
+ for name, field in self._fields.items():
+ if field.inherited:
+ del self._fields[name]
+ else:
+ field.reset()
@api.model
- def _setup_fields(self, partial=False):
+ def _setup_fields(self):
""" Setup the fields (dependency triggers, etc). """
- for field in self._fields.itervalues():
- if partial and field.manual and \
- field.relational and field.comodel_name not in self.pool:
- # do not set up manual fields that refer to unknown models
- continue
+ cls = type(self)
+ if cls._setup_done:
+ return
+ cls._setup_done = True
+
+ # first make sure that parent models are all set up
+ for parent in self._inherits:
+ self.env[parent]._setup_fields()
+
+ # retrieve custom fields
+ if not self._context.get('_setup_fields_partial'):
+ cls._init_manual_fields(self._cr)
+
+ # retrieve inherited fields
+ cls._inherits_check()
+ cls._inherits_reload()
+
+ # set up fields
+ for field in cls._fields.itervalues():
field.setup(self.env)
+ # update columns (fields may have changed)
+ for name, field in cls._fields.iteritems():
+ if field.column:
+ cls._columns[name] = field.to_column()
+
# group fields by compute to determine field.computed_fields
fields_by_compute = defaultdict(list)
- for field in self._fields.itervalues():
+ for field in cls._fields.itervalues():
if field.compute:
field.computed_fields = fields_by_compute[field.compute]
field.computed_fields.append(field)
else:
field.computed_fields = []
- def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
- """ Return the definition of each field.
+ # check constraints
+ for func in cls._constraint_methods:
+ if not all(name in cls._fields for name in func._constrains):
+ _logger.warning("@constrains%r parameters must be field names", func._constrains)
+ for name in cls._onchange_methods:
+ if name not in cls._fields:
+ func = cls._onchange_methods[name]
+ _logger.warning("@onchange%r parameters must be field names", func._onchange)
+
+ # check defaults
+ for name in cls._defaults:
+ assert name in cls._fields, \
+ "Model %s has a default for nonexiting field %s" % (cls._name, name)
+
+ # validate rec_name
+ if cls._rec_name:
+ assert cls._rec_name in cls._fields, \
+ "Invalid rec_name %s for model %s" % (cls._rec_name, cls._name)
+ elif 'name' in cls._fields:
+ cls._rec_name = 'name'
+ elif 'x_name' in cls._fields:
+ cls._rec_name = 'x_name'
+
+ def fields_get(self, cr, user, allfields=None, context=None, write_access=True, attributes=None):
+ """ fields_get([fields][, attributes])
+
+ Return the definition of each field.
The returned value is a dictionary (indiced by field name) of
dictionaries. The _inherits'd fields are included. The string, help,
and selection (if present) attributes are translated.
- :param cr: database cursor
- :param user: current user id
- :param allfields: list of fields
- :param context: context arguments, like lang, time zone
- :return: dictionary of field dictionaries, each one describing a field of the business object
- :raise AccessError: * if user has no create/write rights on the requested object
-
+ :param allfields: list of fields to document, all if empty or not provided
+ :param attributes: list of description attributes to return for each field, all if empty or not provided
"""
recs = self.browse(cr, user, [], context)
+ has_access = functools.partial(recs.check_access_rights, raise_exception=False)
+ readonly = not (has_access('write') or has_access('create'))
+
res = {}
for fname, field in self._fields.iteritems():
if allfields and fname not in allfields:
continue
+ if not field.setup_done:
+ continue
if field.groups and not recs.user_has_groups(field.groups):
continue
- res[fname] = field.get_description(recs.env)
- # if user cannot create or modify records, make all fields readonly
- has_access = functools.partial(recs.check_access_rights, raise_exception=False)
- if not (has_access('write') or has_access('create')):
- for description in res.itervalues():
+ description = field.get_description(recs.env)
+ if readonly:
description['readonly'] = True
description['states'] = {}
+ if attributes:
+ description = {k: v for k, v in description.iteritems()
+ if k in attributes}
+ res[fname] = description
return res
return fields
- # new-style implementation of read(); old-style is defined below
+ # add explicit old-style implementation to read()
+ @api.v7
+ def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
+ records = self.browse(cr, user, ids, context)
+ result = BaseModel.read(records, fields, load=load)
+ return result if isinstance(ids, list) else (bool(result) and result[0])
+
+ # new-style implementation of read()
@api.v8
def read(self, fields=None, load='_classic_read'):
- """ Read the given fields for the records in `self`.
-
- :param fields: optional list of field names to return (default is
- all fields)
- :param load: deprecated, this argument is ignored
- :return: a list of dictionaries mapping field names to their values,
- with one dictionary per record
- :raise AccessError: if user has no read rights on some of the given
- records
+ """ read([fields])
+
+ Reads the requested fields for the records in `self`, low-level/RPC
+ method. In Python code, prefer :meth:`~.browse`.
+
+ :param fields: list of field names to return (default is all fields)
+ :return: a list of dictionaries mapping field names to their values,
+ with one dictionary per record
+ :raise AccessError: if user has no read rights on some of the given
+ records
"""
# check access rights
self.check_access_rights('read')
return result
- # add explicit old-style implementation to read()
- @api.v7
- def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
- records = self.browse(cr, user, ids, context)
- result = BaseModel.read(records, fields, load=load)
- return result if isinstance(ids, list) else (bool(result) and result[0])
-
@api.multi
def _prefetch_field(self, field):
""" Read from the database in order to fetch `field` (:class:`Field`
instance) for `self` in cache.
"""
# fetch the records of this model without field_name in their cache
- records = self
+ records = self._in_cache_without(field)
- # by default, simply fetch field
- fnames = {field.name}
+ if len(records) > PREFETCH_MAX:
+ records = records[:PREFETCH_MAX] | self
- if self.env.in_draft:
- # we may be doing an onchange, do not prefetch other fields
- pass
- elif field in self.env.todo:
- # field must be recomputed, do not prefetch records to recompute
- records -= self.env.todo[field]
- elif self._columns[field.name]._prefetch:
- # here we can optimize: prefetch all classic and many2one fields
- fnames = set(fname
+ # determine which fields can be prefetched
+ if not self.env.in_draft and \
+ self._context.get('prefetch_fields', True) and \
+ self._columns[field.name]._prefetch:
+ # prefetch all classic and many2one fields that the user can access
+ fnames = {fname
for fname, fcolumn in self._columns.iteritems()
- if fcolumn._prefetch)
+ if fcolumn._prefetch
+ if not fcolumn.groups or self.user_has_groups(fcolumn.groups)
+ }
+ else:
+ fnames = {field.name}
+
+ # important: never prefetch fields to recompute!
+ get_recs_todo = self.env.field_todo
+ for fname in list(fnames):
+ if get_recs_todo(self._fields[fname]):
+ if fname == field.name:
+ records -= get_recs_todo(field)
+ else:
+ fnames.discard(fname)
# fetch records with read()
assert self in records and field.name in fnames
pass
# check the cache, and update it if necessary
- if field not in self._cache:
+ if not self._cache.contains(field):
for values in result:
record = self.browse(values.pop('id'))
record._cache.update(record._convert_to_cache(values, validate=False))
- if field not in self._cache:
+ if not self._cache.contains(field):
e = AccessError("No value found for %s.%s" % (self, field.name))
self._cache[field] = FailedValue(e)
env = self.env
cr, user, context = env.args
+ # FIXME: The query construction needs to be rewritten using the internal Query
+ # object, as in search(), to avoid ambiguous column references when
+ # reading/sorting on a table that is auto_joined to another table with
+ # common columns (e.g. the magical columns)
+
# Construct a clause for the security rules.
# 'tables' holds the list of tables necessary for the SELECT, including
# the ir.rule clauses, and contains at least self._table.
'order': self._parent_order or self._order,
}
- empty = self.browse()
- prefetch = set()
- todo = set()
- for field in (self._fields[name] for name in field_names):
- prefetch.update(self._in_cache_without(field).ids)
- todo.update(self.env.todo.get(field, empty).ids)
- records = self.browse(prefetch - todo | set(self.ids))
-
result = []
- for sub_ids in cr.split_for_in_conditions(records.ids):
+ for sub_ids in cr.split_for_in_conditions(self.ids):
cr.execute(query, [tuple(sub_ids)] + rule_params)
result.extend(cr.dictfetchall())
# store failed values in cache for the records that could not be read
fetched = self.browse(ids)
- missing = records - fetched
+ missing = self - fetched
if missing:
- extras = fetched - records
+ extras = fetched - self
if extras:
raise AccessError(
_("Database fetch misses ids ({}) and has extra ids ({}), may be caused by a type incoherence in a previous request").format(
return True
def unlink(self, cr, uid, ids, context=None):
- """
- Delete records with given ids
+ """ unlink()
+
+ Deletes the records of the current set
- :param cr: database cursor
- :param uid: current user id
- :param ids: id or list of ids
- :param context: (optional) context arguments, like lang, time zone
- :return: True
:raise AccessError: * if user has no unlink rights on the requested object
* if user tries to bypass access rules for unlink on the requested object
:raise UserError: if the record is default property for other records
if isinstance(ids, (int, long)):
ids = [ids]
- result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context)
+ result_store = self._store_get_values(cr, uid, ids, self._fields.keys(), context)
# for recomputing new-style fields
recs = self.browse(cr, uid, ids, context)
self.check_access_rule(cr, uid, ids, 'unlink', context=context)
pool_model_data = self.pool.get('ir.model.data')
ir_values_obj = self.pool.get('ir.values')
+ ir_attachment_obj = self.pool.get('ir.attachment')
for sub_ids in cr.split_for_in_conditions(ids):
cr.execute('delete from ' + self._table + ' ' \
'where id IN %s', (sub_ids,))
if ir_value_ids:
ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)
+ # For the same reason, removing the record relevant to ir_attachment
+ # The search is performed with sql as the search method of ir_attachment is overridden to hide attachments of deleted records
+ cr.execute('select id from ir_attachment where res_model = %s and res_id in %s', (self._name, sub_ids))
+ ir_attachment_ids = [ir_attachment[0] for ir_attachment in cr.fetchall()]
+ if ir_attachment_ids:
+ ir_attachment_obj.unlink(cr, uid, ir_attachment_ids, context=context)
+
# invalidate the *whole* cache, since the orm does not handle all
# changes made in the database, like cascading delete!
recs.invalidate_cache()
#
@api.multi
def write(self, vals):
- """
- Update records in `self` with the given field values.
-
- :param vals: field values to update, e.g {'field_name': new_field_value, ...}
- :type vals: dictionary
- :return: True
- :raise AccessError: * if user has no write rights on the requested object
- * if user tries to bypass access rules for write on the requested object
- :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
- :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
-
- **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:
-
- + For a many2many field, a list of tuples is expected.
- Here is the list of tuple that are accepted, with the corresponding semantics ::
-
- (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
- (1, ID, { values }) update the linked record with id = ID (write *values* on it)
- (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
- (3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
- (4, ID) link to existing record with id = ID (adds a relationship)
- (5) unlink all (like using (3,ID) for all linked records)
- (6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)
+ """ write(vals)
- Example:
- [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]
+ Updates all records in the current set with the provided values.
- + For a one2many field, a lits of tuples is expected.
- Here is the list of tuple that are accepted, with the corresponding semantics ::
+ :param dict vals: fields to update and the value to set on them e.g::
- (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
- (1, ID, { values }) update the linked record with id = ID (write *values* on it)
- (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
+ {'foo': 1, 'bar': "Qux"}
- Example:
- [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]
+ will set the field ``foo`` to ``1`` and the field ``bar`` to
+ ``"Qux"`` if those are valid (otherwise it will trigger an error).
- + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
- + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
+ :raise AccessError: * if user has no write rights on the requested object
+ * if user tries to bypass access rules for write on the requested object
+ :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
+ :raise UserError: if a loop would be created in a hierarchy of objects as a result of the operation (such as setting an object as its own parent)
+ * For numeric fields (:class:`~openerp.fields.Integer`,
+ :class:`~openerp.fields.Float`) the value should be of the
+ corresponding type
+ * For :class:`~openerp.fields.Boolean`, the value should be a
+ :class:`python:bool`
+ * For :class:`~openerp.fields.Selection`, the value should match the
+ selection values (generally :class:`python:str`, sometimes
+ :class:`python:int`)
+ * For :class:`~openerp.fields.Many2one`, the value should be the
+ database identifier of the record to set
+ * Other non-relational fields use a string for value
+
+ .. danger::
+
+ for historical and compatibility reasons,
+ :class:`~openerp.fields.Date` and
+ :class:`~openerp.fields.Datetime` fields use strings as values
+ (written and read) rather than :class:`~python:datetime.date` or
+ :class:`~python:datetime.datetime`. These date strings are
+ UTC-only and formatted according to
+ :const:`openerp.tools.misc.DEFAULT_SERVER_DATE_FORMAT` and
+ :const:`openerp.tools.misc.DEFAULT_SERVER_DATETIME_FORMAT`
+ * .. _openerp/models/relationals/format:
+
+ :class:`~openerp.fields.One2many` and
+ :class:`~openerp.fields.Many2many` use a special "commands" format to
+ manipulate the set of records stored in/associated with the field.
+
+ This format is a list of triplets executed sequentially, where each
+ triplet is a command to execute on the set of records. Not all
+ commands apply in all situations. Possible commands are:
+
+ ``(0, _, values)``
+ adds a new record created from the provided ``values`` dict.
+ ``(1, id, values)``
+ updates an existing record of id ``id`` with the values in
+ ``values``. Can not be used in :meth:`~.create`.
+ ``(2, id, _)``
+ removes the record of id ``id`` from the set, then deletes it
+ (from the database). Can not be used in :meth:`~.create`.
+ ``(3, id, _)``
+ removes the record of id ``id`` from the set, but does not
+ delete it. Can not be used on
+ :class:`~openerp.fields.One2many`. Can not be used in
+ :meth:`~.create`.
+ ``(4, id, _)``
+ adds an existing record of id ``id`` to the set. Can not be
+ used on :class:`~openerp.fields.One2many`.
+ ``(5, _, _)``
+ removes all records from the set, equivalent to using the
+ command ``3`` on every record explicitly. Can not be used on
+ :class:`~openerp.fields.One2many`. Can not be used in
+ :meth:`~.create`.
+ ``(6, _, ids)``
+ replaces all existing records in the set by the ``ids`` list,
+ equivalent to using the command ``5`` followed by a command
+ ``4`` for each ``id`` in ``ids``. Can not be used on
+ :class:`~openerp.fields.One2many`.
+
+ .. note:: Values marked as ``_`` in the list above are ignored and
+ can be anything, generally ``0`` or ``False``.
"""
if not self:
return True
# split up fields into old-style and pure new-style ones
old_vals, new_vals, unknown = {}, {}, []
for key, val in vals.iteritems():
- if key in self._columns:
- old_vals[key] = val
- elif key in self._fields:
- new_vals[key] = val
+ field = self._fields.get(key)
+ if field:
+ if field.column or field.inherited:
+ old_vals[key] = val
+ if field.inverse and not field.inherited:
+ new_vals[key] = val
else:
unknown.append(key)
readonly = None
self.check_field_access_rights(cr, user, 'write', vals.keys())
+ deleted_related = defaultdict(list)
for field in vals.keys():
fobj = None
if field in self._columns:
fobj = self._inherit_fields[field][2]
if not fobj:
continue
+ if fobj._type in ['one2many', 'many2many'] and vals[field]:
+ for wtuple in vals[field]:
+ if isinstance(wtuple, (tuple, list)) and wtuple[0] == 2:
+ deleted_related[fobj._obj].append(wtuple[1])
groups = fobj.write
if groups:
cr.execute(query, (tuple(ids),))
parents_changed = map(operator.itemgetter(0), cr.fetchall())
- upd0 = []
- upd1 = []
+ updates = [] # list of (column, expr) or (column, pattern, value)
upd_todo = []
updend = []
direct = []
totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
for field in vals:
- field_column = self._all_columns.get(field) and self._all_columns.get(field).column
- if field_column and field_column.deprecated:
- _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
+ ffield = self._fields.get(field)
+ if ffield and ffield.deprecated:
+ _logger.warning('Field %s.%s is deprecated: %s', self._name, field, ffield.deprecated)
if field in self._columns:
- if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
- if (not totranslate) or not self._columns[field].translate:
- upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
- upd1.append(self._columns[field]._symbol_set[1](vals[field]))
+ column = self._columns[field]
+ if hasattr(column, 'selection') and vals[field]:
+ self._check_selection_field_value(cr, user, field, vals[field], context=context)
+ if column._classic_write and not hasattr(column, '_fnct_inv'):
+ if (not totranslate) or not column.translate:
+ updates.append((field, '%s', column._symbol_set[1](vals[field])))
direct.append(field)
else:
upd_todo.append(field)
else:
updend.append(field)
- if field in self._columns \
- and hasattr(self._columns[field], 'selection') \
- and vals[field]:
- self._check_selection_field_value(cr, user, field, vals[field], context=context)
if self._log_access:
- upd0.append('write_uid=%s')
- upd0.append("write_date=(now() at time zone 'UTC')")
- upd1.append(user)
+ updates.append(('write_uid', '%s', user))
+ updates.append(('write_date', "(now() at time zone 'UTC')"))
+ direct.append('write_uid')
+ direct.append('write_date')
- if len(upd0):
+ if updates:
self.check_access_rule(cr, user, ids, 'write', context=context)
+ query = 'UPDATE "%s" SET %s WHERE id IN %%s' % (
+ self._table, ','.join('"%s"=%s' % u[:2] for u in updates),
+ )
+ params = tuple(u[2] for u in updates if len(u) > 2)
for sub_ids in cr.split_for_in_conditions(ids):
- cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
- 'where id IN %s', upd1 + [sub_ids])
+ cr.execute(query, params + (sub_ids,))
if cr.rowcount != len(sub_ids):
raise MissingError(_('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)
self.write(cr, user, ids, {f: vals[f]}, context=context_wo_lang)
self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)
+ # invalidate and mark new-style fields to recompute; do this before
+ # setting other fields, because it can require the value of computed
+ # fields, e.g., a one2many checking constraints on records
+ recs.modified(direct)
+
# call the 'set' method of fields which are not classic_write
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
for id in ids:
result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
+ # for recomputing new-style fields
+ recs.modified(upd_todo)
+
unknown_fields = updend[:]
for table in self._inherits:
col = self._inherits[table]
result += self._store_get_values(cr, user, ids, vals.keys(), context)
result.sort()
- # for recomputing new-style fields
- recs.modified(modified_fields)
-
done = {}
for order, model_name, ids_to_update, fields_to_recompute in result:
key = (model_name, tuple(fields_to_recompute))
for id in ids_to_update:
if id not in done[key]:
done[key][id] = True
- todo.append(id)
+ if id not in deleted_related[model_name]:
+ todo.append(id)
self.pool[model_name]._store_set_values(cr, user, todo, fields_to_recompute, context)
# recompute new-style fields
@api.model
@api.returns('self', lambda value: value.id)
def create(self, vals):
- """ Create a new record for the model.
-
- The values for the new record are initialized using the dictionary
- `vals`, and if necessary the result of :meth:`default_get`.
-
- :param vals: field values like ``{'field_name': field_value, ...}``,
- see :meth:`write` for details about the values format
- :return: new record created
- :raise AccessError: * if user has no create rights on the requested object
- * if user tries to bypass access rules for create on the requested object
- :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
- :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
+ """ create(vals) -> record
+
+ Creates a new record for the model.
+
+ The new record is initialized using the values from ``vals`` and
+ if necessary those from :meth:`~.default_get`.
+
+ :param dict vals:
+ values for the model's fields, as a dictionary::
+
+ {'field_name': field_value, ...}
+
+ see :meth:`~.write` for details
+ :return: new record created
+ :raise AccessError: * if user has no create rights on the requested object
+ * if user tries to bypass access rules for create on the requested object
+ :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
+ :raise UserError: if a loop would be created in a hierarchy of objects as a result of the operation (such as setting an object as its own parent)
"""
self.check_access_rights('create')
# split up fields into old-style and pure new-style ones
old_vals, new_vals, unknown = {}, {}, []
for key, val in vals.iteritems():
- if key in self._all_columns:
- old_vals[key] = val
- elif key in self._fields:
- new_vals[key] = val
+ field = self._fields.get(key)
+ if field:
+ if field.column or field.inherited:
+ old_vals[key] = val
+ if field.inverse and not field.inherited:
+ new_vals[key] = val
else:
unknown.append(key)
record_id = tocreate[table].pop('id', None)
- # When linking/creating parent records, force context without 'no_store_function' key that
- # defers stored functions computing, as these won't be computed in batch at the end of create().
- parent_context = dict(context)
- parent_context.pop('no_store_function', None)
-
if record_id is None or not record_id:
- record_id = self.pool[table].create(cr, user, tocreate[table], context=parent_context)
+ record_id = self.pool[table].create(cr, user, tocreate[table], context=context)
else:
- self.pool[table].write(cr, user, [record_id], tocreate[table], context=parent_context)
+ self.pool[table].write(cr, user, [record_id], tocreate[table], context=context)
updates.append((self._inherits[table], '%s', record_id))
id_new, = cr.fetchone()
recs = self.browse(cr, user, id_new, context)
- upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
if self._parent_store and not context.get('defer_parent_store_computation'):
if self.pool._init:
cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
recs.invalidate_cache(['parent_left', 'parent_right'])
+ # invalidate and mark new-style fields to recompute; do this before
+ # setting other fields, because it can require the value of computed
+ # fields, e.g., a one2many checking constraints on records
+ recs.modified([u[0] for u in updates])
+
+ # call the 'set' method of fields which are not classic_write
+ upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
+
# default element in context must be remove when call a one2many or many2many
rel_context = context.copy()
for c in context.items():
for field in upd_todo:
result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
+ # for recomputing new-style fields
+ recs.modified(upd_todo)
+
# check Python constraints
recs._validate_fields(vals)
- if not context.get('no_store_function', False):
+ if context.get('recompute', True):
result += self._store_get_values(cr, user, [id_new],
list(set(vals.keys() + self._inherits.values())),
context)
if not (model_name, ids, fields2) in done:
self.pool[model_name]._store_set_values(cr, user, ids, fields2, context)
done.append((model_name, ids, fields2))
-
# recompute new-style fields
- modified_fields = list(vals)
- if self._log_access:
- modified_fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
- recs.modified(modified_fields)
recs.recompute()
- if self._log_create and not (context and context.get('no_store_function', False)):
+ if self._log_create and context.get('recompute', True):
message = self._description + \
" '" + \
self.name_get(cr, user, [id_new], context=context)[0][1] + \
for f in value.keys():
if f in field_dict[id]:
value.pop(f)
- upd0 = []
- upd1 = []
+ updates = [] # list of (column, pattern, value)
for v in value:
if v not in val:
continue
- if self._columns[v]._type == 'many2one':
+ column = self._columns[v]
+ if column._type == 'many2one':
try:
value[v] = value[v][0]
except:
pass
- upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
- upd1.append(self._columns[v]._symbol_set[1](value[v]))
- upd1.append(id)
- if upd0 and upd1:
- cr.execute('update "' + self._table + '" set ' + \
- ','.join(upd0) + ' where id = %s', upd1)
+ updates.append((v, '%s', column._symbol_set[1](value[v])))
+ if updates:
+ query = 'UPDATE "%s" SET %s WHERE id = %%s' % (
+ self._table, ','.join('"%s"=%s' % u[:2] for u in updates),
+ )
+ params = tuple(u[2] for u in updates)
+ cr.execute(query, params + (id,))
else:
for f in val:
+ column = self._columns[f]
# use admin user for accessing objects having rules defined on store fields
- result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
+ result = column.get(cr, self, ids, f, SUPERUSER_ID, context=context)
for r in result.keys():
if field_flag:
if r in field_dict.keys():
if f in field_dict[r]:
result.pop(r)
for id, value in result.items():
- if self._columns[f]._type == 'many2one':
+ if column._type == 'many2one':
try:
value = value[0]
except:
pass
- cr.execute('update "' + self._table + '" set ' + \
- '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
+ query = 'UPDATE "%s" SET "%s"=%%s WHERE id = %%s' % (
+ self._table, f,
+ )
+ cr.execute(query, (column._symbol_set[1](value), id))
- # invalidate the cache for the modified fields
+ # invalidate and mark new-style fields to recompute
self.browse(cr, uid, ids, context).modified(fields)
return True
domain = domain[:]
# if the object has a field named 'active', filter out all inactive
# records unless they were explicitely asked for
- if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
+ if 'active' in self._fields and active_test and context.get('active_test', True):
if domain:
# the item[0] trick below works for domain items and '&'/'|'/'!'
# operators too
order_split = order_part.strip().split(' ')
order_field = order_split[0].strip()
order_direction = order_split[1].strip() if len(order_split) == 2 else ''
+ order_column = None
inner_clause = None
if order_field == 'id':
order_by_elements.append('"%s"."%s" %s' % (self._table, order_field, order_direction))
continue # ignore non-readable or "non-joinable" fields
else:
raise ValueError( _("Sorting field %s not found on model %s") %( order_field, self._name))
+ if order_column and order_column._type == 'boolean':
+ inner_clause = "COALESCE(%s, false)" % inner_clause
if inner_clause:
if isinstance(inner_clause, list):
for clause in inner_clause:
# build a black list of fields that should not be copied
blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
+ whitelist = set(name for name, field in self._fields.iteritems() if not field.inherited)
+
def blacklist_given_fields(obj):
# blacklist the fields that are given by inheritance
for other, field_to_other in obj._inherits.items():
if field_to_other in default:
# all the fields of 'other' are given by the record: default[field_to_other],
# except the ones redefined in self
- blacklist.update(set(self.pool[other]._all_columns) - set(self._columns))
+ blacklist.update(set(self.pool[other]._fields) - whitelist)
else:
blacklist_given_fields(self.pool[other])
# blacklist deprecated fields
- for name, field in obj._columns.items():
+ for name, field in obj._fields.iteritems():
if field.deprecated:
blacklist.add(name)
blacklist_given_fields(self)
- fields_to_copy = dict((f,fi) for f, fi in self._all_columns.iteritems()
- if fi.column.copy
+ fields_to_copy = dict((f,fi) for f, fi in self._fields.iteritems()
+ if fi.copy
if f not in default
if f not in blacklist)
raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))
res = dict(default)
- for f, colinfo in fields_to_copy.iteritems():
- field = colinfo.column
- if field._type == 'many2one':
+ for f, field in fields_to_copy.iteritems():
+ if field.type == 'many2one':
res[f] = data[f] and data[f][0]
- elif field._type == 'one2many':
- other = self.pool[field._obj]
+ elif field.type == 'one2many':
+ other = self.pool[field.comodel_name]
# duplicate following the order of the ids because we'll rely on
# it later for copying translations in copy_translation()!
lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
# the lines are duplicated using the wrong (old) parent, but then
# are reassigned to the correct one thanks to the (0, 0, ...)
res[f] = [(0, 0, line) for line in lines if line]
- elif field._type == 'many2many':
+ elif field.type == 'many2many':
res[f] = [(6, 0, data[f])]
else:
res[f] = data[f]
seen_map[self._name].append(old_id)
trans_obj = self.pool.get('ir.translation')
- # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
- fields = self.fields_get(cr, uid, context=context)
- for field_name, field_def in fields.items():
+ for field_name, field in self._fields.iteritems():
+ if not field.copy:
+ continue
# removing the lang to compare untranslated values
context_wo_lang = dict(context, lang=None)
old_record, new_record = self.browse(cr, uid, [old_id, new_id], context=context_wo_lang)
# we must recursively copy the translations for o2o and o2m
- if field_def['type'] == 'one2many':
- target_obj = self.pool[field_def['relation']]
+ if field.type == 'one2many':
+ target_obj = self.pool[field.comodel_name]
# here we rely on the order of the ids to match the translations
# as foreseen in copy_data()
old_children = sorted(r.id for r in old_record[field_name])
for (old_child, new_child) in zip(old_children, new_children):
target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
# and for translatable fields we keep them for copy
- elif field_def.get('translate'):
+ elif getattr(field, 'translate', False):
if field_name in self._columns:
trans_name = self._name + "," + field_name
target_id = new_id
@api.returns('self', lambda value: value.id)
def copy(self, cr, uid, id, default=None, context=None):
- """
+ """ copy(default=None)
+
Duplicate record with given id updating it with default values
- :param cr: database cursor
- :param uid: current user id
- :param id: id of the record to copy
- :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
- :type default: dictionary
- :param context: context arguments, like lang, time zone
- :type context: dictionary
- :return: id of the newly created record
+ :param dict default: dictionary of field values to override in the
+ original values of the copied record, e.g.: ``{'field_name': overridden_value, ...}``
+ :returns: new record
"""
if context is None:
@api.multi
@api.returns('self')
def exists(self):
- """ Return the subset of records in `self` that exist, and mark deleted
- records as such in cache. It can be used as a test on records::
+ """ exists() -> records
+
+ Returns the subset of records in `self` that exist, and marks deleted
+ records as such in cache. It can be used as a test on records::
- if record.exists():
- ...
+ if record.exists():
+ ...
- By convention, new records are returned as existing.
+ By convention, new records are returned as existing.
"""
- ids = filter(None, self._ids) # ids to check in database
+ ids, new_ids = [], []
+ for i in self._ids:
+ (ids if isinstance(i, (int, long)) else new_ids).append(i)
if not ids:
return self
query = """SELECT id FROM "%s" WHERE id IN %%s""" % self._table
- self._cr.execute(query, (ids,))
- ids = ([r[0] for r in self._cr.fetchall()] + # ids in database
- [id for id in self._ids if not id]) # new ids
- existing = self.browse(ids)
+ self._cr.execute(query, [tuple(ids)])
+ ids = [r[0] for r in self._cr.fetchall()]
+ existing = self.browse(ids + new_ids)
if len(existing) < len(self):
# mark missing records in cache with a failed value
exc = MissingError(_("Record does not exist or has been deleted."))
:return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
"""
- field = self._all_columns.get(field_name)
- field = field.column if field else None
- if not field or field._type != 'many2many' or field._obj != self._name:
+ field = self._fields.get(field_name)
+ if not (field and field.type == 'many2many' and
+ field.comodel_name == self._name and field.store):
# field must be a many2many on itself
raise ValueError('invalid field_name: %r' % (field_name,))
- query = 'SELECT distinct "%s" FROM "%s" WHERE "%s" IN %%s' % (field._id2, field._rel, field._id1)
+ query = 'SELECT distinct "%s" FROM "%s" WHERE "%s" IN %%s' % \
+ (field.column2, field.relation, field.column1)
ids_parent = ids[:]
while ids_parent:
ids_parent2 = []
result, record_ids = [], list(command[2])
# read the records and apply the updates
- other_model = self.pool[self._all_columns[field_name].column._obj]
+ other_model = self.pool[self._fields[field_name].comodel_name]
for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
record.update(updates.get(record['id'], {}))
result.append(record)
env.prefetch[cls._name].update(ids)
return records
+ @api.v7
+ def browse(self, cr, uid, arg=None, context=None):
+ ids = _normalize_ids(arg)
+ #assert all(isinstance(id, IdType) for id in ids), "Browsing invalid ids: %s" % ids
+ return self._browse(Environment(cr, uid, context or {}), ids)
+
@api.v8
def browse(self, arg=None):
- """ Return an instance corresponding to `arg` and attached to
- `self.env`; `arg` is either a record id, or a collection of record ids.
+ """ browse([ids]) -> records
+
+ Returns a recordset for the ids provided as parameter in the current
+ environment.
+
+ Can take no ids, a single id or a sequence of ids.
"""
ids = _normalize_ids(arg)
#assert all(isinstance(id, IdType) for id in ids), "Browsing invalid ids: %s" % ids
return self._browse(self.env, ids)
- @api.v7
- def browse(self, cr, uid, arg=None, context=None):
- ids = _normalize_ids(arg)
- #assert all(isinstance(id, IdType) for id in ids), "Browsing invalid ids: %s" % ids
- return self._browse(Environment(cr, uid, context or {}), ids)
-
#
# Internal properties, for manipulating the instance's implementation
#
@property
def ids(self):
- """ Return the list of non-false record ids of this instance. """
+ """ List of actual record ids in this recordset (ignores placeholder
+ ids for records to create)
+ """
return filter(None, list(self._ids))
# backward-compatibility with former browse records
#
def ensure_one(self):
- """ Return `self` if it is a singleton instance, otherwise raise an
- exception.
+ """ Verifies that the current recordset holds a single record. Raises
+ an exception otherwise.
"""
if len(self) == 1:
return self
raise except_orm("ValueError", "Expected singleton: %s" % self)
def with_env(self, env):
- """ Return an instance equivalent to `self` attached to `env`.
+ """ Returns a new version of this recordset attached to the provided
+ environment
+
+ :type env: :class:`~openerp.api.Environment`
"""
return self._browse(env, self._ids)
def sudo(self, user=SUPERUSER_ID):
- """ Return an instance equivalent to `self` attached to an environment
- based on `self.env` with the given `user`.
+ """ sudo([user=SUPERUSER])
+
+ Returns a new version of this recordset attached to the provided
+ user.
"""
return self.with_env(self.env(user=user))
def with_context(self, *args, **kwargs):
- """ Return an instance equivalent to `self` attached to an environment
- based on `self.env` with another context. The context is given by
- `self._context` or the positional argument if given, and modified by
- `kwargs`.
+ """ with_context([context][, **overrides]) -> records
+
+ Returns a new version of this recordset attached to an extended
+ context.
+
+ The extended context is either the provided ``context`` in which
+ ``overrides`` are merged or the *current* context in which
+ ``overrides`` are merged e.g.::
+
+ # current context is {'key1': True}
+ r2 = records.with_context({}, key2=True)
+ # -> r2._context is {'key2': True}
+ r2 = records.with_context(key2=True)
+ # -> r2._context is {'key1': True, 'key2': True}
"""
context = dict(args[0] if args else self._context, **kwargs)
return self.with_env(self.env(context=context))
def _convert_to_write(self, values):
""" Convert the `values` dictionary into the format of :meth:`write`. """
fields = self._fields
- return dict(
- (name, fields[name].convert_to_write(value))
- for name, value in values.iteritems()
- if name in self._fields
- )
+ result = {}
+ for name, value in values.iteritems():
+ if name in fields:
+ value = fields[name].convert_to_write(value)
+ if not isinstance(value, NewId):
+ result[name] = value
+ return result
#
# Record traversal and update
def _mapped_func(self, func):
""" Apply function `func` on all records in `self`, and return the
- result as a list or a recordset (if `func` return recordsets).
+ result as a list or a recordset (if `func` returns recordsets).
"""
- vals = [func(rec) for rec in self]
- val0 = vals[0] if vals else func(self)
- if isinstance(val0, BaseModel):
- return reduce(operator.or_, vals, val0)
- return vals
+ if self:
+ vals = [func(rec) for rec in self]
+ return reduce(operator.or_, vals) if isinstance(vals[0], BaseModel) else vals
+ else:
+ vals = func(self)
+ return vals if isinstance(vals, BaseModel) else []
def mapped(self, func):
""" Apply `func` on all records in `self`, and return the result as a
func = lambda rec: filter(None, rec.mapped(name))
return self.browse([rec.id for rec in self if func(rec)])
- def sorted(self, key=None):
- """ Return the recordset `self` ordered by `key` """
+ def sorted(self, key=None, reverse=False):
+ """ Return the recordset `self` ordered by `key`.
+
+ :param key: either a function of one argument that returns a
+ comparison key for each record, or ``None``, in which case
+ records are ordered according to the model's default order
+
+ :param reverse: if ``True``, return the result in reverse order
+ """
if key is None:
- return self.search([('id', 'in', self.ids)])
+ recs = self.search([('id', 'in', self.ids)])
+ return self.browse(reversed(recs._ids)) if reverse else recs
else:
- return self.browse(map(int, sorted(self, key=key)))
+ return self.browse(map(int, sorted(self, key=key, reverse=reverse)))
def update(self, values):
""" Update record `self[0]` with `values`. """
#
# New records - represent records that do not exist in the database yet;
- # they are used to compute default values and perform onchanges.
+ # they are used to perform onchanges.
#
@api.model
def new(self, values={}):
- """ Return a new record instance attached to `self.env`, and
- initialized with the `values` dictionary. Such a record does not
- exist in the database.
+ """ new([values]) -> record
+
+ Return a new record instance attached to the current environment and
+ initialized with the provided ``values``. The record is *not* created
+ in the database; it only exists in memory.
"""
record = self.browse([NewId()])
record._cache.update(record._convert_to_cache(values, update=True))
return record
#
- # Dirty flag, to mark records modified (in draft mode)
+ # Dirty flags, to mark record fields modified (in draft mode)
#
- @property
- def _dirty(self):
+ def _is_dirty(self):
""" Return whether any record in `self` is dirty. """
dirty = self.env.dirty
return any(record in dirty for record in self)
- @_dirty.setter
- def _dirty(self, value):
- """ Mark the records in `self` as dirty. """
- if value:
- map(self.env.dirty.add, self)
- else:
- map(self.env.dirty.discard, self)
+ def _get_dirty(self):
+ """ Return the list of field names for which `self` is dirty. """
+ dirty = self.env.dirty
+ return list(dirty.get(self, ()))
+
+ def _set_dirty(self, field_name):
+ """ Mark the records in `self` as dirty for the given `field_name`. """
+ dirty = self.env.dirty
+ for record in self:
+ dirty[record].add(field_name)
#
# "Dunder" methods
yield self._browse(self.env, (id,))
def __contains__(self, item):
- """ Test whether `item` is a subset of `self` or a field name. """
- if isinstance(item, BaseModel):
- if self._name == item._name:
- return set(item._ids) <= set(self._ids)
- raise except_orm("ValueError", "Mixing apples and oranges: %s in %s" % (item, self))
- if isinstance(item, basestring):
+ """ Test whether `item` (record or field name) is an element of `self`.
+ In the first case, the test is fully equivalent to::
+
+ any(item == record for record in self)
+ """
+ if isinstance(item, BaseModel) and self._name == item._name:
+ return len(item) == 1 and item.id in self._ids
+ elif isinstance(item, basestring):
return item in self._fields
- return item in self.ids
+ else:
+ raise except_orm("ValueError", "Mixing apples and oranges: %s in %s" % (item, self))
def __add__(self, other):
""" Return the concatenation of two recordsets. """
""" If `field` must be recomputed on some record in `self`, return the
corresponding records that must be recomputed.
"""
- for env in [self.env] + list(iter(self.env.all)):
- if env.todo.get(field) and env.todo[field] & self:
- return env.todo[field]
+ return self.env.check_todo(field, self)
def _recompute_todo(self, field):
""" Mark `field` to be recomputed. """
- todo = self.env.todo
- todo[field] = (todo.get(field) or self.browse()) | self
+ self.env.add_todo(field, self)
def _recompute_done(self, field):
- """ Mark `field` as being recomputed. """
- todo = self.env.todo
- if field in todo:
- recs = todo.pop(field) - self
- if recs:
- todo[field] = recs
+ """ Mark `field` as recomputed. """
+ self.env.remove_todo(field, self)
@api.model
def recompute(self):
""" Recompute stored function fields. The fields and records to
recompute have been determined by method :meth:`modified`.
"""
- for env in list(iter(self.env.all)):
- while env.todo:
- field, recs = next(env.todo.iteritems())
- # evaluate the fields to recompute, and save them to database
- for rec, rec1 in zip(recs, recs.with_context(recompute=False)):
- try:
- values = rec._convert_to_write({
- f.name: rec[f.name] for f in field.computed_fields
- })
- rec1._write(values)
- except MissingError:
- pass
- # mark the computed fields as done
- map(recs._recompute_done, field.computed_fields)
+ while self.env.has_todo():
+ field, recs = self.env.get_todo()
+ # evaluate the fields to recompute, and save them to database
+ for rec, rec1 in zip(recs, recs.with_context(recompute=False)):
+ try:
+ values = rec._convert_to_write({
+ f.name: rec[f.name] for f in field.computed_fields
+ })
+ rec1._write(values)
+ except MissingError:
+ pass
+ # mark the computed fields as done
+ map(recs._recompute_done, field.computed_fields)
#
# Generic onchange method
# dummy assignment: trigger invalidations on the record
for name in todo:
- record[name] = record[name]
+ value = record[name]
+ field = self._fields[name]
+ if not field_name and field.type == 'many2one' and field.delegate and not value:
+ # do not nullify all fields of parent record for new records
+ continue
+ record[name] = value
result = {'value': {}}
# determine which fields have been modified
for name, oldval in values.iteritems():
+ field = self._fields[name]
newval = record[name]
- if newval != oldval or getattr(newval, '_dirty', False):
- field = self._fields[name]
- result['value'][name] = field.convert_to_write(
- newval, record._origin, subfields[name],
- )
- todo.add(name)
+ if field.type in ('one2many', 'many2many'):
+ if newval != oldval or newval._is_dirty():
+ # put new value in result
+ result['value'][name] = field.convert_to_write(
+ newval, record._origin, subfields.get(name),
+ )
+ todo.add(name)
+ else:
+ # keep result: newval may have been dirty before
+ pass
+ else:
+ if newval != oldval:
+ # put new value in result
+ result['value'][name] = field.convert_to_write(
+ newval, record._origin, subfields.get(name),
+ )
+ todo.add(name)
+ else:
+ # clean up result to not return another value
+ result['value'].pop(name, None)
# At the moment, the client does not support updates on a *2many field
# while this one is modified by the user.
def __init__(self, records):
self._recs = records
+ def contains(self, field):
+ """ Return whether `records[0]` has a value for `field` in cache. """
+ if isinstance(field, basestring):
+ field = self._recs._fields[field]
+ return self._recs.id in self._recs.env.cache[field]
+
def __contains__(self, field):
""" Return whether `records[0]` has a regular value for `field` in cache. """
if isinstance(field, basestring):