1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
24 Object relational mapping to database (postgresql) module
25 * Hierarchical structure
26 * Constraints consistency, validations
27 * Object meta Data depends on its status
28 * Optimised processing by complex query (multiple actions at once)
29 * Default fields value
30 * Permissions optimisation
* Persistent object: DB postgresql
33 * Multi-level caching system
* 2 different inheritance mechanisms
36 - classicals (varchar, integer, boolean, ...)
37 - relations (one2many, many2one, many2many)
57 import dateutil.parser
59 from lxml import etree
63 import openerp.tools as tools
64 from openerp.tools.config import config
65 from openerp.tools.misc import CountingStream
66 from openerp.tools.safe_eval import safe_eval as eval
67 from openerp.tools.translate import _
68 from openerp import SUPERUSER_ID
69 from query import Query
71 _logger = logging.getLogger(__name__)
72 _schema = logging.getLogger(__name__ + '.schema')
74 # List of etree._Element subclasses that we choose to ignore when parsing XML.
75 from openerp.tools import SKIPPED_ELEMENT_TYPES
# Accepted ORDER BY specifications: comma-separated (optionally double-quoted)
# lowercase identifiers, each optionally followed by "asc" or "desc".
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Valid model (_name) identifiers: lowercase alphanumerics, '_' and '.'.
regex_object_name = re.compile(r'^[a-z0-9_.]+$')

# Batch size used when recomputing stored function fields during auto-init.
AUTOINIT_RECALCULATE_STORED_FIELDS = 1000
def transfer_field_to_modifiers(field, modifiers):
    """Derive the view "modifiers" (invisible/readonly/required) from a field
    descriptor (as returned by fields_get()) and merge them into *modifiers*.

    :param dict field: field descriptor; may carry boolean 'invisible',
                       'readonly' and 'required' keys plus a 'states' mapping
                       of {state: [(attr, value), ...]} exceptions
    :param dict modifiers: target dict, updated in place
    """
    default_values = {}
    state_exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        state_exceptions[attr] = []
        default_values[attr] = bool(field.get(attr))
    # Collect the states in which an attribute differs from its default.
    for state, modifs in (field.get("states", {})).items():
        for modif in modifs:
            if default_values[modif[0]] != modif[1]:
                state_exceptions[modif[0]].append(state)

    for attr, default_value in default_values.items():
        if state_exceptions[attr]:
            # Express the per-state exceptions as a domain on 'state'.
            modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
        else:
            modifiers[attr] = default_value
100 # Don't deal with groups, it is done by check_group().
101 # Need the context to evaluate the invisible attribute on tree views.
102 # For non-tree views, the context shouldn't be given.
# Don't deal with groups, it is done by check_group().
# Need the context to evaluate the invisible attribute on tree views.
# For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """Merge view-node attributes (attrs/states/invisible/readonly/required)
    into *modifiers*, in place.

    :param node: etree element of the view architecture
    :param dict modifiers: target dict, updated in place
    :param dict context: evaluation context for dynamic attribute values
    :param bool in_tree_view: route 'invisible' to the 'tree_invisible' key
    """
    # NOTE: 'eval' is the safe_eval alias imported at the top of the module.
    if node.get('attrs'):
        modifiers.update(eval(node.get('attrs')))

    if node.get('states'):
        if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
        else:
            modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]

    for a in ('invisible', 'readonly', 'required'):
        if node.get(a):
            v = bool(eval(node.get(a), {'context': context or {}}))
            if in_tree_view and a == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = v
            elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[a] = v
def simplify_modifiers(modifiers):
    """Drop falsy invisible/readonly/required entries from *modifiers*.

    A false value is the client-side default, so serializing it would be
    redundant; removing it keeps the JSON payload minimal. Operates in place.
    """
    for a in ('invisible', 'readonly', 'required'):
        if a in modifiers and not modifiers[a]:
            del modifiers[a]
def transfer_modifiers_to_node(modifiers, node):
    """Serialize *modifiers* as JSON into the node's 'modifiers' attribute.

    Empty modifier dicts are skipped entirely, so nodes without dynamic
    behavior carry no 'modifiers' attribute at all.
    """
    if modifiers:
        simplify_modifiers(modifiers)
        node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """ Processes node attributes and field descriptors to generate
    the ``modifiers`` node attribute and set it on the provided node.

    Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    :returns: nothing
    """
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """Assert that *what* — either an XML architecture string or a field
    descriptor dict — produces exactly the *expected* modifiers JSON string.

    Used by modifiers_tests() as a lightweight self-test harness.
    """
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
182 # openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """Self-tests for the modifiers machinery; run manually with
    openerp.osv.orm.modifiers_tests().
    """
    # (view architecture, expected modifiers JSON) pairs.
    arch_cases = [
        ('<field name="a"/>', '{}'),
        ('<field name="a" invisible="1"/>', '{"invisible": true}'),
        ('<field name="a" readonly="1"/>', '{"readonly": true}'),
        ('<field name="a" required="1"/>', '{"required": true}'),
        ('<field name="a" invisible="0"/>', '{}'),
        ('<field name="a" readonly="0"/>', '{}'),
        ('<field name="a" required="0"/>', '{}'),
        # TODO order is not guaranteed
        ('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}'),
        ('<field name="a" invisible="1" required="0"/>', '{"invisible": true}'),
        ('<field name="a" invisible="0" required="1"/>', '{"required": true}'),
        ("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}'),
    ]
    for arch, expected in arch_cases:
        test_modifiers(arch, expected)

    # The dictionary is supposed to be the result of fields_get().
    descriptor_cases = [
        ({}, '{}'),
        ({"invisible": True}, '{"invisible": true}'),
        ({"invisible": False}, '{}'),
    ]
    for descriptor, expected in descriptor_cases:
        test_modifiers(descriptor, expected)
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

    The _name attribute in osv and osv_memory object is subject to
    some restrictions. This function returns True or False whether
    the given name is allowed or not.

    TODO: this is an approximation. The goal in this approximation
    is to disallow uppercase characters (in some places, we quote
    table/column names and in other not, which leads to this kind
    of errors:

        psycopg2.ProgrammingError: relation "xxx" does not exist).

    The same restriction should apply to both osv and osv_memory
    objects for consistency.
    """
    # A single boolean expression replaces the if/return-False/return-True
    # chain; behavior is unchanged.
    return regex_object_name.match(name) is not None
def raise_on_invalid_object_name(name):
    """Validate *name* with check_object_name() and raise on failure.

    :raises except_orm: when the name contains characters outside
                        [a-z0-9_.] (see regex_object_name)
    """
    if not check_object_name(name):
        msg = "The _name attribute %s is not valid." % name
        # Log before raising so the invalid name shows up in server logs
        # even when the exception is swallowed upstream.
        _logger.error(msg)
        raise except_orm('ValueError', msg)
# Mapping of foreign-key ON DELETE action names to their PostgreSQL
# spellings. NOTE(review): the entries and closing brace of this dict are
# elided in this view of the file.
POSTGRES_CONFDELTYPES = {
def intersect(la, lb):
    """Return the elements of *la* that also appear in *lb*, preserving the
    order of *la* (duplicates in *la* are kept).

    :param la: first sequence
    :param lb: second sequence, used as the membership reference
    :rtype: list
    """
    # List comprehension instead of filter(): identical semantics, and it
    # returns a real list on both Python 2 and Python 3.
    return [x for x in la if x in lb]
def fix_import_export_id_paths(fieldname):
    """
    Fixes the id fields in import and exports, and splits field paths
    on '/'.

    ``.id`` (database id) and ``:id`` (external id) suffixes written without
    a separator are rewritten to their ``/``-separated form before splitting.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
    return fixed_external_id.split('/')
class except_orm(Exception):
    """Base exception for ORM-level errors, carrying a (name, value) pair.

    ``name`` is a short error category (e.g. 'ValueError') and ``value``
    the human-readable message.
    """
    def __init__(self, name, value):
        self.name = name
        self.value = value
        self.args = (name, value)
class BrowseRecordError(Exception):
    """Error raised for invalid browse records (e.g. a bad id); currently
    only referenced by the commented-out validation in browse_record."""
    pass
class browse_null(object):
    """ Readonly python database object browser

    Null-object stand-in returned for empty relational values: attribute
    access yields None. NOTE(review): this listing is line-sampled;
    __init__ and the bodies of several methods below are elided.
    """

    def __getitem__(self, name):

    def __getattr__(self, name):
        return None # XXX: return self ?

    def __nonzero__(self):

    def __unicode__(self):

        # (body of an elided method, apparently __iter__ — TODO confirm)
        raise NotImplementedError("Iteration is not allowed on %s" % self)
293 # TODO: execute an object method on browse_record_list
295 class browse_record_list(list):
296 """ Collection of browse objects
298 Such an instance will be returned when doing a ``browse([ids..])``
299 and will be iterable, yielding browse() objects
302 def __init__(self, lst, context=None):
305 super(browse_record_list, self).__init__(lst)
306 self.context = context
class browse_record(object):
    """ An object that behaves like a row of an object's table.
        It has attributes after the columns of the corresponding object.

        Example::

            uobj = pool.get('res.users')
            user_rec = uobj.browse(cr, uid, 104)

        NOTE(review): this listing is line-sampled; 'else:'/'try:' lines,
        a few 'def' lines and some assignments are elided below, so the
        nesting shown around those gaps is approximate.
    """

    def __init__(self, cr, uid, id, table, cache, context=None,
                 list_class=browse_record_list, fields_process=None):
        """
        :param table: the browsed object (inherited from orm)
        :param dict cache: a dictionary of model->field->data to be shared
                           across browse objects, thus reducing the SQL
                           read()s. It can speed up things a lot, but also be
                           disastrous if not discarded after write()/unlink()
        :param dict context: dictionary with an optional context
        """
        if fields_process is None:
        # (elided: cursor/uid/id assignments)
        self._list_class = list_class
        self._table = table # deprecated, use _model!
        self._table_name = self._table._name
        self.__logger = logging.getLogger('openerp.osv.orm.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        # one shared per-model cache slice for all records of this model
        cache.setdefault(table._name, {})
        self._data = cache[table._name]

        # if not (id and isinstance(id, (int, long,))):
        #     raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
        # if not table.exists(cr, uid, id, context):
        #     raise BrowseRecordError(_('Object %s does not exists') % (self,))

        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        # Serve *name* from the shared cache, reading it (plus any cheaply
        # prefetchable siblings) from the database on a cache miss.
        if name not in self._data[self._id]:
            # build the list of fields we will fetch
            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                # the name is a model method: return a proxy bound to this
                # record's cursor, uid and id
                attr = getattr(self._table, name)
                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    def function_proxy(*args, **kwargs):
                        if 'context' not in kwargs and self._context:
                            kwargs.update(context=self._context)
                        return attr(self._cr, self._uid, [self._id], *args, **kwargs)
                    return function_proxy
                # (elided else: branch — unknown field, log and raise)
                error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
                self.__logger.warning(error_msg)
                if self.__logger.isEnabledFor(logging.DEBUG):
                    self.__logger.debug(''.join(traceback.format_stack()))
                raise KeyError(error_msg)

            # a field is prefetched only when it is cheap and safe to read back
            prefetchable = lambda f: f._classic_write and f._prefetch and not f.groups and not f.deprecated

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            if prefetchable(col):
                # gen the list of "local" (ie not inherited) fields which are classic or many2one
                field_filter = lambda x: prefetchable(x[1])
                fields_to_fetch = filter(field_filter, self._table._columns.items())
                # gen the list of inherited fields
                inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
                # complete the field list with the inherited fields which are classic or many2one
                fields_to_fetch += filter(field_filter, inherits)
            # otherwise we fetch only that field
                fields_to_fetch = [(name, col)]

            # extend the read to every cached id still missing this field
            ids = filter(lambda id: name not in self._data[id], self._data.keys())

            field_names = map(lambda x: x[0], fields_to_fetch)
            # (elided: try:)
                field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
            except (openerp.exceptions.AccessError, except_orm):
                # prefetching attempt failed, perhaps we're violating ACL restrictions involuntarily
                _logger.info('Prefetching attempt for fields %s on %s failed for ids %s, re-trying just for id %s', field_names, self._model._name, ids, self._id)
                field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")

            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                # (elided guard: no matching res.lang record)
                    raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

            # (elided guard: empty read result)
                # Where did those ids come from? Perhaps old entries in ir_model_dat?
                _logger.warning("No field_values found for ids %s in %s", ids, self)
                raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                # (elided: new_data = {})
                for field_name, field_column in fields_to_fetch:
                    if field_column._type == 'many2one':
                        if result_line[field_name]:
                            obj = self._table.pool[field_column._obj]
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                                value = result_line[field_name]
                                # FIXME: this happen when a _inherits object
                                #        overwrite a field of it parent. Need
                                #        testing to be sure we got the right
                                #        object and not the parent one.
                                if not isinstance(value, browse_record):
                                    # In some cases the target model is not available yet, so we must ignore it,
                                    # which is safe in most cases, this value will just be loaded later when needed.
                                    # This situation can be caused by custom fields that connect objects with m2o without
                                    # respecting module dependencies, causing relationships to be connected to soon when
                                    # the target is not loaded yet.
                                    new_data[field_name] = browse_record(self._cr,
                                        self._uid, value, obj, self._cache,
                                        context=self._context,
                                        list_class=self._list_class,
                                        fields_process=self._fields_process)
                                new_data[field_name] = value
                            new_data[field_name] = browse_null()
                        new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool[field_column._obj], self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type == 'reference':
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                                # (elided else: the 'model,id' string form)
                                ref_obj, ref_id = result_line[field_name].split(',')
                                ref_id = long(ref_id)
                                obj = self._table.pool[ref_obj]
                                new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                            new_data[field_name] = browse_null()
                        new_data[field_name] = browse_null()
                        new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

            if not name in self._data[self._id]:
                # How did this happen? Could be a missing model due to custom fields used too soon, see above.
                self.__logger.error("Fields to fetch: %s, Field values: %s", field_names, field_values)
                self.__logger.error("Cached: %s, Table: %s", self._data[self._id], self._table)
                raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
        # attribute access is routed through __getitem__; a KeyError is
        # re-raised as AttributeError carrying the original traceback
            exc_info = sys.exc_info()
            raise AttributeError, "Got %r while trying to get attribute %s on a %s record." % (e, name, self._table._name), exc_info[2]

    def __contains__(self, name):
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

        # (body of an elided method, apparently __iter__ — TODO confirm)
        raise NotImplementedError("Iteration is not allowed on %s" % self)

    def __hasattr__(self, name):

        # (body of an elided method, apparently __repr__/__str__ — TODO confirm)
        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        if not isinstance(other, browse_record):
        # (elided: result for non-browse_record comparison)
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

        # (body of an elided method, apparently __hash__ — TODO confirm)
        return hash((self._table_name, self._id))

    # (elided: def refresh(self))
        """Force refreshing this browse_record's data and all the data of the
        records that belong to the same cache, by emptying the cache completely,
        preserving only the record identifiers (for prefetching optimizations).
        """
        for model, model_cache in self._cache.iteritems():
            # only preserve the ids of the records that were in the cache
            cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
            self._cache[model].clear()
            self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """ Returns the VARCHAR declaration for the provided size:

    * If no size (or an empty or negative size is provided) return an
      'infinite' VARCHAR
    * Otherwise return a VARCHAR(n)

    :type int size: varchar size, optional
    :rtype: str
    """
    if size:
        # Only validate the type when a size was actually supplied, so
        # falsy non-int values (None, 0, False) still mean "unbounded".
        if not isinstance(size, int):
            raise TypeError("VARCHAR parameter should be an int, got %s"
                            % type(size))
        if size > 0:
            return 'VARCHAR(%d)' % size
    return 'VARCHAR'
# Mapping from _column classes to their PostgreSQL storage type name;
# get_pg_type() uses the same name for both the identification type and
# the DDL spelling. NOTE(review): some entries and the closing brace are
# elided in this view of the file.
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    fields.serialized: 'text',
def get_pg_type(f, type_override=None):
    """ Return the PostgreSQL type of a field.

    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification)
        or None when the field type is not supported
    """
    field_type = type_override or type(f)

    if field_type in FIELDS_TO_PGTYPES:
        pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
    elif issubclass(field_type, fields.float):
        # explicit digits -> fixed-point NUMERIC, otherwise double precision
        if f.digits:
            pg_type = ('numeric', 'NUMERIC')
        else:
            pg_type = ('float8', 'DOUBLE PRECISION')
    elif issubclass(field_type, (fields.char, fields.reference)):
        pg_type = ('varchar', pg_varchar(f.size))
    elif issubclass(field_type, fields.selection):
        # integer keys (or the legacy size == -1 marker) are stored as ints,
        # every other selection as a varchar
        if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\
                or getattr(f, 'size', None) == -1:
            pg_type = ('int4', 'INTEGER')
        else:
            pg_type = ('varchar', pg_varchar(getattr(f, 'size', None)))
    elif issubclass(field_type, fields.function):
        if f._type == 'selection':
            pg_type = ('varchar', pg_varchar())
        else:
            # dispatch on the concrete type the function field emulates
            pg_type = get_pg_type(f, getattr(fields, f._type))
    else:
        _logger.warning('%s type not supported!', field_type)
        pg_type = None

    return pg_type
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.

    NOTE(review): this listing is line-sampled; a few statements of
    __init__ (e.g. its early-exit path and an else: branch) are elided.
    """

    # {module_name: [model_class, ...]}, filled as model classes are defined.
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            # (elided: presumably an early return stopping registration here)

        # The (OpenERP) module name can be in the `openerp.addons` namespace
        # or not. For instance module `sale` can be imported as
        # `openerp.addons.sale` (the good way) or `sale` (for backward
        # compatibility).
        module_parts = self.__module__.split('.')
        if len(module_parts) > 2 and module_parts[0] == 'openerp' and \
                module_parts[1] == 'addons':
            module_name = self.__module__.split('.')[2]
            # (elided: else:)
            module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# (list() around keys() keeps this working identically on Python 2 and 3)
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys())
class BaseModel(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class' subclasses:

    * Model: for regular database-persisted models
    * TransientModel: for temporary data, stored in the database but automatically
      vaccuumed every so often
    * AbstractModel: for abstract super classes meant to be shared by multiple
      _inheriting classes (usually Models or TransientModels)

    The system will later instantiate the class once per database (on
    which the class' module is installed).

    To create a class that should not be instantiated, the _register class attribute
    may be set to False.

    NOTE(review): this listing is line-sampled; several class attribute
    declarations (e.g. _name, _columns, _defaults, _order) are elided below.
    """
    __metaclass__ = MetaModel
    _auto = True # create database backend
    _register = False # Set to false if the model shouldn't be automatically discovered.
    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    # dict of {field:method}, with method returning the (name_get of records, {id: fold})
    # to include in the _read_group, if grouped on this field
    _transient = False # True in a TransientModel

    # { 'parent_model': 'm2o_field', ... }

    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model.
    # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #                  field_column_obj, origina_parent_model), ... }

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.

    _sql_constraints = []
    # methods wrapped by the security layer; not overridable via ir.rule
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']

    # pseudo-field used by clients for optimistic concurrency checking
    CONCURRENCY_CHECK_FIELD = '__last_update'
733 def log(self, cr, uid, id, message, secondary=False, context=None):
734 return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.")
736 def view_init(self, cr, uid, fields_list, context=None):
737 """Override this method to do specific things when a view on the object is opened."""
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).

        NOTE(review): this listing is line-sampled; guard statements such as
        'if not cr.rowcount:', several 'else:' lines, the 'vals = {' opener
        and some closing parentheses are elided below.
        """
        # Ensure the model itself is registered in ir_model.
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
            cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
            model_id = cr.fetchone()[0]
            cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
            model_id = cr.fetchone()[0]
        if 'module' in context:
            # register an xml-id for the model in the loading module
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                    (name_id, context['module'], 'ir.model', model_id)

        # Load the existing ir_model_fields rows for this model, keyed by name.
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        ir_model_fields_obj = self.pool.get('ir.model.fields')

        # sparse field should be created at the end, as it depends on its serialized field already existing
        model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0)
        for (k, f) in model_fields:
                'model_id': model_id,
                'field_description': f.string,
                'relation': f._obj or '',
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': f._fields_id if isinstance(f, fields.one2many) else '',
                'serialization_field_id': None,
            if getattr(f, 'serialization_field', None):
                # resolve link to serialization_field if specified by name
                serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', f.serialization_field)])
                if not serialization_field_id:
                    raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k))
                vals['serialization_field_id'] = serialization_field_id[0]

            # When its a custom field,it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                #setting value to let the problem NOT occur next time
                    vals['select_level'] = cols[k]['select_level']

                # the field is unknown in ir_model_fields: insert a new row
                cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
                id = cr.fetchone()[0]
                cr.execute("""INSERT INTO ir_model_fields (
                    id, model_id, model, name, field_description, ttype,
                    relation,state,select_level,relation_field, translate, serialization_field_id
                    %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                    id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                    vals['relation'], 'base',
                    vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id']
                if 'module' in context:
                    # register an xml-id for the new field as well
                    name1 = 'field_' + self._table + '_' + k
                    cr.execute("select name from ir_model_data where name=%s", (name1,))
                        name1 = name1 + "_" + str(id)
                    cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                        (name1, context['module'], 'ir.model.fields', id)
                # the field already exists: update the row on any difference
                for key, val in vals.items():
                    if cols[k][key] != vals[key]:
                        cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                        cr.execute("""UPDATE ir_model_fields SET
                            model_id=%s, field_description=%s, ttype=%s, relation=%s,
                            select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s, serialization_field_id=%s
                            model=%s AND name=%s""", (
                            vals['model_id'], vals['field_description'], vals['ttype'],
                            vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name']
# Goal: try to apply inheritance at the instantiation level and
# put objects in the pool var
    def create_instance(cls, pool, cr):
        """ Instanciate a given model.

        This class method instanciates the class of some model (i.e. a class
        deriving from osv or osv_memory). The class might be the class passed
        in argument or, if it inherits from another class, a class constructed
        by combining the two classes.

        TODO: the creation of the combined class is repeated at each call of
        this method. This is probably unnecessary.

        NOTE(review): this listing is line-sampled; the classmethod decorator,
        the continuation of the ``attributes`` list, the ``for s in
        attributes:`` loop opener, the ``nattr`` initialization and several
        else: branches are elided below.
        """
        attributes = ['_columns', '_defaults', '_inherits', '_constraints',
        parent_names = getattr(cls, '_inherit', None)
        # a single parent given as a string implies the child keeps (or takes)
        # that parent's name
        if isinstance(parent_names, (str, unicode)):
            name = cls._name or parent_names
            parent_names = [parent_names]
                raise TypeError('_name is mandatory in case of multiple inheritance')

        for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
            if parent_name not in pool:
                raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                    'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
            parent_model = pool[parent_name]
            if not getattr(cls, '_original_module', None) and name == parent_model._name:
                cls._original_module = parent_model._original_module
            parent_class = parent_model.__class__
            # (elided: for s in attributes: — merge each inheritable attribute)
                new = copy.copy(getattr(parent_model, s, {}))
                # Don't _inherit custom fields.
                if hasattr(new, 'update'):
                    new.update(cls.__dict__.get(s, {}))
                elif s=='_constraints':
                    for c in cls.__dict__.get(s, []):
                        for c2 in range(len(new)):
                            #For _constraints, we should check field and methods as well
                            if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                    or getattr(new[c2][0],'__name__', True) == \
                                    getattr(c[0],'__name__', False)):
                                # If new class defines a constraint with
                                # same function name, we let it override
                                # the parent's entry
                        new.extend(cls.__dict__.get(s, []))

            # Keep links to non-inherited constraints, e.g. useful when exporting translations
            nattr['_local_constraints'] = cls.__dict__.get('_constraints', [])
            nattr['_local_sql_constraints'] = cls.__dict__.get('_sql_constraints', [])
            # build the combined class; _register=False keeps the metaclass
            # from re-registering it
            cls = type(name, (cls, parent_class), dict(nattr, _register=False))

        cls._local_constraints = getattr(cls, '_constraints', [])
        cls._local_sql_constraints = getattr(cls, '_sql_constraints', [])

        if not getattr(cls, '_original_module', None):
            cls._original_module = cls._module
        obj = object.__new__(cls)

        if hasattr(obj, '_columns'):
            # float fields are registry-dependent (digit attribute). Duplicate them to avoid issues.
            for c, f in obj._columns.items():
                if f._type == 'float':
                    obj._columns[c] = copy.copy(f)

        obj.__init__(pool, cr)
# NOTE(review): the enclosing `def` line (original ~947) is elided from this
# listing — presumably a classmethod-style registration hook on the model
# class; confirm signature against the full source.
948 """Register this model.
950 This doesn't create an instance but simply register the model
951 as being part of the module where it is defined.
956 # Set the module name (e.g. base, sale, accounting, ...) on the class.
957 module = cls.__module__.split('.')[0]
958 if not hasattr(cls, '_module'):
# NOTE(review): the body of this branch (original 959-960) is elided —
# presumably it assigns cls._module = module; confirm.
961 # Record this class in the list of models to instantiate for this module,
962 # managed by the metaclass.
963 module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
# Guard against double registration when the module is reloaded.
964 if cls not in module_model_list:
966 module_model_list.append(cls)
968 # Since we don't return an instance here, the __init__
969 # method won't be called.
# NOTE(review): this listing has elided lines (the embedded original line
# numbers jump); comments below flag the gaps that affect readability.
972 def __init__(self, pool, cr):
973 """ Initialize a model and make it part of the given registry.
975 - copy the stored fields' functions in the osv_pool,
976 - update the _columns with the fields found in ir_model_fields,
977 - ensure there is a many2one for each _inherits'd parent,
978 - update the children's _columns,
979 - give a chance to each field to initialize itself.
982 pool.add(self._name, self)
# A model must declare either _name or _inherit; otherwise fail loudly.
985 if not self._name and not hasattr(self, '_inherit'):
986 name = type(self).__name__.split('.')[0]
987 msg = "The class %s has to have a _name attribute" % name
990 raise except_orm('ValueError', msg)
992 if not self._description:
993 self._description = self._name
# SQL table name is derived from the model name ('res.partner' -> 'res_partner').
995 self._table = self._name.replace('.', '_')
997 if not hasattr(self, '_log_access'):
998 # If _log_access is not specified, it is the same value as _auto.
999 self._log_access = getattr(self, "_auto", True)
# Copy _columns so registry-local changes (manual fields added below)
# do not leak into the shared class attribute.
1001 self._columns = self._columns.copy()
1002 for store_field in self._columns:
1003 f = self._columns[store_field]
# NOTE(review): body of this branch (original 1005) is elided — presumably
# calls f.digits_change(cr) to refresh decimal precision; confirm.
1004 if hasattr(f, 'digits_change'):
# Drop any stale trigger entry for this (model, field) pair before
# re-registering it below.
1006 def not_this_field(stored_func):
1007 x, y, z, e, f, l = stored_func
1008 return x != self._name or y != store_field
1009 self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
1010 if not isinstance(f, fields.function):
# Default store-trigger spec: recompute on the record's own ids.
1016 sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, f.priority, None)}
1017 for object, aa in sm.items():
# Accept both the 4-tuple and legacy 3-tuple store definitions
# (NOTE(review): the if/elif lines between these unpackings are elided).
1019 (fnct, fields2, order, length) = aa
1021 (fnct, fields2, order) = aa
1024 raise except_orm('Error',
1025 ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
1026 self.pool._store_function.setdefault(object, [])
1027 t = (self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length)
1028 if not t in self.pool._store_function[object]:
1029 self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
# Keep triggers ordered by priority (index 4 of the tuple); Python 2 cmp-sort.
1030 self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
# Map constraint names to messages so SQL errors can be translated for users.
1032 for (key, _, msg) in self._sql_constraints:
1033 self.pool._sql_error[self._table+'_'+key] = msg
1035 # Load manual fields
1037 # Check the query is already done for all modules of if we need to
1039 if self.pool.fields_by_model is not None:
1040 manual_fields = self.pool.fields_by_model.get(self._name, [])
1042 cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
1043 manual_fields = cr.dictfetchall()
1044 for field in manual_fields:
# Skip manual fields that would shadow a code-declared column.
1045 if field['name'] in self._columns:
# NOTE(review): the dict literal's opening line (original ~1047,
# `attrs = {`) is elided from this listing.
1048 'string': field['field_description'],
1049 'required': bool(field['required']),
1050 'readonly': bool(field['readonly']),
# `eval` here is openerp.tools.safe_eval (aliased at the top of the file).
1051 'domain': eval(field['domain']) if field['domain'] else None,
1052 'size': field['size'] or None,
1053 'ondelete': field['on_delete'],
1054 'translate': (field['translate']),
1057 #'select': int(field['select_level'])
# Sparse fields are serialized into another (container) field.
1060 if field['serialization_field_id']:
1061 cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
1062 attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
1063 if field['ttype'] in ['many2one', 'one2many', 'many2many']:
1064 attrs.update({'relation': field['relation']})
1065 self._columns[field['name']] = fields.sparse(**attrs)
1066 elif field['ttype'] == 'selection':
1067 self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
1068 elif field['ttype'] == 'reference':
1069 self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
1070 elif field['ttype'] == 'many2one':
1071 self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
1072 elif field['ttype'] == 'one2many':
1073 self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
1074 elif field['ttype'] == 'many2many':
# Synthesize a relation-table name for manual m2m fields.
1075 _rel1 = field['relation'].replace('.', '_')
1076 _rel2 = field['model'].replace('.', '_')
1077 _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
1078 self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
# Fallback: simple column types resolved by name on the fields module.
1080 self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
1082 self._inherits_check()
1083 self._inherits_reload()
1084 if not self._sequence:
1085 self._sequence = self._table + '_id_seq'
1086 for k in self._defaults:
1087 assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
# Give each column a chance to reset cached state for this registry.
1088 for f in self._columns:
1089 self._columns[f].restart()
# Transient (osv_memory) models get vacuum limits from the server config.
1092 if self.is_transient():
1093 self._transient_check_count = 0
1094 self._transient_max_count = config.get('osv_memory_count_limit')
1095 self._transient_max_hours = config.get('osv_memory_age_limit')
1096 assert self._log_access, "TransientModels must have log_access turned on, "\
1097 "in order to implement their access rights policy"
1100 if self._rec_name is not None:
1101 assert self._rec_name in self._all_columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
1103 self._rec_name = 'name'
# Serialize one browse_record `row` into one or more export rows (lists of
# strings), recursing into relational sub-fields. Returns [data] + extra
# lines produced by one2many/many2many sub-records.
# NOTE(review): several interior lines are elided from this listing.
1106 def __export_row(self, cr, uid, row, fields, context=None):
# Map a falsy value to a type-appropriate export placeholder
# (elided bodies presumably return 0.0 / 0 / 'False'; confirm).
1110 def check_type(field_type):
1111 if field_type == 'float':
1113 elif field_type == 'integer':
1115 elif field_type == 'boolean':
# Resolve the column object for f[i] through the _inherits chain.
1119 def selection_field(in_field):
1120 col_obj = self.pool[in_field.keys()[0]]
1121 if f[i] in col_obj._columns.keys():
1122 return col_obj._columns[f[i]]
1123 elif f[i] in col_obj._inherits.keys():
1124 selection_field(col_obj._inherits)
# Return (or create under module '__export__') an external ID for record r.
1128 def _get_xml_id(self, cr, uid, r):
1129 model_data = self.pool.get('ir.model.data')
1130 data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
1132 d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
1134 r = '%s.%s' % (d['module'], d['name'])
# No existing xml_id: synthesize one, de-duplicating with a postfix.
1140 n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
1141 if not model_data.search(cr, uid, [('name', '=', n)]):
1144 model_data.create(cr, SUPERUSER_ID, {
1146 'model': self._name,
1148 'module': '__export__',
# One output cell per requested field path.
1154 data = map(lambda x: '', range(len(fields)))
1156 for fpos in range(len(fields)):
1166 r = _get_xml_id(self, cr, uid, r)
1169 # To display external name of selection field when its exported
1170 if f[i] in self._columns.keys():
1171 cols = self._columns[f[i]]
1172 elif f[i] in self._inherit_fields.keys():
1173 cols = selection_field(self._inherits)
1174 if cols and cols._type == 'selection':
1175 sel_list = cols.selection
# Only static (list) selections can be mapped to their label here.
1176 if r and type(sel_list) == type([]):
1177 r = [x[1] for x in sel_list if r==x[0]]
1178 r = r and r[0] or False
1180 if f[i] in self._columns:
1181 r = check_type(self._columns[f[i]]._type)
1182 elif f[i] in self._inherit_fields:
1183 r = check_type(self._inherit_fields[f[i]][2]._type)
1184 data[fpos] = r or False
# Relational value: recurse with the sub-field paths under this prefix.
1186 if isinstance(r, (browse_record_list, list)):
1188 fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
1191 if [x for x in fields2 if x]:
1193 done.append(fields2)
# m2m exported by external id: comma-join the xml_ids of all targets.
1194 if cols and cols._type=='many2many' and len(fields[fpos])>(i+1) and (fields[fpos][i+1]=='id'):
1195 data[fpos] = ','.join([_get_xml_id(self, cr, uid, x) for x in r])
1199 lines2 = row2._model.__export_row(cr, uid, row2, fields2,
# Fold the first sub-row's cells into the current row.
1202 for fpos2 in range(len(fields)):
1203 if lines2 and lines2[0][fpos2]:
1204 data[fpos2] = lines2[0][fpos2]
# Otherwise export the related records' display names, comma-joined.
1208 name_relation = self.pool[rr._table_name]._rec_name
1209 if isinstance(rr[name_relation], browse_record):
1210 rr = rr[name_relation]
1211 rr_name = self.pool[rr._table_name].name_get(cr, uid, [rr.id], context=context)
1212 rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
1213 dt += tools.ustr(rr_name or '') + ','
# Strip the trailing comma added by the loop above.
1214 data[fpos] = dt[:-1]
# Scalar many2one: export its name_get display name.
1223 if isinstance(r, browse_record):
1224 r = self.pool[r._table_name].name_get(cr, uid, [r.id], context=context)
1225 r = r and r[0] and r[0][1] or ''
1226 data[fpos] = tools.ustr(r or '')
1227 return [data] + lines
1229 def export_data(self, cr, uid, ids, fields_to_export, context=None):
1231 Export fields for selected objects
1233 :param cr: database cursor
1234 :param uid: current user id
1235 :param ids: list of ids
1236 :param fields_to_export: list of fields
1237 :param context: context arguments, like lang, time zone
1238 :rtype: dictionary with a *datas* matrix
1240 This method is used when exporting data via client menu
# Merge own columns with inherited ones so any exported field resolves.
1245 cols = self._columns.copy()
1246 for f in self._inherit_fields:
1247 cols.update({f: self._inherit_fields[f][2]})
# Split 'a/b/c'-style paths into lists (see fix_import_export_id_paths).
1248 fields_to_export = map(fix_import_export_id_paths, fields_to_export)
# NOTE(review): the initialisation of `datas` (original ~1249) is elided
# from this listing — presumably `datas = []`.
1250 for row in self.browse(cr, uid, ids, context):
1251 datas += self.__export_row(cr, uid, row, fields_to_export, context)
1252 return {'datas': datas}
1254 def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
1257 Use :meth:`~load` instead
1259 Import given data in given module
1261 This method is used when importing data via client menu.
1263 Example of fields to import for a sale.order::
1266 partner_id, (=name_search)
1267 order_line/.id, (=database_id)
1269 order_line/product_id/id, (=xml id)
1270 order_line/price_unit,
1271 order_line/product_uom_qty,
1272 order_line/product_uom/id (=xml_id)
1274 This method returns a 4-tuple with the following structure::
1276 (return_code, errored_resource, error_message, unused)
1278 * The first item is a return code, it is ``-1`` in case of
1279 import error, or the last imported row number in case of success
1280 * The second item contains the record data dict that failed to import
1281 in case of error, otherwise it's 0
1282 * The third item contains an error message string in case of error,
1284 * The last item is currently unused, with no specific semantics
1286 :param fields: list of fields to import
1287 :param datas: data to import
1288 :param mode: 'init' or 'update' for record creation
1289 :param current_module: module name
1290 :param noupdate: flag for record creation
1291 :param filename: optional file to store partial import state for recovery
1292 :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
1293 :rtype: (int, dict or 0, str or 0, str or 0)
# Work on a copy of the context so the caller's dict is not mutated.
1295 context = dict(context) if context is not None else {}
1296 context['_import_current_module'] = current_module
1298 fields = map(fix_import_export_id_paths, fields)
1299 ir_model_data_obj = self.pool.get('ir.model.data')
# Log callback: abort the whole import on the first 'error' message
# (NOTE(review): the enclosing `def log(m):` line is elided).
1302 if m['type'] == 'error':
1303 raise Exception(m['message'])
# Resume a previously interrupted import from the pickled position file.
1305 if config.get('import_partial') and filename:
1306 with open(config.get('import_partial'), 'rb') as partial_import_file:
1307 data = pickle.load(partial_import_file)
1308 position = data.get(filename, 0)
1312 for res_id, xml_id, res, info in self._convert_records(cr, uid,
1313 self._extract_records(cr, uid, fields, datas,
1314 context=context, log=log),
1315 context=context, log=log):
1316 ir_model_data_obj._update(cr, uid, self._name,
1317 current_module, res, mode=mode, xml_id=xml_id,
1318 noupdate=noupdate, res_id=res_id, context=context)
1319 position = info.get('rows', {}).get('to', 0) + 1
# Checkpoint progress every 100 rows so a crash can resume mid-file.
1320 if config.get('import_partial') and filename and (not (position%100)):
1321 with open(config.get('import_partial'), 'rb') as partial_import:
1322 data = pickle.load(partial_import)
1323 data[filename] = position
1324 with open(config.get('import_partial'), 'wb') as partial_import:
1325 pickle.dump(data, partial_import)
1326 if context.get('defer_parent_store_computation'):
1327 self._parent_store_compute(cr)
# Python 2 except syntax; any failure maps to the documented -1 tuple.
1329 except Exception, e:
1331 return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), ''
1333 if context.get('defer_parent_store_computation'):
1334 self._parent_store_compute(cr)
1335 return position, 0, 0, 0
1337 def load(self, cr, uid, fields, data, context=None):
1339 Attempts to load the data matrix, and returns a list of ids (or
1340 ``False`` if there was an error and no id could be generated) and a
1343 The ids are those of the records created and saved (in database), in
1344 the same order they were extracted from the file. They can be passed
1345 directly to :meth:`~read`
1347 :param fields: list of fields to import, at the same index as the corresponding data
1348 :type fields: list(str)
1349 :param data: row-major matrix of data to import
1350 :type data: list(list(str))
1351 :param dict context:
1352 :returns: {ids: list(int)|False, messages: [Message]}
# Outer savepoint: on any error we roll the whole load back at the end.
1354 cr.execute('SAVEPOINT model_load')
1357 fields = map(fix_import_export_id_paths, fields)
# clear_caches() returns None in this codebase is NOT the case here:
# NOTE(review): assignment chains _update calls below off ModelData —
# confirm clear_caches() returns the model (elided lines may reassign).
1358 ModelData = self.pool['ir.model.data'].clear_caches()
1360 fg = self.fields_get(cr, uid, context=context)
1367 for id, xid, record, info in self._convert_records(cr, uid,
1368 self._extract_records(cr, uid, fields, data,
1369 context=context, log=messages.append),
1370 context=context, log=messages.append):
# Per-record savepoint so one bad record doesn't poison the rest.
1372 cr.execute('SAVEPOINT model_load_save')
1373 except psycopg2.InternalError, e:
1374 # broken transaction, exit and hope the source error was
# Only add the generic message if no specific error was logged.
1376 if not any(message['type'] == 'error' for message in messages):
1377 messages.append(dict(info, type='error',message=
1378 u"Unknown database error: '%s'" % e))
1381 ids.append(ModelData._update(cr, uid, self._name,
1382 current_module, record, mode=mode, xml_id=xid,
1383 noupdate=noupdate, res_id=id, context=context))
1384 cr.execute('RELEASE SAVEPOINT model_load_save')
# Warnings are reported but the record is rolled back, not aborted.
1385 except psycopg2.Warning, e:
1386 messages.append(dict(info, type='warning', message=str(e)))
1387 cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
1388 except psycopg2.Error, e:
# Translate the Postgres error code into a user-facing message.
1389 messages.append(dict(
1391 **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
1392 # Failed to write, log to messages, rollback savepoint (to
1393 # avoid broken transaction) and keep going
1394 cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
# Any error anywhere invalidates the whole import: undo everything.
1395 if any(message['type'] == 'error' for message in messages):
1396 cr.execute('ROLLBACK TO SAVEPOINT model_load')
1398 return {'ids': ids, 'messages': messages}
1399 def _extract_records(self, cr, uid, fields_, data,
1400 context=None, log=lambda a: None):
1401 """ Generates record dicts from the data sequence.
1403 The result is a generator of dicts mapping field names to raw
1404 (unconverted, unvalidated) values.
1406 For relational fields, if sub-fields were provided the value will be
1407 a list of sub-records
1409 The following sub-fields may be set on the record (by key):
1410 * None is the name_get for the record (to use with name_create/name_search)
1411 * "id" is the External ID for the record
1412 * ".id" is the Database ID for the record
1414 columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
1415 # Fake columns to avoid special cases in extractor
1416 columns[None] = fields.char('rec_name')
1417 columns['id'] = fields.char('External ID')
1418 columns['.id'] = fields.integer('Database ID')
1420 # m2o fields can't be on multiple lines so exclude them from the
1421 # is_relational field rows filter, but special-case it later on to
1422 # be handled with relational fields (as it can have subfields)
1423 is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
# Pre-built cell extractors: o2m columns vs everything else.
1424 get_o2m_values = itemgetter_tuple(
1425 [index for index, field in enumerate(fields_)
1426 if columns[field[0]]._type == 'one2many'])
1427 get_nono2m_values = itemgetter_tuple(
1428 [index for index, field in enumerate(fields_)
1429 if columns[field[0]]._type != 'one2many'])
1430 # Checks if the provided row has any non-empty non-relational field
1431 def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
1432 return any(g(row)) and not any(f(row))
# Main scan loop (NOTE(review): loop header and index init, original
# ~1433-1435, are elided; `index` and `row` come from that elided code).
1436 if index >= len(data): return
1439 # copy non-relational fields to record dict
1440 record = dict((field[0], value)
1441 for field, value in itertools.izip(fields_, row)
1442 if not is_relational(field[0]))
1444 # Get all following rows which have relational values attached to
1445 # the current record (no non-relational values)
1446 record_span = itertools.takewhile(
1447 only_o2m_values, itertools.islice(data, index + 1, None))
1448 # stitch record row back on for relational fields
1449 record_span = list(itertools.chain([row], record_span))
1450 for relfield in set(
1451 field[0] for field in fields_
1452 if is_relational(field[0])):
1453 column = columns[relfield]
1454 # FIXME: how to not use _obj without relying on fields_get?
1455 Model = self.pool[column._obj]
1457 # get only cells for this sub-field, should be strictly
1458 # non-empty, field path [None] is for name_get column
1459 indices, subfields = zip(*((index, field[1:] or [None])
1460 for index, field in enumerate(fields_)
1461 if field[0] == relfield))
1463 # return all rows which have at least one value for the
1464 # subfields of relfield
1465 relfield_data = filter(any, map(itemgetter_tuple(indices), record_span))
# Recurse: the sub-model extracts its own records from the sub-cells.
1466 record[relfield] = [subrecord
1467 for subrecord, _subinfo in Model._extract_records(
1468 cr, uid, subfields, relfield_data,
1469 context=context, log=log)]
# Yield the record together with the source-row span it covers.
1471 yield record, {'rows': {
1473 'to': index + len(record_span) - 1
1475 index += len(record_span)
1476 def _convert_records(self, cr, uid, records,
1477 context=None, log=lambda a: None):
1478 """ Converts records from the source iterable (recursive dicts of
1479 strings) into forms which can be written to the database (via
1480 self.create or (ir.model.data)._update)
1482 :returns: a list of triplets of (id, xid, record)
1483 :rtype: list((int|None, str|None, dict))
1485 if context is None: context = {}
1486 Converter = self.pool['ir.fields.converter']
1487 columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
1488 Translation = self.pool['ir.translation']
# Build field-name -> translated label map for human-readable messages
# (NOTE(review): the dict() wrapper line, original ~1489, is elided).
1490 (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
1491 context.get('lang'))
1493 for f, column in columns.iteritems())
1495 convert = Converter.for_model(cr, uid, self, context=context)
# Wrap a conversion exception into a message dict and pass it to `log`.
1497 def _log(base, field, exception):
1498 type = 'warning' if isinstance(exception, Warning) else 'error'
1499 # logs the logical (not human-readable) field name for automated
1500 # processing of response, but injects human readable in message
1501 record = dict(base, type=type, field=field,
1502 message=unicode(exception.args[0]) % base)
1503 if len(exception.args) > 1 and exception.args[1]:
1504 record.update(exception.args[1])
# CountingStream tracks the current record index for error reporting.
1507 stream = CountingStream(records)
1508 for record, extras in stream:
1511 # name_get/name_create
1512 if None in record: pass
# '.id' holds a database id; int() may fail for overridden id columns.
1519 dbid = int(record['.id'])
1521 # in case of overridden id column
1522 dbid = record['.id']
1523 if not self.search(cr, uid, [('id', '=', dbid)], context=context):
1526 record=stream.index,
1528 message=_(u"Unknown database identifier '%s'") % dbid))
1531 converted = convert(record, lambda field, err:\
1532 _log(dict(extras, record=stream.index, field=field_names[field]), field, err))
1534 yield dbid, xid, converted, dict(extras, record=stream.index)
def get_invalid_fields(self, cr, uid):
    """Return the names of the fields flagged as invalid by the most
    recent constraint validation pass (``_validate`` fills and clears
    ``self._invalids``), as a plain list."""
    return [field_name for field_name in self._invalids]
# Run all python-level _constraints on `ids`; collect failures, remember
# the offending field names in self._invalids, and raise a ValidateError
# listing every failed constraint. NOTE(review): the accumulator setup
# (original ~1543, presumably `error_msgs = []`) is elided.
1539 def _validate(self, cr, uid, ids, context=None):
1540 context = context or {}
1541 lng = context.get('lang')
1542 trans = self.pool.get('ir.translation')
1544 for constraint in self._constraints:
1545 fun, msg, fields = constraint
1546 # We don't pass around the context here: validation code
1547 # must always yield the same results.
1548 if not fun(self, cr, uid, ids):
1549 # Check presence of __call__ directly instead of using
1550 # callable() because it will be deprecated as of Python 3.0
1551 if hasattr(msg, '__call__'):
1552 tmp_msg = msg(self, cr, uid, ids, context=context)
# A callable message may return (template, params) for interpolation.
1553 if isinstance(tmp_msg, tuple):
1554 tmp_msg, params = tmp_msg
1555 translated_msg = tmp_msg % params
1557 translated_msg = tmp_msg
# Plain string message: translate it as a 'constraint' source.
1559 translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg)
1561 _("The field(s) `%s` failed against a constraint: %s") % (', '.join(fields), translated_msg)
# Record the failing fields so clients can highlight them.
1563 self._invalids.update(fields)
1565 raise except_orm('ValidateError', '\n'.join(error_msgs))
# All constraints passed: reset the invalid-fields marker.
1567 self._invalids.clear()
1569 def default_get(self, cr, uid, fields_list, context=None):
1571 Returns default values for the fields in fields_list.
1573 :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
1574 :type fields_list: list
1575 :param context: optional context dictionary - it may contains keys for specifying certain options
1576 like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
1577 It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
1578 or override a default value for a field.
1579 A special ``bin_size`` boolean flag may also be passed in the context to request the
1580 value of all fields.binary columns to be returned as the size of the binary instead of its
1581 contents. This can also be selectively overriden by passing a field-specific flag
1582 in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
1583 Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
1584 :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
1586 # trigger view init hook
1587 self.view_init(cr, uid, fields_list, context)
# NOTE(review): `defaults = {}` initialisation (original ~1588-1592) is
# elided from this listing.
1593 # get the default values for the inherited fields
1594 for t in self._inherits.keys():
1595 defaults.update(self.pool[t].default_get(cr, uid, fields_list, context))
1597 # get the default values defined in the object
1598 for f in fields_list:
1599 if f in self._defaults:
1600 if callable(self._defaults[f]):
1601 defaults[f] = self._defaults[f](self, cr, uid, context)
1603 defaults[f] = self._defaults[f]
# Resolve the column definition (own column, inherited, or None).
1605 fld_def = ((f in self._columns) and self._columns[f]) \
1606 or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
# Property fields pull their default from ir.property.
1609 if isinstance(fld_def, fields.property):
1610 property_obj = self.pool.get('ir.property')
1611 prop_value = property_obj.get(cr, uid, f, self._name, context=context)
1613 if isinstance(prop_value, (browse_record, browse_null)):
1614 defaults[f] = prop_value.id
1616 defaults[f] = prop_value
1618 if f not in defaults:
1621 # get the default values set by the user and override the default
1622 # values defined in the object
1623 ir_values_obj = self.pool.get('ir.values')
1624 res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
1625 for id, field, field_value in res:
1626 if field in fields_list:
1627 fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
# m2o default: drop it if the referenced record no longer exists.
1628 if fld_def._type == 'many2one':
1629 obj = self.pool[fld_def._obj]
1630 if not obj.search(cr, uid, [('id', '=', field_value or False)]):
# m2m default: keep only ids that still exist.
1632 if fld_def._type == 'many2many':
1633 obj = self.pool[fld_def._obj]
1635 for i in range(len(field_value or [])):
1636 if not obj.search(cr, uid, [('id', '=',
1639 field_value2.append(field_value[i])
1640 field_value = field_value2
# o2m default: filter nested m2one values against existing records.
1641 if fld_def._type == 'one2many':
1642 obj = self.pool[fld_def._obj]
1644 for i in range(len(field_value or [])):
1645 field_value2.append({})
1646 for field2 in field_value[i]:
1647 if field2 in obj._columns.keys() and obj._columns[field2]._type == 'many2one':
1648 obj2 = self.pool[obj._columns[field2]._obj]
1649 if not obj2.search(cr, uid,
1650 [('id', '=', field_value[i][field2])]):
1652 elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type == 'many2one':
1653 obj2 = self.pool[obj._inherit_fields[field2][2]._obj]
1654 if not obj2.search(cr, uid,
1655 [('id', '=', field_value[i][field2])]):
1657 # TODO add test for many2many and one2many
1658 field_value2[i][field2] = field_value[i][field2]
1659 field_value = field_value2
1660 defaults[field] = field_value
1662 # get the default values from the context
# Context 'default_XXX' keys have the final word over everything above.
1663 for key in context or {}:
1664 if key.startswith('default_') and (key[8:] in fields_list):
1665 defaults[key[8:]] = context[key]
# Return the names of all fields of this model, own plus inherited.
# NOTE(review): the final `return res` (original ~1674) is elided from
# this listing.
1668 def fields_get_keys(self, cr, user, context=None):
1669 res = self._columns.keys()
1670 # TODO I believe this loop can be replace by
1671 # res.extend(self._inherit_fields.key())
1672 for parent in self._inherits:
1673 res.extend(self.pool[parent].fields_get_keys(cr, user, context))
# Return _rec_name if it is a real column, otherwise fall back to the
# first declared column (or 'id' when there are no columns at all).
# NOTE(review): the final `return rec_name` (original ~1680) is elided.
1676 def _rec_name_fallback(self, cr, uid, context=None):
1677 rec_name = self._rec_name
1678 if rec_name not in self._columns:
1679 rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"
1683 # Overload this method if you need a window title which depends on the context
# Default implementation hook; body (original ~1686, presumably
# `return False`) is elided from this listing.
1685 def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
1688 def user_has_groups(self, cr, uid, groups, context=None):
1689 """Return true if the user is at least member of one of the groups
1690 in groups_str. Typically used to resolve ``groups`` attribute
1691 in view and model definitions.
1693 :param str groups: comma-separated list of fully-qualified group
1694 external IDs, e.g.: ``base.group_user,base.group_system``
1695 :return: True if the current user is a member of one of the
# Membership in ANY listed group is sufficient (logical OR).
1698 return any([self.pool.get('res.users').has_group(cr, uid, group_ext_id)
1699 for group_ext_id in groups.split(',')])
1701 def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
1702 """Return the description of the fields in the node.
1704 In a normal call to this method, node is a complete view architecture
1705 but it is actually possible to give some sub-node (this is used so
1706 that the method can call itself recursively).
1708 Originally, the field descriptions are drawn from the node itself.
1709 But there is now some code calling fields_get() in order to merge some
1710 of those information in the architecture.
# Helper: lxml wants byte strings in Python 2; encode unicode as UTF-8.
1722 if isinstance(s, unicode):
1723 return s.encode('utf8')
1726 def check_group(node):
1727 """Apply group restrictions, may be set at view level or model level::
1728 * at view level this means the element should be made invisible to
1729 people who are not members
1730 * at model level (exclusively for fields, obviously), this means
1731 the field should be completely removed from the view, as it is
1732 completely unavailable for non-members
1734 :return: True if field should be included in the result of fields_view_get
1736 if node.tag == 'field' and node.get('name') in self._all_columns:
1737 column = self._all_columns[node.get('name')].column
# Column-level groups: remove the field from the arch entirely.
1738 if column.groups and not self.user_has_groups(cr, user,
1739 groups=column.groups,
1741 node.getparent().remove(node)
1742 fields.pop(node.get('name'), None)
1743 # no point processing view-level ``groups`` anymore, return
# View-level groups: keep the node but force it invisible.
1745 if node.get('groups'):
1746 can_see = self.user_has_groups(cr, user,
1747 groups=node.get('groups'),
1750 node.set('invisible', '1')
1751 modifiers['invisible'] = True
1752 if 'attrs' in node.attrib:
1753 del(node.attrib['attrs']) #avoid making field visible later
# 'groups' is consumed server-side; never sent to the client.
1754 del(node.attrib['groups'])
1757 if node.tag in ('field', 'node', 'arrow'):
# Diagram node/arrow elements delegate to their target model's arch.
1758 if node.get('object'):
1763 if f.tag == 'field':
1764 xml += etree.tostring(f, encoding="utf-8")
1766 new_xml = etree.fromstring(encode(xml))
1767 ctx = context.copy()
1768 ctx['base_model_name'] = self._name
1769 xarch, xfields = self.pool[node.get('object')].__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
1774 attrs = {'views': views}
1776 if node.get('name'):
# Resolve the column for this <field> (own or inherited).
1779 if node.get('name') in self._columns:
1780 column = self._columns[node.get('name')]
1782 column = self._inherit_fields[node.get('name')][2]
1787 relation = self.pool[column._obj] if column._obj else None
# Inline sub-views (form/tree/... inside a relational field) are
# processed recursively against the co-model.
1792 if f.tag in ('form', 'tree', 'graph', 'kanban', 'calendar'):
1794 ctx = context.copy()
1795 ctx['base_model_name'] = self._name
1796 xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
1797 views[str(f.tag)] = {
1801 attrs = {'views': views}
1802 if node.get('widget') and node.get('widget') == 'selection':
1803 # Prepare the cached selection list for the client. This needs to be
1804 # done even when the field is invisible to the current user, because
1805 # other events could need to change its value to any of the selectable ones
1806 # (such as on_change events, refreshes, etc.)
1808 # If domain and context are strings, we keep them for client-side, otherwise
1809 # we evaluate them server-side to consider them when generating the list of
1811 # TODO: find a way to remove this hack, by allow dynamic domains
1813 if column._domain and not isinstance(column._domain, basestring):
1814 dom = list(column._domain)
# `eval` is safe_eval (aliased at the top of the file).
1815 dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
1816 search_context = dict(context)
1817 if column._context and not isinstance(column._context, basestring):
1818 search_context.update(column._context)
1819 attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
# Non-required selections get an explicit empty choice.
1820 if (node.get('required') and not int(node.get('required'))) or not column.required:
1821 attrs['selection'].append((False, ''))
1822 fields[node.get('name')] = attrs
1824 field = model_fields.get(node.get('name'))
# Model-level attributes (readonly/required/...) become modifiers.
1826 transfer_field_to_modifiers(field, modifiers)
1829 elif node.tag in ('form', 'tree'):
1830 result = self.view_header_get(cr, user, False, node.tag, context)
1832 node.set('string', result)
1833 in_tree_view = node.tag == 'tree'
1835 elif node.tag == 'calendar':
# Calendar attributes reference extra fields the client must read.
1836 for additional_field in ('date_start', 'date_delay', 'date_stop', 'color', 'all_day','attendee'):
1837 if node.get(additional_field):
1838 fields[node.get(additional_field)] = {}
1840 if not check_group(node):
1841 # node must be removed, no need to proceed further with its children
1844 # The view architeture overrides the python model.
1845 # Get the attrs before they are (possibly) deleted by check_group below
1846 transfer_node_to_modifiers(node, modifiers, context, in_tree_view)
1848 # TODO remove attrs couterpart in modifiers when invisible is true ?
# Translate node text/tail/attributes when a language is active.
1851 if 'lang' in context:
1852 if node.text and node.text.strip():
1853 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.text.strip())
1855 node.text = node.text.replace(node.text.strip(), trans)
1856 if node.tail and node.tail.strip():
1857 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.tail.strip())
1859 node.tail = node.tail.replace(node.tail.strip(), trans)
1861 if node.get('string') and not result:
1862 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
1863 if trans == node.get('string') and ('base_model_name' in context):
1864 # If translation is same as source, perhaps we'd have more luck with the alternative model name
1865 # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
1866 trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
1868 node.set('string', trans)
1870 for attr_name in ('confirm', 'sum', 'avg', 'help', 'placeholder'):
1871 attr_value = node.get(attr_name)
1873 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], attr_value)
1875 node.set(attr_name, trans)
# Recurse into children; filters/separators under <field> too.
1878 if children or (node.tag == 'field' and f.tag in ('filter','separator')):
1879 fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))
1881 transfer_modifiers_to_node(modifiers, node)
1884 def _disable_workflow_buttons(self, cr, user, node):
1885 """ Set the buttons in node to readonly if the user can't activate them. """
1887 # admin user can always activate workflow buttons
# NOTE(review): the admin short-circuit branch (original ~1888-1889,
# presumably `if user == 1: return node`) is elided from this listing.
1890 # TODO handle the case of more than one workflow for a model or multiple
1891 # transitions with different groups and same signal
1892 usersobj = self.pool.get('res.users')
# Only workflow (non-'object') buttons are group-gated here.
1893 buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
1894 for button in buttons:
1895 user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
# Find the groups allowed to fire this signal via workflow transitions.
1896 cr.execute("""SELECT DISTINCT t.group_id
1898 INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
1899 INNER JOIN wkf_transition t ON (t.act_to = a.id)
1902 AND t.group_id is NOT NULL
1903 """, (self._name, button.get('name')))
1904 group_ids = [x[0] for x in cr.fetchall() if x[0]]
# No group restriction, or user in at least one allowed group => clickable.
1905 can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
1906 button.set('readonly', str(int(not can_click)))
# NOTE(review): the final `return node` (original ~1907) is elided; the
# caller at __view_look_dom_arch uses the return value.
# NOTE(review): numbered, partially-elided dump — gaps in the embedded line
# numbers mark missing statements (e.g. fields initialization, the `model`
# assignment read at line 1953). Code kept byte-identical; comments only.
1909 def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
1910 """ Return an architecture and a description of all the fields.
1912 The field description combines the result of fields_get() and
1915 :param node: the architecture as as an etree
1916 :return: a tuple (arch, fields) where arch is the given node as a
1917 string and fields is the description of all the fields.
# Diagram views reference two other models (node and arrow objects); pull
# their field definitions in as well.
1921 if node.tag == 'diagram':
1922 if node.getchildren()[0].tag == 'node':
1923 node_model = self.pool[node.getchildren()[0].get('object')]
1924 node_fields = node_model.fields_get(cr, user, None, context)
1925 fields.update(node_fields)
# Disallow creation of diagram nodes when the user lacks create rights.
1926 if not node.get("create") and not node_model.check_access_rights(cr, user, 'create', raise_exception=False):
1927 node.set("create", 'false')
1928 if node.getchildren()[1].tag == 'arrow':
1929 arrow_fields = self.pool[node.getchildren()[1].get('object')].fields_get(cr, user, None, context)
1930 fields.update(arrow_fields)
# Non-diagram case (the `else:` line is elided): all fields of this model.
1932 fields = self.fields_get(cr, user, None, context)
1933 fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
# Grey out workflow buttons the user cannot fire.
1934 node = self._disable_workflow_buttons(cr, user, node)
# Reflect missing create/unlink/write rights as view-level flags.
1935 if node.tag in ('kanban', 'tree', 'form', 'gantt'):
1936 for action, operation in (('create', 'create'), ('delete', 'unlink'), ('edit', 'write')):
1937 if not node.get(action) and not self.check_access_rights(cr, user, operation, raise_exception=False):
1938 node.set(action, 'false')
1939 arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
# Keep only the fields actually used by the view (the del statement at
# line 1942 is elided from this dump).
1940 for k in fields.keys():
1941 if k not in fields_def:
1943 for field in fields_def:
1945 # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
1946 fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
1947 elif field in fields:
1948 fields[field].update(fields_def[field])
# Error path: a field referenced by the view does not exist on the model.
# Find which (sub)views mention it, to help the user locate the problem.
1950 cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
1951 res = cr.fetchall()[:]
# NOTE(review): `model` is read here but its assignment (line 1952) is elided.
1953 res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
1954 msg = "\n * ".join([r[0] for r in res])
1955 msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
1957 raise except_orm('View error', msg)
# NOTE(review): numbered, partially-elided dump — the `continue` after the
# o2m/m2m skip (line 1974) and the final `return view` (line ~1978) are among
# the elided lines. Code kept byte-identical; comments only.
1960 def _get_default_form_view(self, cr, user, context=None):
1961 """ Generates a default single-line form view using all fields
1962 of the current model except the m2m and o2m ones.
1964 :param cr: database cursor
1965 :param int user: user id
1966 :param dict context: connection context
1967 :returns: a form view as an lxml document
1968 :rtype: etree._Element
# Root <form> labelled with the model's description.
1970 view = etree.Element('form', string=self._description)
1971 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
1972 for field, descriptor in self.fields_get(cr, user, context=context).iteritems():
# Skip list-like relational fields; they don't fit a single-line form.
1973 if descriptor['type'] in ('one2many', 'many2many'):
1975 etree.SubElement(view, 'field', name=field)
# Text fields take a full row, so force a line break after them.
1976 if descriptor['type'] == 'text':
1977 etree.SubElement(view, 'newline')
# NOTE(review): numbered, partially-elided dump — the trailing `return view`
# (line ~1991) is elided. Code kept byte-identical; comments only.
1980 def _get_default_search_view(self, cr, user, context=None):
1981 """ Generates a single-field search view, based on _rec_name.
1983 :param cr: database cursor
1984 :param int user: user id
1985 :param dict context: connection context
1986 :returns: a tree view as an lxml document
1987 :rtype: etree._Element
# A minimal <search> view exposing only the record-name field.
1989 view = etree.Element('search', string=self._description)
1990 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
# NOTE(review): numbered, partially-elided dump — the trailing `return view`
# (line ~2004) is elided. Code kept byte-identical; comments only.
1993 def _get_default_tree_view(self, cr, user, context=None):
1994 """ Generates a single-field tree view, based on _rec_name.
1996 :param cr: database cursor
1997 :param int user: user id
1998 :param dict context: connection context
1999 :returns: a tree view as an lxml document
2000 :rtype: etree._Element
# A minimal <tree> view showing only the record-name field.
2002 view = etree.Element('tree', string=self._description)
2003 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
# NOTE(review): numbered, partially-elided dump — the body of set_first_of
# (lines 2022-2027), several `break`/`else` lines and the final `return view`
# are elided. Code kept byte-identical; comments only.
2006 def _get_default_calendar_view(self, cr, user, context=None):
2007 """ Generates a default calendar view by trying to infer
2008 calendar fields from a number of pre-set attribute names
2010 :param cr: database cursor
2011 :param int user: user id
2012 :param dict context: connection context
2013 :returns: a calendar view
2014 :rtype: etree._Element
2016 def set_first_of(seq, in_, to):
2017 """Sets the first value of ``seq`` also found in ``in_`` to
2018 the ``to`` attribute of the view being closed over.
2020 Returns whether it's found a suitable value (and set it on
2021 the attribute) or not
# Root <calendar> with the record-name field as the displayed label.
2029 view = etree.Element('calendar', string=self._description)
2030 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
# Infer a start-date field from conventional column names when the model's
# declared _date_name is not an actual column.
2032 if self._date_name not in self._columns:
2034 for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
2035 if dt in self._columns:
2036 self._date_name = dt
# No usable date column at all: a calendar view cannot be built.
2041 raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
2042 view.set('date_start', self._date_name)
# Optional color field (user/partner), best-effort.
2044 set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
2045 self._columns, 'color')
# An end is required: either an explicit stop date or a duration field.
2047 if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
2048 self._columns, 'date_stop'):
2049 if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
2050 self._columns, 'date_delay'):
2052 _('Invalid Object Architecture!'),
2053 _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % self._name))
2058 # if view_id, view_type is not required
# NOTE(review): numbered, heavily-elided dump — SQL FROM clauses, several
# try/except and loop headers, `return result` and other statements are
# missing from this view. Code kept byte-identical; comments only.
2060 def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
2062 Get the detailed composition of the requested view like fields, model, view architecture
2064 :param cr: database cursor
2065 :param user: current user id
2066 :param view_id: id of the view or None
2067 :param view_type: type of the view to return if view_id is None ('form', tree', ...)
2068 :param context: context arguments, like lang, time zone
2069 :param toolbar: true to include contextual actions
2070 :param submenu: deprecated
2071 :return: dictionary describing the composition of the requested view (including inherited views and extensions)
2072 :raise AttributeError:
2073 * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
2074 * if some tag other than 'position' is found in parent view
2075 :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
# Helper (def line elided): coerce unicode view arch to a utf-8 bytestring
# so lxml can parse it.
2082 if isinstance(s, unicode):
2083 return s.encode('utf8')
# Raise a descriptive AttributeError naming both the parent and the child
# view by xml_id when an inheritance spec cannot be applied.
2086 def raise_view_error(error_msg, child_view_id):
2087 view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
2088 error_msg = error_msg % {'parent_xml_id': view.xml_id}
2089 raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
2090 % (child_view.xml_id, self._name, error_msg))
2092 def locate(source, spec):
2093 """ Locate a node in a source (parent) architecture.
2095 Given a complete source (parent) architecture (i.e. the field
2096 `arch` in a view), and a 'spec' node (a node in an inheriting
2097 view that specifies the location in the source view of what
2098 should be changed), return (if it exists) the node in the
2099 source view matching the specification.
2101 :param source: a parent architecture to modify
2102 :param spec: a modifying node in an inheriting view
2103 :return: a node in the source matching the spec
2106 if spec.tag == 'xpath':
2107 nodes = source.xpath(spec.get('expr'))
2108 return nodes[0] if nodes else None
2109 elif spec.tag == 'field':
2110 # Only compare the field name: a field can be only once in a given view
2111 # at a given level (and for multilevel expressions, we should use xpath
2112 # inheritance spec anyway).
2113 for node in source.getiterator('field'):
2114 if node.get('name') == spec.get('name'):
# Generic tag match: all attributes except position/version must be equal.
2118 for node in source.getiterator(spec.tag):
2119 if isinstance(node, SKIPPED_ELEMENT_TYPES):
2121 if all(node.get(attr) == spec.get(attr) \
2122 for attr in spec.attrib
2123 if attr not in ('position','version')):
2124 # Version spec should match parent's root element's version
2125 if spec.get('version') and spec.get('version') != source.get('version'):
2130 def apply_inheritance_specs(source, specs_arch, inherit_id=None):
2131 """ Apply an inheriting view.
2133 Apply to a source architecture all the spec nodes (i.e. nodes
2134 describing where and what changes to apply to some parent
2135 architecture) given by an inheriting view.
2137 :param source: a parent architecture to modify
2138 :param specs_arch: a modifying architecture in an inheriting view
2139 :param inherit_id: the database id of the inheriting view
2140 :return: a modified source where the specs are applied
2143 specs_tree = etree.fromstring(encode(specs_arch))
2144 # Queue of specification nodes (i.e. nodes describing where and
2145 # changes to apply to some parent architecture).
2146 specs = [specs_tree]
# Loop header over `specs` is elided; each iteration pops one spec node.
2150 if isinstance(spec, SKIPPED_ELEMENT_TYPES):
# <data> is just a container: enqueue its children as individual specs.
2152 if spec.tag == 'data':
2153 specs += [ c for c in specs_tree ]
2155 node = locate(source, spec)
2156 if node is not None:
# Default insertion position is 'inside' the located node.
2157 pos = spec.get('position', 'inside')
2158 if pos == 'replace':
# Replacing the root swaps the whole source for the spec's first child.
2159 if node.getparent() is None:
2160 source = copy.deepcopy(spec[0])
2163 node.addprevious(child)
2164 node.getparent().remove(node)
2165 elif pos == 'attributes':
# <attribute name="..">value</attribute>: set or (when empty) delete.
2166 for child in spec.getiterator('attribute'):
2167 attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
2169 node.set(attribute[0], attribute[1])
2170 elif attribute[0] in node.attrib:
2171 del node.attrib[attribute[0]]
2173 sib = node.getnext()
2177 elif pos == 'after':
2182 sib.addprevious(child)
2183 elif pos == 'before':
2184 node.addprevious(child)
2186 raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
# Spec did not match any node: rebuild a readable tag for the error message.
2189 ' %s="%s"' % (attr, spec.get(attr))
2190 for attr in spec.attrib
2191 if attr != 'position'
2193 tag = "<%s%s>" % (spec.tag, attrs)
2194 if spec.get('version') and spec.get('version') != source.get('version'):
2195 raise_view_error("Mismatching view API version for element '%s': %r vs %r in parent view '%%(parent_xml_id)s'" % \
2196 (tag, spec.get('version'), source.get('version')), inherit_id)
2197 raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
2201 def apply_view_inheritance(cr, user, source, inherit_id):
2202 """ Apply all the (directly and indirectly) inheriting views.
2204 :param source: a parent architecture to modify (with parent
2205 modifications already applied)
2206 :param inherit_id: the database view_id of the parent view
2207 :return: a modified source where all the modifying architecture
# Recursively fold every child view's specs into the source architecture.
2211 sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name, context=context)
2212 for (view_arch, view_id) in sql_inherit:
2213 source = apply_inheritance_specs(source, view_arch, view_id)
2214 source = apply_view_inheritance(cr, user, source, view_id)
2217 result = {'type': view_type, 'model': self._name}
2220 parent_view_model = None
# A context key like 'form_view_ref' may select a specific root view.
2221 view_ref_key = view_type + '_view_ref'
2222 view_ref = context.get(view_ref_key)
2223 # Search for a root (i.e. without any parent) view.
2225 if view_ref and not view_id:
2227 module, view_ref = view_ref.split('.', 1)
2228 cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
2229 view_ref_res = cr.fetchone()
2231 view_id = view_ref_res[0]
# view_ref without a '.' cannot be resolved: warn, then fall through.
2233 _logger.warning('%r requires a fully-qualified external id (got: %r for model %s). '
2234 'Please use the complete `module.view_id` form instead.', view_ref_key, view_ref,
# Fetch the requested view by id, or the highest-priority root view of the
# requested type for this model (FROM ir_ui_view lines are elided).
2238 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2240 WHERE id=%s""", (view_id,))
2242 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2244 WHERE model=%s AND type=%s AND inherit_id IS NULL
2245 ORDER BY priority""", (self._name, view_type))
2246 sql_res = cr.dictfetchone()
# Walk up inherit_id links until the root view is reached (loop header elided).
2251 view_id = sql_res['inherit_id'] or sql_res['id']
2252 parent_view_model = sql_res['model']
2253 if not sql_res['inherit_id']:
2256 # if a view was found
2258 source = etree.fromstring(encode(sql_res['arch']))
2260 arch=apply_view_inheritance(cr, user, source, sql_res['id']),
2261 type=sql_res['type'],
2262 view_id=sql_res['id'],
2263 name=sql_res['name'],
2264 field_parent=sql_res['field_parent'] or False)
2266 # otherwise, build some kind of default view
2268 view = getattr(self, '_get_default_%s_view' % view_type)(
2270 except AttributeError:
2271 # what happens here, graph case?
2272 raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
# When the root view belongs to another model (mixed inheritance), pass the
# alternative model name so translations can fall back to it.
2280 if parent_view_model != self._name:
2281 ctx = context.copy()
2282 ctx['base_model_name'] = parent_view_model
2285 xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
2286 result['arch'] = xarch
2287 result['fields'] = xfields
# Toolbar section: strip heavy report payload keys from actions (the
# `clean` helper definition is elided from this dump).
2292 for key in ('report_sxw_content', 'report_rml_content',
2293 'report_sxw', 'report_rml',
2294 'report_sxw_content_data', 'report_rml_content_data'):
2298 ir_values_obj = self.pool.get('ir.values')
2299 resprint = ir_values_obj.get(cr, user, 'action',
2300 'client_print_multi', [(self._name, False)], False,
2302 resaction = ir_values_obj.get(cr, user, 'action',
2303 'client_action_multi', [(self._name, False)], False,
2306 resrelate = ir_values_obj.get(cr, user, 'action',
2307 'client_action_relate', [(self._name, False)], False,
# multi=True actions only appear on list (tree) views.
2309 resaction = [clean(action) for action in resaction
2310 if view_type == 'tree' or not action[2].get('multi')]
2311 resprint = [clean(print_) for print_ in resprint
2312 if view_type == 'tree' or not print_[2].get('multi')]
2313 #When multi="True" set it will display only in More of the list view
2314 resrelate = [clean(action) for action in resrelate
2315 if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')]
2317 for x in itertools.chain(resprint, resaction, resrelate):
2318 x['string'] = x['name']
2320 result['toolbar'] = {
2322 'action': resaction,
# Public (single-underscore) alias so subclasses/other modules can reach the
# name-mangled __view_look_dom_arch implementation.
2327 _view_look_dom_arch = __view_look_dom_arch
# NOTE(review): numbered, partially-elided dump — both return statements
# (lines ~2334-2336) are elided. Code kept byte-identical; comments only.
# Count the records matching the given domain by delegating to search(count=True).
2329 def search_count(self, cr, user, args, context=None):
2332 res = self.search(cr, user, args, context=context, count=True)
# Defensive: some search() overrides may ignore count=True and return ids.
2333 if isinstance(res, list):
# NOTE(review): numbered, partially-elided dump — the docstring's triple-quote
# delimiter lines are among the elided lines. Code kept byte-identical;
# comments only.
2337 def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
2339 Search for records based on a search domain.
2341 :param cr: database cursor
2342 :param user: current user id
2343 :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
2344 :param offset: optional number of results to skip in the returned values (default: 0)
2345 :param limit: optional max number of records to return (default: **None**)
2346 :param order: optional columns to sort by (default: self._order=id )
2347 :param context: optional context arguments, like lang, time zone
2348 :type context: dictionary
2349 :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
2350 :return: id or list of ids of records matching the criteria
2351 :rtype: integer or list of integers
2352 :raise AccessError: * if user tries to bypass access rules for read on the requested object.
2354 **Expressing a search domain (args)**
2356 Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:
2358 * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
2359 * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
2360 The semantics of most of these operators are obvious.
2361 The ``child_of`` operator will look for records who are children or grand-children of a given record,
2362 according to the semantics of this model (i.e following the relationship field named by
2363 ``self._parent_name``, by default ``parent_id``.
2364 * **value** must be a valid value to compare with the values of **field_name**, depending on its type.
2366 Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
2367 These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
2368 Be very careful about this when you combine them the first time.
2370 Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::
2372 [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))
2374 The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::
2376 (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
# Thin public wrapper: all work happens in the private _search implementation.
2379 return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
# NOTE(review): numbered, partially-elided dump — the single-id normalization
# body (line ~2394) and early-return for empty ids are elided. Code kept
# byte-identical; comments only.
2381 def name_get(self, cr, user, ids, context=None):
2382 """Returns the preferred display value (text representation) for the records with the
2383 given ``ids``. By default this will be the value of the ``name`` column, unless
2384 the model implements a custom behavior.
2385 Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
2389 :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
# Accept a single id as well as a list of ids.
2393 if isinstance(ids, (int, long)):
# Normal case: render the _rec_name column via its display-name hook.
2396 if self._rec_name in self._all_columns:
2397 rec_name_column = self._all_columns[self._rec_name].column
2398 return [(r['id'], rec_name_column.as_display_name(cr, user, self, r[self._rec_name], context=context))
2399 for r in self.read(cr, user, ids, [self._rec_name],
2400 load='_classic_write', context=context)]
# Fallback when no usable _rec_name column exists: "model,id" placeholder.
2401 return [(id, "%s,%s" % (self._name, id)) for id in ids]
# NOTE(review): numbered, partially-elided dump — docstring delimiter lines
# are elided. Code kept byte-identical; comments only.
2403 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
2404 """Search for records that have a display name matching the given ``name`` pattern if compared
2405 with the given ``operator``, while also matching the optional search domain (``args``).
2406 This is used for example to provide suggestions based on a partial value for a relational
2408 Sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
2411 This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
2412 and then :meth:`~.name_get` on the result of the search.
2414 :param list args: optional search domain (see :meth:`~.search` for syntax),
2415 specifying further restrictions
2416 :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
2418 :param int limit: optional max number of records to return
2420 :return: list of pairs ``(id,text_repr)`` for all matching records.
# Public wrapper over the private implementation (no dedicated name_get uid).
2422 return self._name_search(cr, user, name, args, operator, context, limit)
# NOTE(review): numbered, partially-elided dump — docstring delimiter lines
# are elided. Code kept byte-identical; comments only.
2424 def name_create(self, cr, uid, name, context=None):
2425 """Creates a new record by calling :meth:`~.create` with only one
2426 value provided: the name of the new record (``_rec_name`` field).
2427 The new record will also be initialized with any default values applicable
2428 to this model, or provided through the context. The usual behavior of
2429 :meth:`~.create` applies.
2430 Similarly, this method may raise an exception if the model has multiple
2431 required fields and some do not have default values.
2433 :param name: name of the record to create
2436 :return: the :meth:`~.name_get` pair value for the newly-created record.
# Create with just the display-name field, then return its (id, name) pair.
2438 rec_id = self.create(cr, uid, {self._rec_name: name}, context)
2439 return self.name_get(cr, uid, [rec_id], context)[0]
2441 # private implementation of name_search, allows passing a dedicated user for the name_get part to
2442 # solve some access rights issues
# NOTE(review): numbered, partially-elided dump — the args-default handling
# (lines ~2444-2448) and the final return (lines ~2455-2456) are elided.
# Code kept byte-identical; comments only.
2443 def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
2449 # optimize out the default criterion of ``ilike ''`` that matches everything
2450 if not (name == '' and operator == 'ilike'):
2451 args += [(self._rec_name, operator, name)]
# name_get may run as a different (more privileged) uid than the search.
2452 access_rights_uid = name_get_uid or user
2453 ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
2454 res = self.name_get(cr, access_rights_uid, ids, context)
# NOTE(review): numbered, partially-elided dump — the `res = {}` init, the
# per-lang/per-field loop headers and the final return are elided. Code kept
# byte-identical; comments only.
# Read the translated field labels (column strings) for the given languages.
2457 def read_string(self, cr, uid, id, langs, fields=None, context=None):
2460 self.pool.get('ir.translation').check_access_rights(cr, uid, 'read')
# Default to every own and inherited field.
2462 fields = self._columns.keys() + self._inherit_fields.keys()
2463 #FIXME: collect all calls to _get_source into one SQL call.
2465 res[lang] = {'code': lang}
2467 if f in self._columns:
2468 res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
2470 res[lang][f] = res_trans
# Fall back to the untranslated column label.
2472 res[lang][f] = self._columns[f].string
# Merge in labels coming from _inherits parent models.
2473 for table in self._inherits:
2474 cols = intersect(self._inherit_fields.keys(), fields)
2475 res2 = self.pool[table].read_string(cr, uid, id, langs, cols, context)
2478 res[lang]['code'] = lang
2479 for f in res2[lang]:
2480 res[lang][f] = res2[lang][f]
# NOTE(review): numbered, partially-elided dump — the per-lang/per-field loop
# headers (lines ~2486-2487) and the return are elided. Code kept
# byte-identical; comments only.
# Write translated field labels (column strings) for the given languages.
2483 def write_string(self, cr, uid, id, langs, vals, context=None):
2484 self.pool.get('ir.translation').check_access_rights(cr, uid, 'write')
2485 #FIXME: try to only call the translation in one SQL
2488 if field in self._columns:
# The untranslated label is stored as the translation source.
2489 src = self._columns[field].string
2490 self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
# Propagate to _inherits parent models for their own fields.
2491 for table in self._inherits:
2492 cols = intersect(self._inherit_fields.keys(), vals)
2494 self.pool[table].write_string(cr, uid, id, langs, vals, context)
# NOTE(review): numbered, partially-elided dump — the loop header over the
# fetched defaults (line ~2513) and the return statements are elided. Code
# kept byte-identical; comments only.
# Fill `values` with default values for every field the caller did not set.
2497 def _add_missing_default_values(self, cr, uid, values, context=None):
2498 missing_defaults = []
2499 avoid_tables = [] # avoid overriding inherited values when parent is set
# When the parent record is passed explicitly, its table's fields must not
# be overridden by defaults.
2500 for tables, parent_field in self._inherits.items():
2501 if parent_field in values:
2502 avoid_tables.append(tables)
2503 for field in self._columns.keys():
2504 if not field in values:
2505 missing_defaults.append(field)
2506 for field in self._inherit_fields.keys():
2507 if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
2508 missing_defaults.append(field)
2510 if len(missing_defaults):
2511 # override defaults with the provided values, never allow the other way around
2512 defaults = self.default_get(cr, uid, missing_defaults, context)
# Normalize m2m defaults given as plain id lists into (6,0,ids) commands.
2514 if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
2515 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
2516 and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
2517 defaults[dv] = [(6, 0, defaults[dv])]
# Normalize o2m defaults given as dict lists into (0,0,vals) commands.
2518 if (dv in self._columns and self._columns[dv]._type == 'one2many' \
2519 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
2520 and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
2521 defaults[dv] = [(0, 0, x) for x in defaults[dv]]
# Caller-provided values always win over defaults.
2522 defaults.update(values)
# NOTE(review): numbered, partially-elided dump — the `try:` line, the cache
# clearing statement and the `pass` in the except branch are elided. Code
# kept byte-identical; comments only.
2526 def clear_caches(self):
2527 """ Clear the caches
2529 This clears the caches associated to methods decorated with
2530 ``tools.ormcache`` or ``tools.ormcache_multi``.
# Probe for the cache attribute; models without one simply do nothing.
2533 getattr(self, '_ormcache')
# Flag the registry so other workers know caches were invalidated.
2535 self.pool._any_cache_cleared = True
2536 except AttributeError:
# NOTE(review): numbered, partially-elided dump — several statements (e.g.
# result/known_values initialization, parts of append_left/append_right, the
# final return) are elided. Code kept byte-identical; comments only.
2540 def _read_group_fill_results(self, cr, uid, domain, groupby, groupby_list, aggregated_fields,
2541 read_group_result, read_group_order=None, context=None):
2542 """Helper method for filling in empty groups for all possible values of
2543 the field being grouped by"""
2545 # self._group_by_full should map groupable fields to a method that returns
2546 # a list of all aggregated values that we want to display for this field,
2547 # in the form of a m2o-like pair (key,label).
2548 # This is useful to implement kanban views for instance, where all columns
2549 # should be displayed even if they don't contain any record.
2551 # Grab the list of all groups that should be displayed, including all present groups
2552 present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
# Run the full-groups hook as superuser so display isn't limited by rules.
2553 all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
2554 read_group_order=read_group_order,
2555 access_rights_uid=openerp.SUPERUSER_ID,
# Template for empty groups: all aggregates False, zero count.
2558 result_template = dict.fromkeys(aggregated_fields, False)
2559 result_template[groupby + '_count'] = 0
2560 if groupby_list and len(groupby_list) > 1:
2561 result_template['__context'] = {'group_by': groupby_list[1:]}
2563 # Merge the left_side (current results as dicts) with the right_side (all
2564 # possible values as m2o pairs). Both lists are supposed to be using the
2565 # same ordering, and can be merged in one pass.
# Append a real (non-empty) group, deduplicating on its grouped value.
2568 def append_left(left_side):
2569 grouped_value = left_side[groupby] and left_side[groupby][0]
2570 if not grouped_value in known_values:
2571 result.append(left_side)
2572 known_values[grouped_value] = left_side
2574 count_attr = groupby + '_count'
2575 known_values[grouped_value].update({count_attr: left_side[count_attr]})
# Append an empty group built from the result template.
2576 def append_right(right_side):
2577 grouped_value = right_side[0]
2578 if not grouped_value in known_values:
2579 line = dict(result_template)
2580 line[groupby] = right_side
2581 line['__domain'] = [(groupby,'=',grouped_value)] + domain
2583 known_values[grouped_value] = line
# Single-pass merge of the two (identically ordered) lists.
2584 while read_group_result or all_groups:
2585 left_side = read_group_result[0] if read_group_result else None
2586 right_side = all_groups[0] if all_groups else None
2587 assert left_side is None or left_side[groupby] is False \
2588 or isinstance(left_side[groupby], (tuple,list)), \
2589 'M2O-like pair expected, got %r' % left_side[groupby]
2590 assert right_side is None or isinstance(right_side, (tuple,list)), \
2591 'M2O-like pair expected, got %r' % right_side
2592 if left_side is None:
2593 append_right(all_groups.pop(0))
2594 elif right_side is None:
2595 append_left(read_group_result.pop(0))
2596 elif left_side[groupby] == right_side:
2597 append_left(read_group_result.pop(0))
2598 all_groups.pop(0) # discard right_side
2599 elif not left_side[groupby] or not left_side[groupby][0]:
2600 # left side == "Undefined" entry, not present on right_side
2601 append_left(read_group_result.pop(0))
2603 append_right(all_groups.pop(0))
# Mark folded kanban columns as reported by the _group_by_full hook
# (surrounding loop header is elided from this dump).
2607 r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
2610 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
2612 Get the list of records in list view grouped by the given ``groupby`` fields
2614 :param cr: database cursor
2615 :param uid: current user id
2616 :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
2617 :param list fields: list of fields present in the list view specified on the object
2618 :param list groupby: list of groupby descriptions by which the records will be grouped.
2619 A groupby description is either a field (then it will be grouped by that field)
2620 or a string 'field:groupby_function'. Right now, the only functions supported
2621 are 'day', 'week', 'month', 'quarter' or 'year', and they only make sense for
2622 date/datetime fields.
2623 :param int offset: optional number of records to skip
2624 :param int limit: optional max number of records to return
2625 :param dict context: context arguments, like lang, time zone.
2626 :param list orderby: optional ``order by`` specification, for
2627 overriding the natural sort ordering of the
2628 groups, see also :py:meth:`~osv.osv.osv.search`
2629 (supported only for many2one fields currently)
2630 :return: list of dictionaries(one dictionary for each record) containing:
2632 * the values of fields grouped by the fields in ``groupby`` argument
2633 * __domain: list of tuples specifying the search criteria
2634 * __context: dictionary with argument like ``groupby``
2635 :rtype: [{'field_name_1': value, ...]
2636 :raise AccessError: * if user has no read rights on the requested object
2637 * if user tries to bypass access rules for read on the requested object
2640 context = context or {}
2641 self.check_access_rights(cr, uid, 'read')
2643 fields = self._columns.keys()
2645 query = self._where_calc(cr, uid, domain, context=context)
2646 self._apply_ir_rules(cr, uid, query, 'read', context=context)
2648 # Take care of adding join(s) if groupby is an '_inherits'ed field
2649 groupby_list = groupby
2650 qualified_groupby_field = groupby
2652 if isinstance(groupby, list):
2653 groupby = groupby[0]
2654 splitted_groupby = groupby.split(':')
2655 if len(splitted_groupby) == 2:
2656 groupby = splitted_groupby[0]
2657 groupby_function = splitted_groupby[1]
2659 groupby_function = False
2660 qualified_groupby_field = self._inherits_join_calc(groupby, query)
2663 assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
2664 groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
2665 assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
2667 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
2668 fget = self.fields_get(cr, uid, fields)
2670 group_count = group_by = groupby
2671 group_by_params = {}
2673 if fget.get(groupby):
2674 groupby_type = fget[groupby]['type']
2675 if groupby_type in ('date', 'datetime'):
2676 if groupby_function:
2677 interval = groupby_function
2681 if interval == 'day':
2682 display_format = 'dd MMMM YYYY'
2683 elif interval == 'week':
2684 display_format = "'W'w"
2685 elif interval == 'month':
2686 display_format = 'MMMM'
2687 elif interval == 'quarter':
2688 display_format = 'QQQ'
2689 elif interval == 'year':
2690 display_format = 'YYYY'
2692 qualified_groupby_field = "date_trunc('%s',%s)" % (interval, qualified_groupby_field)
2693 flist = "%s as %s " % (qualified_groupby_field, groupby)
2694 elif groupby_type == 'boolean':
2695 qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
2696 flist = "%s as %s " % (qualified_groupby_field, groupby)
2698 flist = qualified_groupby_field
2700 # Don't allow arbitrary values, as this would be a SQL injection vector!
2701 raise except_orm(_('Invalid group_by'),
2702 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
2704 aggregated_fields = [
2706 if f not in ('id', 'sequence')
2707 if fget[f]['type'] in ('integer', 'float')
2708 if (f in self._columns and getattr(self._columns[f], '_classic_write'))]
2709 for f in aggregated_fields:
2710 group_operator = fget[f].get('group_operator', 'sum')
2713 qualified_field = '"%s"."%s"' % (self._table, f)
2714 flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)
2716 gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''
2718 from_clause, where_clause, where_clause_params = query.get_sql()
2719 where_clause = where_clause and ' WHERE ' + where_clause
2720 limit_str = limit and ' limit %d' % limit or ''
2721 offset_str = offset and ' offset %d' % offset or ''
2722 if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
2724 cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
2727 for r in cr.dictfetchall():
2728 for fld, val in r.items():
2729 if val is None: r[fld] = False
2730 alldata[r['id']] = r
2733 order = orderby or groupby
2734 data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=order, context=context)
2736 # the IDs of records that have groupby field value = False or '' should be included too
2737 data_ids += set(alldata.keys()).difference(data_ids)
2740 data = self.read(cr, uid, data_ids, [groupby], context=context)
2741 # restore order of the search as read() uses the default _order (this is only for groups, so the footprint of data should be small):
2742 data_dict = dict((d['id'], d[groupby] ) for d in data)
2743 result = [{'id': i, groupby: data_dict[i]} for i in data_ids]
2745 result = [{'id': i} for i in data_ids]
2749 d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
2750 if not isinstance(groupby_list, (str, unicode)):
2751 if groupby or not context.get('group_by_no_leaf', False):
2752 d['__context'] = {'group_by': groupby_list[1:]}
2753 if groupby and groupby in fget:
2754 if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
2755 groupby_datetime = alldata[d['id']][groupby]
2756 if isinstance(groupby_datetime, basestring):
2757 _default = datetime.datetime(1970, 1, 1) # force starts of month
2758 groupby_datetime = dateutil.parser.parse(groupby_datetime, default=_default)
2759 d[groupby] = babel.dates.format_date(
2760 groupby_datetime, format=display_format, locale=context.get('lang', 'en_US'))
2761 domain_dt_begin = groupby_datetime
2762 if interval == 'quarter':
2763 domain_dt_end = groupby_datetime + dateutil.relativedelta.relativedelta(months=3)
2764 elif interval == 'month':
2765 domain_dt_end = groupby_datetime + dateutil.relativedelta.relativedelta(months=1)
2766 elif interval == 'week':
2767 domain_dt_end = groupby_datetime + datetime.timedelta(days=7)
2768 elif interval == 'day':
2769 domain_dt_end = groupby_datetime + datetime.timedelta(days=1)
2771 domain_dt_end = groupby_datetime + dateutil.relativedelta.relativedelta(years=1)
2772 d['__domain'] = [(groupby, '>=', domain_dt_begin.strftime('%Y-%m-%d')), (groupby, '<', domain_dt_end.strftime('%Y-%m-%d'))] + domain
2773 del alldata[d['id']][groupby]
2774 d.update(alldata[d['id']])
2777 if groupby and groupby in self._group_by_full:
2778 result = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
2779 aggregated_fields, result, read_group_order=order,
2784 def _inherits_join_add(self, current_model, parent_model_name, query):
# Add the SELECT/JOIN clauses needed to reach an ``_inherits`` parent table.
# The join condition is current_model.<inherits_field> = parent.id, and
# query.add_join() is responsible for avoiding duplicate joins.
2786 Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
2787 :param current_model: current model object
2788 :param parent_model_name: name of the parent model for which the clauses should be added
2789 :param query: query object on which the JOIN should be added
2791 inherits_field = current_model._inherits[parent_model_name]
2792 parent_model = self.pool[parent_model_name]
2793 parent_alias, parent_alias_statement = query.add_join((current_model._table, parent_model._table, inherits_field, 'id', inherits_field), implicit=True)
# NOTE(review): callers such as _inherits_join_calc rely on this method's
# return value (the parent alias); the `return parent_alias` line is not
# visible in this excerpt -- confirm against the full file.
2796 def _inherits_join_calc(self, field, query):
2798 Adds missing table select and join clause(s) to ``query`` for reaching
2799 the field coming from an '_inherits' parent table (no duplicates).
2801 :param field: name of inherited field to reach
2802 :param query: query object on which the JOIN should be added
2803 :return: qualified name of field, to be used in SELECT clause
2805 current_table = self
2806 parent_alias = '"%s"' % current_table._table
2807 while field in current_table._inherit_fields and not field in current_table._columns:
2808 parent_model_name = current_table._inherit_fields[field][0]
2809 parent_table = self.pool[parent_model_name]
2810 parent_alias = self._inherits_join_add(current_table, parent_model_name, query)
2811 current_table = parent_table
2812 return '%s."%s"' % (parent_alias, field)
2814 def _parent_store_compute(self, cr):
# Rebuild the nested-set values (parent_left/parent_right) for the whole
# table through a recursive depth-first traversal ordered by _parent_order.
# NOTE(review): the body of the `if not self._parent_store:` guard and the
# if/else and return lines inside browse_rec are not visible in this excerpt.
2815 if not self._parent_store:
2817 _logger.info('Computing parent left and right for table %s...', self._table)
2818 def browse_rec(root, pos=0):
# Recursively assign parent_left=pos and parent_right=pos2 for `root`,
# returning the next free position for the caller.
2820 where = self._parent_name+'='+str(root)
2822 where = self._parent_name+' IS NULL'
2823 if self._parent_order:
2824 where += ' order by '+self._parent_order
2825 cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
2827 for id in cr.fetchall():
2828 pos2 = browse_rec(id[0], pos2)
2829 cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
# Start the traversal from every root record (records whose parent is NULL).
2831 query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
2832 if self._parent_order:
2833 query += ' order by ' + self._parent_order
2836 for (root,) in cr.fetchall():
2837 pos = browse_rec(root, pos)
2840 def _update_store(self, cr, f, k):
# Recompute and persist the values of stored function field `k` (column
# object `f`) for every record of the table, batching the ids in chunks of
# AUTOINIT_RECALCULATE_STORED_FIELDS (batch loop lines not visible here).
2841 _logger.info("storing computed values of fields.function '%s'", k)
2842 ss = self._columns[k]._symbol_set
2843 update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
2844 cr.execute('select id from '+self._table)
2845 ids_lst = map(lambda x: x[0], cr.fetchall())
2847 iids = ids_lst[:AUTOINIT_RECALCULATE_STORED_FIELDS]
2848 ids_lst = ids_lst[AUTOINIT_RECALCULATE_STORED_FIELDS:]
# f.get() computes the function field for the batch as an {id: value} dict.
2849 res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
2850 for key, val in res.items():
2853 # if val is a many2one, just write the ID
2854 if type(val) == tuple:
2856 if val is not False:
# ss[1] converts the Python value to its SQL representation.
2857 cr.execute(update_query, (ss[1](val), key))
2859 def _check_selection_field_value(self, cr, uid, field, value, context=None):
2860 """Raise except_orm if value is not among the valid values for the selection field"""
# For 'reference' fields the value has the form 'model,id'; the id part must
# parse as a non-zero integer (the try/except lines are not visible here).
2861 if self._columns[field]._type == 'reference':
2862 val_model, val_id_str = value.split(',', 1)
2865 val_id = long(val_id_str)
2869 raise except_orm(_('ValidateError'),
2870 _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
# The selection attribute is either a static list of (value, label) pairs or
# a callable returning one; both branches accept a matching value silently.
2874 if isinstance(self._columns[field].selection, (tuple, list)):
2875 if val in dict(self._columns[field].selection):
2877 elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
2879 raise except_orm(_('ValidateError'),
2880 _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
2882 def _check_removed_columns(self, cr, log=False):
2883 # iterate on the database columns to drop the NOT NULL constraints
2884 # of fields which were required but have been removed (or will be added by another module)
# `columns` = names that legitimately exist: stored columns + magic columns.
2885 columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
2886 columns += MAGIC_COLUMNS
2887 cr.execute("SELECT a.attname, a.attnotnull"
2888 " FROM pg_class c, pg_attribute a"
2889 " WHERE c.relname=%s"
2890 " AND c.oid=a.attrelid"
2891 " AND a.attisdropped=%s"
2892 " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
# NOTE(review): the trailing comma after execute() turns this statement into
# a discarded 1-tuple expression -- harmless but probably unintended.
2893 " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
2895 for column in cr.dictfetchall():
2897 _logger.debug("column %s is in the table %s but not in the corresponding object %s",
2898 column['attname'], self._table, self._name)
2899 if column['attnotnull']:
2900 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
2901 _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
2902 self._table, column['attname'])
2904 def _save_constraint(self, cr, constraint_name, type):
2906 Record the creation of a constraint for this model, to make it possible
2907 to delete it later when the module is uninstalled. Type can be either
2908 'f' or 'u' depending on the constraint being a foreign key or not.
2910 if not self._module:
2911 # no need to save constraints for custom models as they're not part
# (of any module; the early-return line is not visible in this excerpt)
2914 assert type in ('f', 'u')
# Skip the INSERT when the (constraint, module) pair is already recorded;
# the cr.execute(""" opener for this SELECT is not visible in this excerpt.
2916 SELECT 1 FROM ir_model_constraint, ir_module_module
2917 WHERE ir_model_constraint.module=ir_module_module.id
2918 AND ir_model_constraint.name=%s
2919 AND ir_module_module.name=%s
2920 """, (constraint_name, self._module))
2923 INSERT INTO ir_model_constraint
2924 (name, date_init, date_update, module, model, type)
2925 VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
2926 (SELECT id FROM ir_module_module WHERE name=%s),
2927 (SELECT id FROM ir_model WHERE model=%s), %s)""",
2928 (constraint_name, self._module, self._name, type))
2930 def _save_relation_table(self, cr, relation_table):
2932 Record the creation of a many2many for this model, to make it possible
2933 to delete it later when the module is uninstalled.
# Only INSERT when the (relation_table, module) pair is not known yet; the
# cr.execute(""" opener for this SELECT is not visible in this excerpt.
2936 SELECT 1 FROM ir_model_relation, ir_module_module
2937 WHERE ir_model_relation.module=ir_module_module.id
2938 AND ir_model_relation.name=%s
2939 AND ir_module_module.name=%s
2940 """, (relation_table, self._module))
2942 cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
2943 VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
2944 (SELECT id FROM ir_module_module WHERE name=%s),
2945 (SELECT id FROM ir_model WHERE model=%s))""",
2946 (relation_table, self._module, self._name))
# checked version: for direct m2o starting from `self`
def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
    """Record a pending foreign key (self._table.source_field -> dest_model)
    after validating the transient/persistent combination of both models."""
    source_is_transient = self.is_transient()
    dest_is_transient = dest_model.is_transient()
    assert source_is_transient or not dest_is_transient, \
        'Many2One relationships from non-transient Model to TransientModel are forbidden'
    if source_is_transient and not dest_is_transient:
        # TransientModel relationships to regular Models are annoying
        # usually because they could block deletion due to the FKs.
        # So unless stated otherwise we default them to ondelete=cascade.
        ondelete = ondelete or 'cascade'
    fk_def = (self._table, source_field, dest_model._table, ondelete or 'set null')
    self._foreign_keys.add(fk_def)
    _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
# unchecked version: for custom cases, such as m2m relationships
def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
    """Record a pending foreign key without any transient-model validation
    (used e.g. for m2m relation tables)."""
    on_delete_rule = ondelete or 'set null'
    fk_def = (source_table, source_field, dest_model._table, on_delete_rule)
    self._foreign_keys.add(fk_def)
    _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
2967 def _drop_constraint(self, cr, source_table, constraint_name):
2968 cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
2970 def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
2971 # Find FK constraint(s) currently established for the m2o field,
2972 # and see whether they are stale or not
2973 cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
2974 cl2.relname as foreign_table
2975 FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
2976 pg_attribute as att1, pg_attribute as att2
2977 WHERE con.conrelid = cl1.oid
2978 AND cl1.relname = %s
2979 AND con.confrelid = cl2.oid
2980 AND array_lower(con.conkey, 1) = 1
2981 AND con.conkey[1] = att1.attnum
2982 AND att1.attrelid = cl1.oid
2983 AND att1.attname = %s
2984 AND array_lower(con.confkey, 1) = 1
2985 AND con.confkey[1] = att2.attnum
2986 AND att2.attrelid = cl2.oid
2987 AND att2.attname = %s
2988 AND con.contype = 'f'""", (source_table, source_field, 'id'))
2989 constraints = cr.dictfetchall()
2991 if len(constraints) == 1:
2992 # Is it the right constraint?
# NOTE(review): a `cons = constraints[0]` assignment is expected just before
# this comparison but is not visible in this excerpt.
2994 if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
2995 or cons['foreign_table'] != dest_model._table:
2996 # Wrong FK: drop it and recreate
2997 _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
2998 source_table, cons['constraint_name'])
2999 self._drop_constraint(cr, source_table, cons['constraint_name'])
3001 # it's all good, nothing to do!
# NOTE(review): the early-return and `elif constraints:` lines are not
# visible in this excerpt.
3004 # Multiple FKs found for the same field, drop them all, and re-create
3005 for cons in constraints:
3006 _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
3007 source_table, cons['constraint_name'])
3008 self._drop_constraint(cr, source_table, cons['constraint_name'])
3010 # (re-)create the FK
3011 self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
3015 def _auto_init(self, cr, context=None):
# Create or upgrade this model's database schema so it matches _columns.
# NOTE(review): many short control-flow lines (else:/try:/continue/return)
# of the original are not visible in this excerpt; the statements below keep
# their original relative order.
3018 Call _field_create and, unless _auto is False:
3020 - create the corresponding table in database for the model,
3021 - possibly add the parent columns in database,
3022 - possibly add the columns 'create_uid', 'create_date', 'write_uid',
3023 'write_date' in database if _log_access is True (the default),
3024 - report on database columns no more existing in _columns,
3025 - remove no more existing not null constraints,
3026 - alter existing database columns to match _columns,
3027 - create database tables to match _columns,
3028 - add database indices to match _columns,
3029 - save in self._foreign_keys a list a foreign keys to create (see
3033 self._foreign_keys = set()
3034 raise_on_invalid_object_name(self._name)
3037 store_compute = False
3039 update_custom_fields = context.get('update_custom_fields', False)
3040 self._field_create(cr, context=context)
3041 create = not self._table_exist(cr)
3045 self._create_table(cr)
# Nested-set hierarchy support: add parent_left/parent_right when missing
# and schedule a full recomputation at the end of this method.
3048 if self._parent_store:
3049 if not self._parent_columns_exist(cr):
3050 self._create_parent_columns(cr)
3051 store_compute = True
3053 # Create the create_uid, create_date, write_uid, write_date, columns if desired.
3054 if self._log_access:
3055 self._add_log_columns(cr)
3057 self._check_removed_columns(cr, log=False)
3059 # iterate on the "object columns"
3060 column_data = self._select_column_data(cr)
3062 for k, f in self._columns.iteritems():
3063 if k in MAGIC_COLUMNS:
3065 # Don't update custom (also called manual) fields
3066 if f.manual and not update_custom_fields:
# one2many/many2many fields have no column of their own in this table.
3069 if isinstance(f, fields.one2many):
3070 self._o2m_raise_on_missing_reference(cr, f)
3072 elif isinstance(f, fields.many2many):
3073 self._m2m_raise_or_create_relation(cr, f)
3076 res = column_data.get(k)
3078 # The field is not found as-is in database, try if it
3079 # exists with an old name.
3080 if not res and hasattr(f, 'oldname'):
3081 res = column_data.get(f.oldname)
3083 cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
3085 column_data[k] = res
3086 _schema.debug("Table '%s': renamed column '%s' to '%s'",
3087 self._table, f.oldname, k)
3089 # The field already exists in database. Possibly
3090 # change its type, rename it, drop it or change its
3093 f_pg_type = res['typname']
3094 f_pg_size = res['size']
3095 f_pg_notnull = res['attnotnull']
# A non-stored function field must not keep a real column behind.
3096 if isinstance(f, fields.function) and not f.store and\
3097 not getattr(f, 'nodrop', False):
3098 _logger.info('column %s (%s) in table %s removed: converted to a function !\n',
3099 k, f.string, self._table)
3100 cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
3102 _schema.debug("Table '%s': dropped column '%s' with cascade",
3106 f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
# Safe in-database type conversions as
# (current_pg_type, new_field_type, new_sql_type, sql_cast) tuples.
# NOTE(review): the list opener (e.g. `casts = [`) is not visible here.
3111 ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
3112 ('varchar', 'text', 'TEXT', ''),
3113 ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
3114 ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
3115 ('timestamp', 'date', 'date', '::date'),
3116 ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
3117 ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
# Growing (or removing the limit of) a varchar is done by copying the data
# through a temporary column.
3119 if f_pg_type == 'varchar' and f._type == 'char' and ((f.size is None and f_pg_size) or f_pg_size < f.size):
3120 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
3121 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
3122 cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
3123 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
3125 _schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
3126 self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited')
# Apply the matching cast from the conversion table above (loop over `c`).
3128 if (f_pg_type==c[0]) and (f._type==c[1]):
3129 if f_pg_type != f_obj_type:
3131 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
3132 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
3133 cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
3134 cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
3136 _schema.debug("Table '%s': column '%s' changed type from %s to %s",
3137 self._table, k, c[0], c[1])
# Incompatible type change: preserve the old data in a renamed column
# "<name>_moved<i>" and create a fresh column with the new type.
3140 if f_pg_type != f_obj_type:
3144 newname = k + '_moved' + str(i)
3145 cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
3146 "WHERE c.relname=%s " \
3147 "AND a.attname=%s " \
3148 "AND c.oid=a.attrelid ", (self._table, newname))
3149 if not cr.fetchone()[0]:
3153 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
3154 cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
3155 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
3156 cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
3157 _schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
3158 self._table, k, f_pg_type, f._type, newname)
3160 # if the field is required and hasn't got a NOT NULL constraint
3161 if f.required and f_pg_notnull == 0:
3162 # set the field to the default value if any
3163 if k in self._defaults:
3164 if callable(self._defaults[k]):
3165 default = self._defaults[k](self, cr, SUPERUSER_ID, context)
3167 default = self._defaults[k]
3169 if default is not None:
3170 ss = self._columns[k]._symbol_set
3171 query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
3172 cr.execute(query, (ss[1](default),))
3173 # add the NOT NULL constraint
3176 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
3178 _schema.debug("Table '%s': column '%s': added NOT NULL constraint",
3181 msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
3182 "If you want to have it, you should update the records and execute manually:\n"\
3183 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
3184 _schema.warning(msg, self._table, k, self._table, k)
3186 elif not f.required and f_pg_notnull == 1:
3187 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
3189 _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
# Keep the single-column index in sync with the field's `select` flag.
3192 indexname = '%s_%s_index' % (self._table, k)
3193 cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
3194 res2 = cr.dictfetchall()
3195 if not res2 and f.select:
3196 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
3198 if f._type == 'text':
3199 # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
3200 msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\
3201 "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
3202 " because there is a length limit for indexable btree values!\n"\
3203 "Use a search view instead if you simply want to make the field searchable."
3204 _schema.warning(msg, self._table, f._type, k)
3205 if res2 and not f.select:
3206 cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
3208 msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
3209 _schema.debug(msg, self._table, k, f._type)
# Re-check (and possibly rebuild) the FK of existing many2one columns.
3211 if isinstance(f, fields.many2one):
3212 dest_model = self.pool[f._obj]
3213 if dest_model._table != 'ir_actions':
3214 self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete)
3216 # The field doesn't exist in database. Create it if necessary.
3218 if not isinstance(f, fields.function) or f.store:
3219 # add the missing field
3220 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
3221 cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
3222 _schema.debug("Table '%s': added column '%s' with definition=%s",
3223 self._table, k, get_pg_type(f)[1])
# Initialize the new column with the field's default on existing rows.
3226 if not create and k in self._defaults:
3227 if callable(self._defaults[k]):
3228 default = self._defaults[k](self, cr, SUPERUSER_ID, context)
3230 default = self._defaults[k]
3232 ss = self._columns[k]._symbol_set
3233 query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
3234 cr.execute(query, (ss[1](default),))
3236 _logger.debug("Table '%s': setting default value of new column %s", self._table, k)
3238 # remember the functions to call for the stored fields
3239 if isinstance(f, fields.function):
3241 if f.store is not True: # i.e. if f.store is a dict
3242 order = f.store[f.store.keys()[0]][2]
3243 todo_end.append((order, self._update_store, (f, k)))
3245 # and add constraints if needed
3246 if isinstance(f, fields.many2one):
3247 if f._obj not in self.pool:
3248 raise except_orm('Programming Error', 'There is no reference available for %s' % (f._obj,))
3249 dest_model = self.pool[f._obj]
3250 ref = dest_model._table
3251 # ir_actions is inherited so foreign key doesn't work on it
3252 if ref != 'ir_actions':
3253 self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
3255 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
3259 cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
3260 _schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
3263 msg = "WARNING: unable to set column %s of table %s not null !\n"\
3264 "Try to re-run: openerp-server --update=module\n"\
3265 "If it doesn't work, update records and execute manually:\n"\
3266 "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
3267 _logger.warning(msg, k, self._table, self._table, k)
# When _auto is False the table is only checked for existence, not built.
3271 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
3272 create = not bool(cr.fetchone())
3274 cr.commit() # start a new transaction
3277 self._add_sql_constraints(cr)
3280 self._execute_sql(cr)
3283 self._parent_store_compute(cr)
# NOTE(review): the original ends with `return todo_end`; that line is not
# visible in this excerpt.
3288 def _auto_end(self, cr, context=None):
3289 """ Create the foreign keys recorded by _auto_init. """
3290 for t, k, r, d in self._foreign_keys:
3291 cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
3292 self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
# Reset the accumulator so a later _auto_init starts from a clean state.
# NOTE(review): a line (likely `cr.commit()`) between the loop and this
# delete is not visible in this excerpt.
3294 del self._foreign_keys
3297 def _table_exist(self, cr):
# Truthy (non-zero rowcount) when a table or view named self._table exists.
3298 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
# NOTE(review): the `return cr.rowcount` line is not visible in this excerpt.
def _create_table(self, cr):
    """Create the model's table with only its serial `id` primary key, and
    attach the model description as a SQL comment on the table."""
    table = self._table
    cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id))' % (table,))
    # The description is passed as a query parameter, not interpolated.
    cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % table), (self._description,))
    _schema.debug("Table '%s': created", table)
3308 def _parent_columns_exist(self, cr):
# Check for the parent_left column only (parent_right is assumed to have
# been created together with it).
3309 cr.execute("""SELECT c.relname
3310 FROM pg_class c, pg_attribute a
3311 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
3312 """, (self._table, 'parent_left'))
# NOTE(review): the `return cr.rowcount` line is not visible in this excerpt.
3316 def _create_parent_columns(self, cr):
# Add the nested-set columns, and complain loudly (via _logger.error) when
# the model does not declare matching indexed integer fields or a safe
# ondelete rule on the parent m2o column.
# NOTE(review): several continuation lines (e.g. the `self._name)` argument
# of the _logger.error calls) are not visible in this excerpt.
3317 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
3318 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
3319 if 'parent_left' not in self._columns:
3320 _logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
3322 _schema.debug("Table '%s': added column '%s' with definition=%s",
3323 self._table, 'parent_left', 'INTEGER')
3324 elif not self._columns['parent_left'].select:
3325 _logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
3327 if 'parent_right' not in self._columns:
3328 _logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
3330 _schema.debug("Table '%s': added column '%s' with definition=%s",
3331 self._table, 'parent_right', 'INTEGER')
3332 elif not self._columns['parent_right'].select:
3333 _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
3335 if self._columns[self._parent_name].ondelete not in ('cascade', 'restrict'):
3336 _logger.error("The column %s on object %s must be set as ondelete='cascade' or 'restrict'",
3337 self._parent_name, self._name)
3342 def _add_log_columns(self, cr):
# Ensure each audit column (create_uid/create_date/write_uid/write_date)
# exists, adding it with the SQL definition from LOG_ACCESS_COLUMNS when
# missing. The cr.execute(""" opener of the existence check and the
# `if not cr.rowcount:` guard are not visible in this excerpt.
3343 for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
3346 FROM pg_class c, pg_attribute a
3347 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
3348 """, (self._table, field))
3350 cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
3352 _schema.debug("Table '%s': added column '%s' with definition=%s",
3353 self._table, field, field_def)
3356 def _select_column_data(self, cr):
3357 # attlen is the number of bytes necessary to represent the type when
3358 # the type has a fixed size. If the type has a varying size attlen is
3359 # -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
3360 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \
3361 "FROM pg_class c,pg_attribute a,pg_type t " \
3362 "WHERE c.relname=%s " \
3363 "AND c.oid=a.attrelid " \
3364 "AND a.atttypid=t.oid", (self._table,))
3365 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
3368 def _o2m_raise_on_missing_reference(self, cr, f):
3369 # TODO this check should be a method on fields.one2many.
3370 if f._obj in self.pool:
3371 other = self.pool[f._obj]
3372 # TODO the condition could use fields_get_keys().
3373 if f._fields_id not in other._columns.keys():
3374 if f._fields_id not in other._inherit_fields.keys():
3375 raise except_orm('Programming Error', "There is no reference field '%s' found for '%s'" % (f._fields_id, f._obj,))
3377 def _m2m_raise_or_create_relation(self, cr, f):
# Create (or validate) the m2m relation table for field `f`: two NOT NULL
# integer columns, a UNIQUE pair constraint, one FK per side (unless the
# target is a SQL view) and one index per column.
3378 m2m_tbl, col1, col2 = f._sql_names(self)
3379 self._save_relation_table(cr, m2m_tbl)
3380 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
3381 if not cr.dictfetchall():
3382 if f._obj not in self.pool:
3383 raise except_orm('Programming Error', 'Many2Many destination model does not exist: `%s`' % (f._obj,))
3384 dest_model = self.pool[f._obj]
3385 ref = dest_model._table
3386 cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s"))' % (m2m_tbl, col1, col2, col1, col2))
3387 # create foreign key references with ondelete=cascade, unless the targets are SQL views
3388 cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
3389 if not cr.fetchall():
3390 self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
3391 cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
3392 if not cr.fetchall():
3393 self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')
3395 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
3396 cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
3397 cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
# NOTE(review): a line between the COMMENT and the debug log (likely
# `cr.commit()`) is not visible in this excerpt.
3399 _schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
3402 def _add_sql_constraints(self, cr):
3405 Modify this model's database table constraints so they match the one in
# (_sql_constraints -- docstring continuation not visible in this excerpt)
3409 def unify_cons_text(txt):
# Normalize pg_get_constraintdef() output for a tolerant text comparison.
3410 return txt.lower().replace(', ',',').replace(' (','(')
3412 for (key, con, _) in self._sql_constraints:
3413 conname = '%s_%s' % (self._table, key)
3415 self._save_constraint(cr, conname, 'u')
3416 cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
3417 existing_constraints = cr.dictfetchall()
# sql_actions maps 'drop'/'add' to dicts of {query, msg_ok, msg_err, order,
# execute}. NOTE(review): the dict-literal opening lines are not visible in
# this excerpt.
3421 'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
3422 'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
3423 self._table, conname, con),
3424 'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
3429 'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
3430 'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
3431 'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
3437 if not existing_constraints:
3438 # constraint does not exists:
3439 sql_actions['add']['execute'] = True
3440 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
3441 elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
3442 # constraint exists but its definition has changed:
3443 sql_actions['drop']['execute'] = True
3444 sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
3445 sql_actions['add']['execute'] = True
3446 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
3448 # we need to add the constraint:
# Execute the selected actions in deterministic 'order', logging outcome;
# the try/except lines around cr.execute are not visible in this excerpt.
3449 sql_actions = [item for item in sql_actions.values()]
3450 sql_actions.sort(key=lambda x: x['order'])
3451 for sql_action in [action for action in sql_actions if action['execute']]:
3453 cr.execute(sql_action['query'])
3455 _schema.debug(sql_action['msg_ok'])
3457 _schema.warning(sql_action['msg_err'])
3461 def _execute_sql(self, cr):
3462 """ Execute the SQL code from the _sql attribute (if any)."""
3463 if hasattr(self, "_sql"):
# Statements are split naively on ';' -- a literal containing ';' would
# break this; acceptable for the module-provided _sql snippets.
3464 for line in self._sql.split(';'):
3465 line2 = line.replace('\n', '').strip()
# NOTE(review): the `if line2:` / `cr.execute(line2)` / `cr.commit()` lines
# are not visible in this excerpt.
3471 # Update objects that uses this one to update their _inherits fields
3474 def _inherits_reload_src(self):
3475 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3476 for obj in self.pool.models.values():
3477 if self._name in obj._inherits:
3478 obj._inherits_reload()
3481 def _inherits_reload(self):
3482 """ Recompute the _inherit_fields mapping.
3484 This will also call itself on each inherits'd child model.
# NOTE(review): the `res = {}` initialization is not visible in this excerpt.
3488 for table in self._inherits:
3489 other = self.pool[table]
# Each entry maps a field name to
# (parent model, link m2o field, column object, original parent model).
3490 for col in other._columns.keys():
3491 res[col] = (table, self._inherits[table], other._columns[col], table)
3492 for col in other._inherit_fields.keys():
3493 res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
3494 self._inherit_fields = res
3495 self._all_columns = self._get_column_infos()
# Propagate the change downwards to models inheriting from this one.
3496 self._inherits_reload_src()
3499 def _get_column_infos(self):
3500 """Returns a dict mapping all fields names (direct fields and
3501 inherited field via _inherits) to a ``column_info`` struct
3502 giving detailed columns """
# Direct columns are added last so they override inherited ones.
# NOTE(review): the `result = {}` initialization and final `return result`
# lines are not visible in this excerpt.
3504 for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
3505 result[k] = fields.column_info(k, col, parent, m2o, original_parent)
3506 for k, col in self._columns.iteritems():
3507 result[k] = fields.column_info(k, col)
3511 def _inherits_check(self):
3512 for table, field_name in self._inherits.items():
3513 if field_name not in self._columns:
3514 _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, self._name)
3515 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
3516 required=True, ondelete="cascade")
3517 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() not in ("cascade", "restrict"):
3518 _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, self._name)
3519 self._columns[field_name].required = True
3520 self._columns[field_name].ondelete = "cascade"
3522 #def __getattr__(self, name):
3524 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3525 # (though inherits doesn't use Python inheritance).
3526 # Handles translating between local ids and remote ids.
3527 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3528 # when you have inherits.
3530 # for model, field in self._inherits.iteritems():
3531 # proxy = self.pool.get(model)
3532 # if hasattr(proxy, name):
3533 # attribute = getattr(proxy, name)
3534 # if not hasattr(attribute, '__call__'):
3538 # return super(orm, self).__getattr__(name)
3540 # def _proxy(cr, uid, ids, *args, **kwargs):
3541 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3542 # lst = [obj[field].id for obj in objects if obj[field]]
3543 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
3548 def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
3549 """ Return the definition of each field.
3551 The returned value is a dictionary (indiced by field name) of
3552 dictionaries. The _inherits'd fields are included. The string, help,
3553 and selection (if present) attributes are translated.
3555 :param cr: database cursor
3556 :param user: current user id
3557 :param allfields: list of fields
3558 :param context: context arguments, like lang, time zone
3559 :return: dictionary of field dictionaries, each one describing a field of the business object
3560 :raise AccessError: * if user has no create/write rights on the requested object
3566 write_access = self.check_access_rights(cr, user, 'write', raise_exception=False) \
3567 or self.check_access_rights(cr, user, 'create', raise_exception=False)
3571 translation_obj = self.pool.get('ir.translation')
3572 for parent in self._inherits:
3573 res.update(self.pool[parent].fields_get(cr, user, allfields, context))
3575 for f, field in self._columns.iteritems():
3576 if (allfields and f not in allfields) or \
3577 (field.groups and not self.user_has_groups(cr, user, groups=field.groups, context=context)):
3580 res[f] = fields.field_to_dict(self, cr, user, field, context=context)
3582 if not write_access:
3583 res[f]['readonly'] = True
3584 res[f]['states'] = {}
3586 if 'lang' in context:
3587 if 'string' in res[f]:
3588 res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context['lang'])
3590 res[f]['string'] = res_trans
3591 if 'help' in res[f]:
3592 help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context['lang'])
3594 res[f]['help'] = help_trans
3595 if 'selection' in res[f]:
3596 if isinstance(field.selection, (tuple, list)):
3597 sel = field.selection
3599 for key, val in sel:
3602 val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context['lang'], val)
3603 sel2.append((key, val2 or val))
3604 res[f]['selection'] = sel2
3608 def get_empty_list_help(self, cr, user, help, context=None):
3609 """ Generic method giving the help message displayed when having
3610 no result to display in a list or kanban view. By default it returns
3611 the help given in parameter that is generally the help message
3612 defined in the action.
3616 def check_field_access_rights(self, cr, user, operation, fields, context=None):
3618 Check the user access rights on the given fields. This raises Access
3619 Denied if the user does not have the rights. Otherwise it returns the
3620 fields (as is if the fields is not falsy, or the readable/writable
3621 fields if fields is falsy).
3624 """Predicate to test if the user has access to the given field name."""
3625 # Ignore requested field if it doesn't exist. This is ugly but
3626 # it seems to happen at least with 'name_alias' on res.partner.
3627 if field_name not in self._all_columns:
3629 field = self._all_columns[field_name].column
3630 if user != SUPERUSER_ID and field.groups:
3631 return self.user_has_groups(cr, user, groups=field.groups, context=context)
3635 fields = filter(p, self._all_columns.keys())
3637 filtered_fields = filter(lambda a: not p(a), fields)
3639 _logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s, fields: %s', operation, user, self._name, ', '.join(filtered_fields))
3642 _('The requested operation cannot be completed due to security restrictions. '
3643 'Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
3644 (self._description, operation))
3647 def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
3648 """ Read records with given ids with the given fields
3650 :param cr: database cursor
3651 :param user: current user id
3652 :param ids: id or list of the ids of the records to read
3653 :param fields: optional list of field names to return (default: all fields would be returned)
3654 :type fields: list (example ['field_name_1', ...])
3655 :param context: optional context dictionary - it may contains keys for specifying certain options
3656 like ``context_lang``, ``context_tz`` to alter the results of the call.
3657 A special ``bin_size`` boolean flag may also be passed in the context to request the
3658 value of all fields.binary columns to be returned as the size of the binary instead of its
3659 contents. This can also be selectively overriden by passing a field-specific flag
3660 in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
3661 Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
3662 :return: list of dictionaries((dictionary per record asked)) with requested field values
3663 :rtype: [{‘name_of_the_field’: value, ...}, ...]
3664 :raise AccessError: * if user has no read rights on the requested object
3665 * if user tries to bypass access rules for read on the requested object
3669 self.check_access_rights(cr, user, 'read')
3670 fields = self.check_field_access_rights(cr, user, 'read', fields)
3671 if isinstance(ids, (int, long)):
3675 select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
3676 result = self._read_flat(cr, user, select, fields, context, load)
3678 if isinstance(ids, (int, long)):
3679 return result and result[0] or False
    def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
        # Low-level implementation of read(): one SQL SELECT for the stored
        # columns, then post-processing for translations, _inherits parents,
        # function fields, and field-level read permissions.
        if fields_to_read is None:
            fields_to_read = self._columns.keys()

        # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
        fields_pre = [f for f in fields_to_read if
                      f == self.CONCURRENCY_CHECK_FIELD
                      or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                      ] + self._inherits.values()

        def convert_field(f):
            # Build the SELECT expression for one stored column.
            f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
            if f in ('create_date', 'write_date'):
                return "date_trunc('second', %s) as %s" % (f_qual, f)
            if f == self.CONCURRENCY_CHECK_FIELD:
                if self._log_access:
                    return "COALESCE(%s.write_date, %s.create_date, (now() at time zone 'UTC'))::timestamp AS %s" % (self._table, self._table, f,)
                return "(now() at time zone 'UTC')::timestamp AS %s" % (f,)
            if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                # only the size of the binary was requested, not its content
                return 'length(%s) as "%s"' % (f_qual, f)

        # Construct a clause for the security rules.
        # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
        # or will at least contain self._table.
        rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

        fields_pre2 = map(convert_field, fields_pre)
        order_by = self._parent_order or self._order
        select_fields = ','.join(fields_pre2 + ['%s.id' % self._table])
        query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
        query += " AND " + (' OR '.join(rule_clause))
        query += " ORDER BY " + order_by
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute(query, [tuple(sub_ids)] + rule_params)
            results = cr.dictfetchall()
            result_ids = [x['id'] for x in results]
            # record rules may have filtered out some of the requested ids
            self._check_record_rules_result_count(cr, user, sub_ids, result_ids, 'read', context=context)
        # no stored column requested: only check access and build bare id dicts
        self.check_access_rule(cr, user, ids, 'read', context=context)
        res = map(lambda x: {'id': x}, ids)

        if context.get('lang'):
            for f in fields_pre:
                # NOTE(review): this guard appears to have lost its body
                # (presumably a `continue` skipping the virtual field) — confirm
                if f == self.CONCURRENCY_CHECK_FIELD:
                if self._columns[f].translate:
                    ids = [x['id'] for x in res]
                    #TODO: optimize out of this loop
                    res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context['lang'], ids)
                    r[f] = res_trans.get(r['id'], False) or r[f]

        # fetch, via a recursive read(), the fields provided by each _inherits parent
        for table in self._inherits:
            col = self._inherits[table]
            cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
            res2 = self.pool[table].read(cr, user, [x[col] for x in res], cols, context, load)

            for record in res:
                if not record[col]: # if the record is deleted from _inherits table?
                record.update(res3[record[col]])
                if col not in fields_to_read:

        # all fields which need to be post-processed by a simple function (symbol_get)
        fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
        for f in fields_post:
            r[f] = self._columns[f]._symbol_get(r[f])
        ids = [x['id'] for x in res]

        # all non inherited fields for which the attribute whose name is in load is False
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

        # Compute POST fields
        # group function fields by their _multi key so multi-fields are
        # computed with a single call per group
        for f in fields_post:
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
        for key, val in todo.items():
            res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
            assert res2 is not None, \
                'The function field "%s" on the "%s" model returned None\n' \
                '(a dictionary was expected).' % (val[0], self._name)
            if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
            multi_fields = res2.get(record['id'],{})
            record[pos] = multi_fields.get(pos,[])
            # non-multi function fields are computed one field at a time
            res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
            record[f] = res2[record['id']]

        # Warn about deprecated fields now that fields_pre and fields_post are computed
        # Explicitly use list() because we may receive tuples
        for f in list(fields_pre) + list(fields_post):
            field_column = self._all_columns.get(f) and self._all_columns.get(f).column
            if field_column and field_column.deprecated:
                _logger.warning('Field %s.%s is deprecated: %s', self._name, f, field_column.deprecated)

        # Field-level read permissions: mask values of fields whose 'read'
        # groups do not include the current user.
        # NOTE(review): `vals` should be each record dict of `res`; the
        # enclosing per-record loop appears to be missing here — confirm
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
            for group in groups:
                module = group.split(".")[0]
                grp = group.split(".")[1]
                # does the user belong to this group?
                cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                           (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
                elif readonly[0][0] == 0:
            # replace the value with a type-compatible placeholder
            if type(vals[field]) == type([]):
            elif type(vals[field]) == type(0.0):
            elif type(vals[field]) == type(''):
                vals[field] = '=No Permission='
            if vals[field] is None:
3846 # TODO check READ access
    def perm_read(self, cr, user, ids, context=None, details=True):
        """
        Returns some metadata about the given records.

        :param details: if True, \*_uid fields are replaced with the name of the user
        :return: list of ownership dictionaries for each requested record
        :rtype: list of dictionaries with the following keys:

                * create_uid: user who created the record
                * create_date: date when the record was created
                * write_uid: last user who changed the record
                * write_date: date of the last change to the record
                * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
        """
        # remember whether a single id was passed rather than a list
        uniq = isinstance(ids, (int, long))
        if self._log_access:
            # audit-trail columns only exist when _log_access is enabled
            fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
        quoted_table = '"%s"' % self._table
        fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
        # LEFT JOIN on ir_model_data fetches the record's XML ID, if any
        query = '''SELECT %s, __imd.module, __imd.name
                   FROM %s LEFT JOIN ir_model_data __imd
                       ON (__imd.model = %%s and __imd.res_id = %s.id)
                   WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
        cr.execute(query, (self._name, tuple(ids)))
        res = cr.dictfetchall()
        # normalize NULLs to False
        r[key] = r[key] or False
        if details and key in ('write_uid', 'create_uid') and r[key]:
            # resolve the uid into a (id, name) pair for display; best effort
            r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
            pass # Leave the numeric uid there
        r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
        del r['name'], r['module']
    def _check_concurrency(self, cr, ids, context):
        # Optimistic concurrency check: compare the client-supplied timestamps
        # (stored in the context under CONCURRENCY_CHECK_FIELD) with the
        # records' write_date/create_date, raising if anything changed since
        # the client last read the record.
        # NOTE(review): the guard below appears to have lost its early-return
        # body — confirm against upstream
        if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
        check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)"
        for sub_ids in cr.split_for_in_conditions(ids):
            # client timestamps are keyed by "model,id"
            id_ref = "%s,%s" % (self._name, id)
            update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
            ids_to_check.extend([id, update_date])
            if not ids_to_check:
            # one OR'd check_clause per (id, timestamp) pair
            cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
            # mention the first one only to keep the error message readable
            raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
    def _check_record_rules_result_count(self, cr, uid, ids, result_ids, operation, context=None):
        """Verify the returned rows after applying record rules matches
        the length of `ids`, and raise an appropriate exception if it does not.
        """
        ids, result_ids = set(ids), set(result_ids)
        missing_ids = ids - result_ids
        # Attempt to distinguish record rule restriction vs deleted records,
        # to provide a more specific error message - check whether the missing
        # ids still exist in the table at all
        cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(missing_ids),))
        forbidden_ids = [x[0] for x in cr.fetchall()]
        # the missing ids are (at least partially) hidden by access rules
        # NOTE(review): the superuser branch appears to have lost its early
        # return — confirm against upstream
        if uid == SUPERUSER_ID:
        _logger.warning('Access Denied by record rules for operation: %s on record ids: %r, uid: %s, model: %s', operation, forbidden_ids, uid, self._name)
        raise except_orm(_('Access Denied'),
                         _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
                         (self._description, operation))
        # If we get here, the missing_ids are not in the database
        if operation in ('read','unlink'):
            # No need to warn about deleting an already deleted record.
            # And no error when reading a record that was deleted, to prevent spurious
            # errors for non-transactional search/read sequences coming from clients
        _logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name)
        raise except_orm(_('Missing document(s)'),
                         _('One of the documents you are trying to access has been deleted, please try again after refreshing.'))
3947 def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
3948 """Verifies that the operation given by ``operation`` is allowed for the user
3949 according to the access rights."""
3950 return self.pool.get('ir.model.access').check(cr, uid, self._name, operation, raise_exception)
    def check_access_rule(self, cr, uid, ids, operation, context=None):
        """Verifies that the operation given by ``operation`` is allowed for the user
        according to ir.rules.

        :param operation: one of ``write``, ``unlink``
        :raise except_orm: * if current ir.rules do not permit this operation.
        :return: None if the operation is allowed
        """
        # the superuser bypasses all record rules
        if uid == SUPERUSER_ID:
        if self.is_transient():
            # Only one single implicit access rule for transient models: owner only!
            # This is ok to hardcode because we assert that TransientModels always
            # have log_access enabled so that the create_uid column is always there.
            # And even with _inherits, these fields are always present in the local
            # table too, so no need for JOINs.
            # NOTE(review): this query literal lacks a "FROM %s" line — looks
            # truncated, confirm against upstream
            cr.execute("""SELECT distinct create_uid
                          WHERE id IN %%s""" % self._table, (tuple(ids),))
            uids = [x[0] for x in cr.fetchall()]
            if len(uids) != 1 or uids[0] != uid:
                raise except_orm(_('Access Denied'),
                                 _('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,))
        # regular models: evaluate the ir.rule domain for this operation
        where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
        where_clause = ' and ' + ' and '.join(where_clause)
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                       ' WHERE ' + self._table + '.id IN %s' + where_clause,
                       [sub_ids] + where_params)
            returned_ids = [x['id'] for x in cr.dictfetchall()]
            # any id filtered out by the rules triggers an exception
            self._check_record_rules_result_count(cr, uid, sub_ids, returned_ids, operation, context=context)
3987 def create_workflow(self, cr, uid, ids, context=None):
3988 """Create a workflow instance for each given record IDs."""
3989 from openerp import workflow
3991 workflow.trg_create(uid, self._name, res_id, cr)
3994 def delete_workflow(self, cr, uid, ids, context=None):
3995 """Delete the workflow instances bound to the given record IDs."""
3996 from openerp import workflow
3998 workflow.trg_delete(uid, self._name, res_id, cr)
4001 def step_workflow(self, cr, uid, ids, context=None):
4002 """Reevaluate the workflow instances of the given record IDs."""
4003 from openerp import workflow
4005 workflow.trg_write(uid, self._name, res_id, cr)
4008 def signal_workflow(self, cr, uid, ids, signal, context=None):
4009 """Send given workflow signal and return a dict mapping ids to workflow results"""
4010 from openerp import workflow
4013 result[res_id] = workflow.trg_validate(uid, self._name, res_id, signal, cr)
    def redirect_workflow(self, cr, uid, old_new_ids, context=None):
        """ Rebind the workflow instance bound to the given 'old' record IDs to
        the given 'new' IDs. (``old_new_ids`` is a list of pairs ``(old, new)``.)
        """
        from openerp import workflow
        for old_id, new_id in old_new_ids:
            workflow.trg_redirect(uid, self._name, old_id, new_id, cr)
    def unlink(self, cr, uid, ids, context=None):
        """
        Delete records with given ids

        :param cr: database cursor
        :param uid: current user id
        :param ids: id or list of ids
        :param context: (optional) context arguments, like lang, time zone

        :raise AccessError: * if user has no unlink rights on the requested object
                            * if user tries to bypass access rules for unlink on the requested object
        :raise UserError: if the record is default property for other records
        """
        # NOTE(review): the single-id normalization body appears to be
        # missing here (presumably ``ids = [ids]``) — confirm
        if isinstance(ids, (int, long)):

        # stored function fields that may need recomputation after the delete
        result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context)

        self._check_concurrency(cr, ids, context)

        self.check_access_rights(cr, uid, 'unlink')

        ir_property = self.pool.get('ir.property')

        # Check if the records are used as default properties.
        domain = [('res_id', '=', False),
                  ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
        if ir_property.search(cr, uid, domain, context=context):
            raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))

        # Delete the records' properties.
        property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context)
        ir_property.unlink(cr, uid, property_ids, context=context)

        self.delete_workflow(cr, uid, ids, context=context)

        self.check_access_rule(cr, uid, ids, 'unlink', context=context)
        pool_model_data = self.pool.get('ir.model.data')
        ir_values_obj = self.pool.get('ir.values')
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('delete from ' + self._table + ' ' \
                       'where id IN %s', (sub_ids,))

            # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
            # as these are not connected with real database foreign keys, and would be dangling references.
            # Note: following steps performed as admin to avoid access rights restrictions, and with no context
            # to avoid possible side-effects during admin calls.
            # Step 1. Calling unlink of ir_model_data only for the affected IDS
            reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
            # Step 2. Marching towards the real deletion of referenced records
            pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)

            # For the same reason, removing the record relevant to ir_values
            ir_value_ids = ir_values_obj.search(cr, uid,
                ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
            ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)

        # recompute stored function fields on other models that referenced
        # the deleted records
        for order, obj_name, store_ids, fields in result_store:
            if obj_name != self._name:
                obj = self.pool[obj_name]
                cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
                rids = map(lambda x: x[0], cr.fetchall())
                obj._store_set_values(cr, uid, rids, fields, context)
    def write(self, cr, user, ids, vals, context=None):
        """
        Update records with given ids with the given field values

        :param cr: database cursor
        :param user: current user id
        :param ids: object id or list of object ids to update according to **vals**
        :param vals: field values to update, e.g {'field_name': new_field_value, ...}
        :type vals: dictionary
        :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
        :type context: dictionary

        :raise AccessError: * if user has no write rights on the requested object
                            * if user tries to bypass access rules for write on the requested object
        :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
        :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

        **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:

            + For a many2many field, a list of tuples is expected.
              Here is the list of tuple that are accepted, with the corresponding semantics ::

                 (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
                 (1, ID, { values })    update the linked record with id = ID (write *values* on it)
                 (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
                 (3, ID)                cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
                 (4, ID)                link to existing record with id = ID (adds a relationship)
                 (5)                    unlink all (like using (3,ID) for all linked records)
                 (6, 0, [IDs])          replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)

                 [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]

            + For a one2many field, a list of tuples is expected.
              Here is the list of tuple that are accepted, with the corresponding semantics ::

                 (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
                 (1, ID, { values })    update the linked record with id = ID (write *values* on it)
                 (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)

                 [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]

            + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
            + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
        """
        self.check_field_access_rights(cr, user, 'write', vals.keys())
        # field-level write permissions: drop fields the user may not write
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
            elif field in self._inherit_fields:
                fobj = self._inherit_fields[field][2]
            # NOTE(review): `groups` is presumably fobj's 'write' group list;
            # the assignment and its guard appear to be missing here — confirm
            for group in groups:
                module = group.split(".")[0]
                grp = group.split(".")[1]
                cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                           (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
        # normalize a single id into a list
        if isinstance(ids, (int, long)):

        self._check_concurrency(cr, ids, context)
        self.check_access_rights(cr, user, 'write')

        # stored function fields that may need recomputation after the write
        result = self._store_get_values(cr, user, ids, vals.keys(), context) or []

        # No direct update of parent_left/right
        vals.pop('parent_left', None)
        vals.pop('parent_right', None)

        parents_changed = []
        parent_order = self._parent_order or self._order
        if self._parent_store and (self._parent_name in vals):
            # The parent_left/right computation may take up to
            # 5 seconds. No need to recompute the values if the
            # parent is the same.
            # Note: to respect parent_order, nodes must be processed in
            # order, so ``parents_changed`` must be ordered properly.
            parent_val = vals[self._parent_name]
            query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
                    (self._table, self._parent_name, self._parent_name, parent_order)
            cr.execute(query, (tuple(ids), parent_val))
            query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
                    (self._table, self._parent_name, parent_order)
            cr.execute(query, (tuple(ids),))
            parents_changed = map(operator.itemgetter(0), cr.fetchall())

        # split the written fields into: direct SQL update (upd0/upd1),
        # translatable/function fields handled later (upd_todo), and fields
        # belonging to _inherits parents (updend)
        totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
        field_column = self._all_columns.get(field) and self._all_columns.get(field).column
        if field_column and field_column.deprecated:
            _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
        if field in self._columns:
            if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
                if (not totranslate) or not self._columns[field].translate:
                    upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                    upd1.append(self._columns[field]._symbol_set[1](vals[field]))
                    direct.append(field)
                upd_todo.append(field)
            updend.append(field)
        if field in self._columns \
                and hasattr(self._columns[field], 'selection') \
            self._check_selection_field_value(cr, user, field, vals[field], context=context)

        if self._log_access:
            upd0.append('write_uid=%s')
            upd0.append("write_date=(now() at time zone 'UTC')")

        self.check_access_rule(cr, user, ids, 'write', context=context)
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
                       'where id IN %s', upd1 + [sub_ids])
            # fewer updated rows than ids means some records were deleted
            if cr.rowcount != len(sub_ids):
                raise except_orm(_('AccessError'),
                                 _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)

        if self._columns[f].translate:
            src_trans = self.pool[self._name].read(cr, user, ids, [f])[0][f]
            # Inserting value to DB
            context_wo_lang = dict(context, lang=None)
            self.write(cr, user, ids, {f: vals[f]}, context=context_wo_lang)
            self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)

        # call the 'set' method of fields which are not classic_write
        upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

        # default element in context must be removed when call a one2many or many2many
        rel_context = context.copy()
        for c in context.items():
            if c[0].startswith('default_'):
                del rel_context[c[0]]

        for field in upd_todo:
            result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []

        # forward fields owned by _inherits parents to the parent models
        unknown_fields = updend[:]
        for table in self._inherits:
            col = self._inherits[table]
            for sub_ids in cr.split_for_in_conditions(ids):
                cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                           'where id IN %s', (sub_ids,))
                nids.extend([x[0] for x in cr.fetchall()])

            if self._inherit_fields[val][0] == table:
                unknown_fields.remove(val)
            self.pool[table].write(cr, user, nids, v, context)

            'No such field(s) in model %s: %s.',
            self._name, ', '.join(unknown_fields))
        self._validate(cr, user, ids, context)

        # TODO: use _order to set dest at the right position and not first node of parent
        # We can't defer parent_store computation because the stored function
        # fields that are computer may refer (directly or indirectly) to
        # parent_left/right (via a child_of domain)
        self.pool._init_parent[self._name] = True
        order = self._parent_order or self._order
        parent_val = vals[self._parent_name]
        clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
        clause, params = '%s IS NULL' % (self._parent_name,), ()

        for id in parents_changed:
            cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
            pleft, pright = cr.fetchone()
            # size of the subtree being moved
            distance = pright - pleft + 1

            # Positions of current siblings, to locate proper insertion point;
            # this can _not_ be fetched outside the loop, as it needs to be refreshed
            # after each update, in case several nodes are sequentially inserted one
            # next to the other (i.e computed incrementally)
            cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
            parents = cr.fetchall()

            # Find Position of the element
            for (parent_pright, parent_id) in parents:
                position = parent_pright + 1

            # It's the first node of the parent
            cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
            position = cr.fetchone()[0] + 1

            # moving a node inside its own subtree would create a cycle
            if pleft < position <= pright:
                raise except_orm(_('UserError'), _('Recursivity Detected.'))

            if pleft < position:
                cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
                cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
                cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
            cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))

        # recompute stored function fields impacted by this write
        result += self._store_get_values(cr, user, ids, vals.keys(), context)
        for order, model_name, ids_to_update, fields_to_recompute in result:
            key = (model_name, tuple(fields_to_recompute))
            done.setdefault(key, {})
            # avoid to do several times the same computation
            for id in ids_to_update:
                if id not in done[key]:
                    done[key][id] = True
            self.pool[model_name]._store_set_values(cr, user, todo, fields_to_recompute, context)

        self.step_workflow(cr, user, ids, context=context)
4373 # TODO: Should set perm to user.xxx
    def create(self, cr, user, vals, context=None):
        """
        Create a new record for the model.

        The values for the new record are initialized using the ``vals``
        argument, and if necessary the result of ``default_get()``.

        :param cr: database cursor
        :param user: current user id
        :param vals: field values for new record, e.g {'field_name': field_value, ...}
        :type vals: dictionary
        :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
        :type context: dictionary
        :return: id of new record created
        :raise AccessError: * if user has no create rights on the requested object
                            * if user tries to bypass access rules for create on the requested object
        :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
        :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

        **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
        Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
        """
        # Transient ("wizard") models garbage-collect their expired records on every create.
        if self.is_transient():
            self._transient_vacuum(cr, user)
        self.check_access_rights(cr, user, 'create')
        # The log-access columns (create_uid/date, write_uid/date) are maintained
        # by the ORM itself: drop any caller-supplied value and warn about it.
        if self._log_access:
            for f in LOG_ACCESS_COLUMNS:
                if vals.pop(f, None) is not None:
                        'Field `%s` is not allowed when creating the model `%s`.',
        vals = self._add_missing_default_values(cr, user, vals, context)
        # Dispatch values between this table and each _inherits parent:
        # ``tocreate[parent_model]`` collects the values for the parent record.
        for v in self._inherits:
            if self._inherits[v] not in vals:
                    tocreate[v] = {'id': vals[self._inherits[v]]}
        # columns will contain a list of field defined as a tuple
        # tuple(field_name, format_string, field_value)
        # the tuple will be used by the string formatting for the INSERT
            ('id', "nextval('%s')" % self._sequence),
        for v in vals.keys():
            if v in self._inherit_fields and v not in self._columns:
                (table, col, col_detail, original_parent) = self._inherit_fields[v]
                tocreate[table][v] = vals[v]
                # values belonging neither here nor to a parent are collected
                # and reported in one warning below
                if (v not in self._inherit_fields) and (v not in self._columns):
                    unknown_fields.append(v)
            'No such field(s) in model %s: %s.',
            self._name, ', '.join(unknown_fields))
        # models without a sequence are reporting views: refuse record creation
        if not self._sequence:
                _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.')
        # Create or update the _inherits parent records first, and remember the
        # resulting foreign-key value to insert in this table's row.
        for table in tocreate:
            if self._inherits[table] in vals:
                del vals[self._inherits[table]]
            record_id = tocreate[table].pop('id', None)
            # When linking/creating parent records, force context without 'no_store_function' key that
            # defers stored functions computing, as these won't be computed in batch at the end of create().
            parent_context = dict(context)
            parent_context.pop('no_store_function', None)
            if record_id is None or not record_id:
                record_id = self.pool[table].create(cr, user, tocreate[table], context=parent_context)
                self.pool[table].write(cr, user, [record_id], tocreate[table], context=parent_context)
            columns.append((self._inherits[table], '%s', record_id))
        #Start : Set bool fields to be False if they are not touched(to make search more powerful)
        bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
        for bool_field in bool_fields:
            if bool_field not in vals:
                vals[bool_field] = False
        #End
        # Field-level group restrictions: silently drop values the current user
        # is not allowed to write (membership checked against res_groups).
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
                fobj = self._inherit_fields[field][2]
            for group in groups:
                module = group.split(".")[0]
                grp = group.split(".")[1]
                # NOTE(review): membership query built by string interpolation;
                # the interpolated values come from model definitions, not end users — confirm
                cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                           (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
                elif readonly[0][0] == 0:
            # Classic (stored) columns go straight into the INSERT; the rest is
            # queued in ``upd_todo`` and written after the row exists.
            current_field = self._columns[field]
            if current_field._classic_write:
                columns.append((field, '%s', current_field._symbol_set[1](vals[field])))
                #for the function fields that receive a value, we set them directly in the database
                #(they may be required), but we also need to trigger the _fct_inv()
                if (hasattr(current_field, '_fnct_inv')) and not isinstance(current_field, fields.related):
                    #TODO: this way to special case the related fields is really creepy but it shouldn't be changed at
                    #one week of the release candidate. It seems the only good way to handle correctly this is to add an
                    #attribute to make a field `really readonly´ and thus totally ignored by the create()... otherwise
                    #if, for example, the related has a default value (for usability) then the fct_inv is called and it
                    #may raise some access rights error. Changing this is a too big change for now, and is thus postponed
                    #after the release but, definitively, the behavior shouldn't be different for related and function
                    upd_todo.append(field)
                #TODO: this `if´ statement should be removed because there is no good reason to special case the fields
                #related. See the above TODO comment for further explanations.
                if not isinstance(current_field, fields.related):
                    upd_todo.append(field)
            if field in self._columns \
                    and hasattr(current_field, 'selection') \
                self._check_selection_field_value(cr, user, field, vals[field], context=context)
        if self._log_access:
            columns.append(('create_uid', '%s', user))
            columns.append(('write_uid', '%s', user))
            columns.append(('create_date', "(now() at time zone 'UTC')"))
            columns.append(('write_date', "(now() at time zone 'UTC')"))

        # the list of tuples used in this formatting corresponds to
        # tuple(field_name, format, value)
        # In some case, for example (id, create_date, write_date) we does not
        # need to read the third value of the tuple, because the real value is
        # encoded in the second value (the format).
            """INSERT INTO "%s" (%s) VALUES(%s) RETURNING id""" % (
                ', '.join('"%s"' % f[0] for f in columns),
                ', '.join(f[1] for f in columns)
            tuple([f[2] for f in columns if len(f) > 2])

        id_new, = cr.fetchone()
        # write deferred (non-classic) fields in their declared priority order
        upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

        # Maintain the parent_left/parent_right nested-set columns used by
        # child_of domains; skipped when deferred via the context key.
        if self._parent_store and not context.get('defer_parent_store_computation'):
                self.pool._init_parent[self._name] = True
                parent = vals.get(self._parent_name, False)
                    cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
                    result_p = cr.fetchall()
                    for (pleft,) in result_p:
                        cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
                        pleft_old = cr.fetchone()[0]
                    # no parent given: append the node at the end of the tree
                    cr.execute('select max(parent_right) from '+self._table)
                    pleft = cr.fetchone()[0] or 0
                # shift existing intervals to make room for the new node
                cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
                cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
                cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))

        # default element in context must be remove when call a one2many or many2many
        rel_context = context.copy()
        for c in context.items():
            if c[0].startswith('default_'):
                del rel_context[c[0]]

        for field in upd_todo:
            result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
        self._validate(cr, user, [id_new], context)

        # recompute stored function fields that depend on the written values
        if not context.get('no_store_function', False):
            result += self._store_get_values(cr, user, [id_new],
                list(set(vals.keys() + self._inherits.values())),
            for order, model_name, ids, fields2 in result:
                if not (model_name, ids, fields2) in done:
                    self.pool[model_name]._store_set_values(cr, user, ids, fields2, context)
                    done.append((model_name, ids, fields2))

        # optional audit-log message ("<description> '<name>' created.")
        if self._log_create and not (context and context.get('no_store_function', False)):
            message = self._description + \
                self.name_get(cr, user, [id_new], context=context)[0][1] + \
                "' " + _("created.")
            self.log(cr, user, id_new, message, True, context=context)
        self.check_access_rule(cr, user, [id_new], 'create', context=context)
        self.create_workflow(cr, user, [id_new], context=context)
4609 def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
4610 """Fetch records as objects allowing to use dot notation to browse fields and relations
4612 :param cr: database cursor
4613 :param uid: current user id
4614 :param select: id or list of ids.
4615 :param context: context arguments, like lang, time zone
4616 :rtype: object or list of objects requested
4619 self._list_class = list_class or browse_record_list
4621 # need to accepts ints and longs because ids coming from a method
4622 # launched by button in the interface have a type long...
4623 if isinstance(select, (int, long)):
4624 return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
4625 elif isinstance(select, list):
4626 return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
4628 return browse_null()
    def _store_get_values(self, cr, uid, ids, fields, context):
        """Returns an ordered list of fields.function to call due to
           an update operation on ``fields`` of records with ``ids``,
           obtained by calling the 'store' triggers of these fields,
           as setup by their 'store' attribute.

           :return: [(priority, model_name, [record_ids,], [function_fields,])]
        """
        if fields is None: fields = []
        # all store triggers registered for this model at setup time
        stored_functions = self.pool._store_function.get(self._name, [])

        # use indexed names for the details of the stored_functions:
        model_name_, func_field_to_compute_, target_ids_func_, trigger_fields_, priority_ = range(5)

        # only keep store triggers that should be triggered for the ``fields``
        triggers_to_compute = [f for f in stored_functions \
                if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]

        # memoize each target-ids callback on its identity: several triggers may
        # share the same (possibly expensive) callback
        target_id_results = {}
        for store_trigger in triggers_to_compute:
            target_func_id_ = id(store_trigger[target_ids_func_])
            if not target_func_id_ in target_id_results:
                # use admin user for accessing objects having rules defined on store fields
                target_id_results[target_func_id_] = [i for i in store_trigger[target_ids_func_](self, cr, SUPERUSER_ID, ids, context) if i]
            target_ids = target_id_results[target_func_id_]

            # the compound key must consider the priority and model name
            key = (store_trigger[priority_], store_trigger[model_name_])
            for target_id in target_ids:
                to_compute_map.setdefault(key, {}).setdefault(target_id,set()).add(tuple(store_trigger))

        # Here to_compute_map looks like:
        # { (10, 'model_a') : { target_id1: [ (trigger_1_tuple, trigger_2_tuple) ], ... }
        #   (20, 'model_a') : { target_id2: [ (trigger_3_tuple, trigger_4_tuple) ], ... }
        #   (99, 'model_a') : { target_id1: [ (trigger_5_tuple, trigger_6_tuple) ], ... }

        # Now we need to generate the batch function calls list
        # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
        for ((priority,model), id_map) in to_compute_map.iteritems():
            # group the target ids by the exact trigger set they need, so each
            # batch call recomputes one homogeneous set of function fields
            trigger_ids_maps = {}
            # function_ids_maps =
            #   { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
            for target_id, triggers in id_map.iteritems():
                trigger_ids_maps.setdefault(tuple(triggers), []).append(target_id)
            for triggers, target_ids in trigger_ids_maps.iteritems():
                call_map.setdefault((priority,model),[]).append((priority, model, target_ids,
                                                                 [t[func_field_to_compute_] for t in triggers]))
        ordered_keys = call_map.keys()
        # flatten the per-(priority, model) call lists into one ordered list
        result = reduce(operator.add, (call_map[k] for k in ordered_keys))
    def _store_set_values(self, cr, uid, ids, fields, context):
        """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
           respecting ``multi`` attributes), and stores the resulting values in the database directly."""
        if self._log_access:
            # Collect, per record id, the function fields whose freshness delay
            # (trigger tuple item 5, in hours, relative to write_date) has not
            # expired yet; those are still up to date and are skipped below.
            cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
                field_dict.setdefault(r[0], [])
                res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
                write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
                for i in self.pool._store_function.get(self._name, []):
                        up_write_date = write_date + datetime.timedelta(hours=i[5])
                        if datetime.datetime.now() < up_write_date:
                                field_dict[r[0]].append(i[1])

        # Group the fields by their ``_multi`` attribute: fields sharing the
        # same multi key are computed together by a single function call.
            if self._columns[f]._multi not in keys:
                keys.append(self._columns[f]._multi)
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
                # use admin user for accessing objects having rules defined on store fields
                result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
                for id, value in result.items():
                        # drop values that were deemed still fresh above
                        for f in value.keys():
                            if f in field_dict[id]:
                        if self._columns[v]._type == 'many2one':
                                # keep only the id part of an (id, name) pair
                                value[v] = value[v][0]
                        upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
                        upd1.append(self._columns[v]._symbol_set[1](value[v]))
                        # one UPDATE per record carrying all computed columns
                        cr.execute('update "' + self._table + '" set ' + \
                            ','.join(upd0) + ' where id = %s', upd1)
                    # use admin user for accessing objects having rules defined on store fields
                    result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
                    for r in result.keys():
                            if r in field_dict.keys():
                                if f in field_dict[r]:
                    # single (non-multi) fields: one UPDATE per record and field
                    for id, value in result.items():
                        if self._columns[f]._type == 'many2one':
                        cr.execute('update "' + self._table + '" set ' + \
                            '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
    def perm_write(self, cr, user, ids, fields, context=None):
        # Removed API kept only as a stub so legacy callers fail loudly.
        raise NotImplementedError(_('This method does not exist anymore'))
    # TODO: improve handling of NULL values
    def _where_calc(self, cr, user, domain, active_test=True, context=None):
        """Computes the WHERE clause needed to implement an OpenERP domain.
        :param domain: the domain to compute
        :param active_test: whether the default filtering of records with ``active``
                            field set to ``False`` should be applied.
        :return: the query expressing the given domain as provided in domain
        :rtype: osv.query.Query
        """
        # if the object has a field named 'active', filter out all inactive
        # records unless they were explicitely asked for
        if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
                # the item[0] trick below works for domain items and '&'/'|'/'!'
                # operators too
                if not any(item[0] == 'active' for item in domain):
                    domain.insert(0, ('active', '=', 1))
                # empty incoming domain: filter on active records only
                domain = [('active', '=', 1)]

            # non-empty domain: let the expression parser build FROM/WHERE
            e = expression.expression(cr, user, domain, self, context)
            tables = e.get_tables()
            where_clause, where_params = e.to_sql()
            where_clause = where_clause and [where_clause] or []
            # empty domain: no WHERE clause, select from the model's own table
            where_clause, where_params, tables = [], [], ['"%s"' % self._table]

        return Query(tables, where_clause, where_params)
    def _check_qorder(self, word):
        # Validate an ORDER BY specification against the module-level
        # ``regex_order`` pattern (comma-separated field names, optional
        # asc/desc); raises instead of returning False so callers can rely
        # on a validated spec before interpolating it into SQL.
        if not regex_order.match(word):
            raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
    def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
        """Add what's missing in ``query`` to implement all appropriate ir.rules
          (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)

           :param query: the current query object
        """
        # the super-user bypasses all record rules
        if uid == SUPERUSER_ID:

        def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
            """ :param string parent_model: string of the parent model
                :param model child_object: model object, base of the rule application
            """
                if parent_model and child_object:
                    # as inherited rules are being applied, we need to add the missing JOIN
                    # to reach the parent table (if it was not JOINed yet in the query)
                    parent_alias = child_object._inherits_join_add(child_object, parent_model, query)
                    # inherited rules are applied on the external table -> need to get the alias and replace
                    parent_table = self.pool[parent_model]._table
                    added_clause = [clause.replace('"%s"' % parent_table, '"%s"' % parent_alias) for clause in added_clause]
                    # change references to parent_table to parent_alias, because we now use the alias to refer to the table
                    for table in added_tables:
                        # table is just a table name -> switch to the full alias
                        if table == '"%s"' % parent_table:
                            new_tables.append('"%s" as "%s"' % (parent_table, parent_alias))
                        # table is already a full statement -> replace reference to the table to its alias, is correct with the way aliases are generated
                            new_tables.append(table.replace('"%s"' % parent_table, '"%s"' % parent_alias))
                    added_tables = new_tables
                # merge the rule's domain into the query being built
                query.where_clause += added_clause
                query.where_clause_params += added_params
                for table in added_tables:
                    if table not in query.tables:
                        query.tables.append(table)

        # apply main rules on the object
        rule_obj = self.pool.get('ir.rule')
        rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, self._name, mode, context=context)
        apply_rule(rule_where_clause, rule_where_clause_params, rule_tables)

        # apply ir.rules from the parents (through _inherits)
        for inherited_model in self._inherits:
            rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, inherited_model, mode, context=context)
            apply_rule(rule_where_clause, rule_where_clause_params, rule_tables,
                        parent_model=inherited_model, child_object=self)
    def _generate_m2o_order_by(self, order_field, query):
        """
        Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
        either native m2o fields or function/related fields that are stored, including
        intermediate JOINs for inheritance if required.

        :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
        """
        if order_field not in self._columns and order_field in self._inherit_fields:
            # also add missing joins for reaching the table containing the m2o field
            qualified_field = self._inherits_join_calc(order_field, query)
            order_field_column = self._inherit_fields[order_field][2]
            # field owned by this model: qualify with our own table
            qualified_field = '"%s"."%s"' % (self._table, order_field)
            order_field_column = self._columns[order_field]

        assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
        if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
            _logger.debug("Many2one function/related fields must be stored " \
                "to be used as ordering fields! Ignoring sorting for %s.%s",
                self._name, order_field)

        # figure out the applicable order_by for the m2o
        dest_model = self.pool[order_field_column._obj]
        m2o_order = dest_model._order
        if not regex_order.match(m2o_order):
            # _order is complex, can't use it here, so we default to _rec_name
            m2o_order = dest_model._rec_name
            # extract the field names, to be able to qualify them and add desc/asc
            for order_part in m2o_order.split(","):
                m2o_order_list.append(order_part.strip().split(" ", 1)[0].strip())
            m2o_order = m2o_order_list

        # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
        # as we don't want to exclude results that have NULL values for the m2o
        src_table, src_field = qualified_field.replace('"', '').split('.', 1)
        dst_alias, dst_alias_statement = query.add_join((src_table, dest_model._table, src_field, 'id', src_field), implicit=False, outer=True)
        qualify = lambda field: '"%s"."%s"' % (dst_alias, field)
        return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
    def _generate_order_by(self, order_spec, query):
        """
        Attempt to consruct an appropriate ORDER BY clause based on order_spec, which must be
        a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.

        :raise" except_orm in case order_spec is malformed
        """
        order_by_clause = ''
        order_spec = order_spec or self._order
            order_by_elements = []
            self._check_qorder(order_spec)
            for order_part in order_spec.split(','):
                order_split = order_part.strip().split(' ')
                order_field = order_split[0].strip()
                order_direction = order_split[1].strip() if len(order_split) == 2 else ''
                # 'id' and the magic log-access columns live on our own table
                if order_field == 'id' or (self._log_access and order_field in LOG_ACCESS_COLUMNS.keys()):
                    order_by_elements.append('"%s"."%s" %s' % (self._table, order_field, order_direction))
                elif order_field in self._columns:
                    order_column = self._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = '"%s"."%s"' % (self._table, order_field)
                    elif order_column._type == 'many2one':
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                        continue # ignore non-readable or "non-joinable" fields
                elif order_field in self._inherit_fields:
                    # inherited field: sort on the parent model's column
                    parent_obj = self.pool[self._inherit_fields[order_field][3]]
                    order_column = parent_obj._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = self._inherits_join_calc(order_field, query)
                    elif order_column._type == 'many2one':
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                        continue # ignore non-readable or "non-joinable" fields
                    raise ValueError( _("Sorting field %s not found on model %s") %( order_field, self._name))
                # an m2o may expand into several ordering columns
                if isinstance(inner_clause, list):
                    for clause in inner_clause:
                        order_by_elements.append("%s %s" % (clause, order_direction))
                    order_by_elements.append("%s %s" % (inner_clause, order_direction))
            if order_by_elements:
                order_by_clause = ",".join(order_by_elements)

        return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
        This is ok at the security level because this method is private and not callable through XML-RPC.

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        self.check_access_rights(cr, access_rights_uid or user, 'read')

        # For transient models, restrict acces to the current user, except for the super-user
        if self.is_transient() and self._log_access and user != SUPERUSER_ID:
            args = expression.AND(([('create_uid', '=', user)], args or []))

        # translate the domain into SQL, then graft record rules and ordering on it
        query = self._where_calc(cr, user, args, context=context)
        self._apply_ir_rules(cr, user, query, 'read', context=context)
        order_by = self._generate_order_by(order, query)
        from_clause, where_clause, where_clause_params = query.get_sql()

        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        where_str = where_clause and (" WHERE %s" % where_clause) or ''
        query_str = 'SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str

            # /!\ the main query must be executed as a subquery, otherwise
            # offset and limit apply to the result of count()!
            cr.execute('SELECT count(*) FROM (%s) AS count' % query_str, where_clause_params)

        cr.execute(query_str, where_clause_params)

        # TDE note: with auto_join, we could have several lines about the same result
        # i.e. a lead with several unread messages; we uniquify the result using
        # a fast way to do it while preserving order (http://www.peterbe.com/plog/uniqifiers-benchmark)
        def _uniquify_list(seq):
            return [x for x in seq if x not in seen and not seen.add(x)]

        return _uniquify_list([x[0] for x in res])
    # returns the different values ever entered for one field
    # this is used, for example, in the client when the user hits enter on
    # a char field
5002 def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
5005 if field in self._inherit_fields:
5006 return self.pool[self._inherit_fields[field][0]].distinct_field_get(cr, uid, field, value, args, offset, limit)
5008 return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
    def copy_data(self, cr, uid, id, default=None, context=None):
        """
        Copy given record's data with all its fields values

        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to copy
        :param default: field values to override in the original values of the copied record
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: dictionary containing all the field values
        """
        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_data_seen', {})
        if id in seen_map.setdefault(self._name, []):
        seen_map[self._name].append(id)

        # a copied record should not keep the original's workflow state:
        # reset 'state' to its default unless the caller overrides it
        if 'state' not in default:
            if 'state' in self._defaults:
                if callable(self._defaults['state']):
                    default['state'] = self._defaults['state'](self, cr, uid, context)
                    default['state'] = self._defaults['state']

        # build a black list of fields that should not be copied
        blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
        def blacklist_given_fields(obj):
            # blacklist the fields that are given by inheritance
            for other, field_to_other in obj._inherits.items():
                blacklist.add(field_to_other)
                if field_to_other in default:
                    # all the fields of 'other' are given by the record: default[field_to_other],
                    # except the ones redefined in self
                    blacklist.update(set(self.pool[other]._all_columns) - set(self._columns))
                    blacklist_given_fields(self.pool[other])
            # blacklist deprecated fields
            for name, field in obj._columns.items():
                if field.deprecated:
        blacklist_given_fields(self)

        # read only the fields the user may access, minus the blacklist
        fields_to_read = [f for f in self.check_field_access_rights(cr, uid, 'read', None)
                          if f not in blacklist]
        data = self.read(cr, uid, [id], fields_to_read, context=context)
            raise IndexError(_("Record #%d of %s not found, cannot copy!") % (id, self._name))

        # translate read() results into values suitable for create()
        for f, colinfo in self._all_columns.items():
            field = colinfo.column
            elif f in blacklist:
            elif isinstance(field, fields.function):
            elif field._type == 'many2one':
                res[f] = data[f] and data[f][0]
            elif field._type == 'one2many':
                other = self.pool[field._obj]
                # duplicate following the order of the ids because we'll rely on
                # it later for copying translations in copy_translation()!
                lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
                # the lines are duplicated using the wrong (old) parent, but then
                # are reassigned to the correct one thanks to the (0, 0, ...)
                res[f] = [(0, 0, line) for line in lines if line]
            elif field._type == 'many2many':
                res[f] = [(6, 0, data[f])]
    def copy_translations(self, cr, uid, old_id, new_id, context=None):
        """Duplicate the ir.translation entries attached to record ``old_id``
        (recursing into one2many children) so they apply to ``new_id``."""
        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_translations_seen',{})
        if old_id in seen_map.setdefault(self._name,[]):
        seen_map[self._name].append(old_id)

        trans_obj = self.pool.get('ir.translation')
        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fields = self.fields_get(cr, uid, context=context)

        for field_name, field_def in fields.items():
            # removing the lang to compare untranslated values
            context_wo_lang = dict(context, lang=None)
            old_record, new_record = self.browse(cr, uid, [old_id, new_id], context=context_wo_lang)
            # we must recursively copy the translations for o2o and o2m
            if field_def['type'] == 'one2many':
                target_obj = self.pool[field_def['relation']]
                # here we rely on the order of the ids to match the translations
                # as foreseen in copy_data()
                old_children = sorted(r.id for r in old_record[field_name])
                new_children = sorted(r.id for r in new_record[field_name])
                for (old_child, new_child) in zip(old_children, new_children):
                    target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
            # and for translatable fields we keep them for copy
            elif field_def.get('translate'):
                if field_name in self._columns:
                    # field stored on this model: translate against our own ids
                    trans_name = self._name + "," + field_name
                elif field_name in self._inherit_fields:
                    trans_name = self._inherit_fields[field_name][0] + "," + field_name
                    # get the id of the parent record to set the translation
                    inherit_field_name = self._inherit_fields[field_name][1]
                    target_id = new_record[inherit_field_name].id
                    source_id = old_record[inherit_field_name].id

                trans_ids = trans_obj.search(cr, uid, [
                        ('name', '=', trans_name),
                        ('res_id', '=', source_id)
                user_lang = context.get('lang')
                for record in trans_obj.read(cr, uid, trans_ids, context=context):
                    # remove source to avoid triggering _set_src
                    del record['source']
                    record.update({'res_id': target_id})
                    if user_lang and user_lang == record['lang']:
                        # 'source' to force the call to _set_src
                        # 'value' needed if value is changed in copy(), want to see the new_value
                        record['source'] = old_record[field_name]
                        record['value'] = new_record[field_name]
                    trans_obj.create(cr, uid, record, context=context)
5155 def copy(self, cr, uid, id, default=None, context=None):
5157 Duplicate record with given id updating it with default values
5159 :param cr: database cursor
5160 :param uid: current user id
5161 :param id: id of the record to copy
5162 :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
5163 :type default: dictionary
5164 :param context: context arguments, like lang, time zone
5165 :type context: dictionary
5166 :return: id of the newly created record
5171 context = context.copy()
5172 data = self.copy_data(cr, uid, id, default, context)
5173 new_id = self.create(cr, uid, data, context)
5174 self.copy_translations(cr, uid, id, new_id, context)
5177 def exists(self, cr, uid, ids, context=None):
5178 """Checks whether the given id or ids exist in this model,
5179 and return the list of ids that do. This is simple to use for
5180 a truth test on a browse_record::
5185 :param ids: id or list of ids to check for existence
5186 :type ids: int or [int]
5187 :return: the list of ids that currently exist, out of
5190 if type(ids) in (int, long):
5194 query = 'SELECT id FROM "%s"' % self._table
5195 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
5196 return [x[0] for x in cr.fetchall()]
5198 def check_recursion(self, cr, uid, ids, context=None, parent=None):
5199 _logger.warning("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
5201 assert parent is None or parent in self._columns or parent in self._inherit_fields,\
5202 "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
5203 return self._check_recursion(cr, uid, ids, context, parent)
5205 def _check_recursion(self, cr, uid, ids, context=None, parent=None):
5207 Verifies that there is no loop in a hierarchical structure of records,
5208 by following the parent relationship using the **parent** field until a loop
5209 is detected or until a top-level record is found.
5211 :param cr: database cursor
5212 :param uid: current user id
5213 :param ids: list of ids of records to check
5214 :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
5215 :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
5218 parent = self._parent_name
5220 # must ignore 'active' flag, ir.rules, etc. => direct SQL query
5221 query = 'SELECT "%s" FROM "%s" WHERE id = %%s' % (parent, self._table)
5224 while current_id is not None:
5225 cr.execute(query, (current_id,))
5226 result = cr.fetchone()
5227 current_id = result[0] if result else None
5228 if current_id == id:
5232 def _check_m2m_recursion(self, cr, uid, ids, field_name):
5234 Verifies that there is no loop in a hierarchical structure of records,
5235 by following the parent relationship using the **parent** field until a loop
5236 is detected or until a top-level record is found.
5238 :param cr: database cursor
5239 :param uid: current user id
5240 :param ids: list of ids of records to check
5241 :param field_name: field to check
5242 :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
5245 field = self._all_columns.get(field_name)
5246 field = field.column if field else None
5247 if not field or field._type != 'many2many' or field._obj != self._name:
5248 # field must be a many2many on itself
5249 raise ValueError('invalid field_name: %r' % (field_name,))
5251 query = 'SELECT distinct "%s" FROM "%s" WHERE "%s" IN %%s' % (field._id2, field._rel, field._id1)
5255 for i in range(0, len(ids_parent), cr.IN_MAX):
5257 sub_ids_parent = ids_parent[i:j]
5258 cr.execute(query, (tuple(sub_ids_parent),))
5259 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
5260 ids_parent = ids_parent2
5261 for i in ids_parent:
5266 def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
5267 """Retrieve the External ID(s) of any database record.
5269 **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``
5271 :return: map of ids to the list of their fully qualified External IDs
5272 in the form ``module.key``, or an empty list when there's no External
5273 ID for a record, e.g.::
5275 { 'id': ['module.ext_id', 'module.ext_id_bis'],
5278 ir_model_data = self.pool.get('ir.model.data')
5279 data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
5280 data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
5283 # can't use dict.fromkeys() as the list would be shared!
5285 for record in data_results:
5286 result[record['res_id']].append('%(module)s.%(name)s' % record)
5289 def get_external_id(self, cr, uid, ids, *args, **kwargs):
5290 """Retrieve the External ID of any database record, if there
5291 is one. This method works as a possible implementation
5292 for a function field, to be able to add it to any
5293 model object easily, referencing it as ``Model.get_external_id``.
5295 When multiple External IDs exist for a record, only one
5296 of them is returned (randomly).
5298 :return: map of ids to their fully qualified XML ID,
5299 defaulting to an empty string when there's none
5300 (to be usable as a function field),
5303 { 'id': 'module.ext_id',
5306 results = self._get_xml_ids(cr, uid, ids)
5307 for k, v in results.iteritems():
    # backwards compatibility: old names of the External ID accessors
    get_xml_id = get_external_id
    _get_xml_ids = _get_external_ids
    def print_report(self, cr, uid, ids, name, data, context=None):
        """
        Render the report `name` for the given IDs. The report must be defined
        for this model, not another.
        """
        report = self.pool['ir.actions.report.xml']._lookup_report(cr, name)
        # the looked-up report must be bound to this very model
        assert self._name == report.table
        return report.create(cr, uid, ids, data, context)
    def is_transient(self):
        """ Return whether the model is transient.

        See :class:`TransientModel`.

        :rtype: bool
        """
        return self._transient
5336 def _transient_clean_rows_older_than(self, cr, seconds):
5337 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5338 # Never delete rows used in last 5 minutes
5339 seconds = max(seconds, 300)
5340 query = ("SELECT id FROM " + self._table + " WHERE"
5341 " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
5342 " < ((now() at time zone 'UTC') - interval %s)")
5343 cr.execute(query, ("%s seconds" % seconds,))
5344 ids = [x[0] for x in cr.fetchall()]
5345 self.unlink(cr, SUPERUSER_ID, ids)
5347 def _transient_clean_old_rows(self, cr, max_count):
5348 # Check how many rows we have in the table
5349 cr.execute("SELECT count(*) AS row_count FROM " + self._table)
5351 if res[0][0] <= max_count:
5352 return # max not reached, nothing to do
5353 self._transient_clean_rows_older_than(cr, 300)
5355 def _transient_vacuum(self, cr, uid, force=False):
5356 """Clean the transient records.
5358 This unlinks old records from the transient model tables whenever the
5359 "_transient_max_count" or "_max_age" conditions (if any) are reached.
5360 Actual cleaning will happen only once every "_transient_check_time" calls.
5361 This means this method can be called frequently called (e.g. whenever
5362 a new record is created).
5363 Example with both max_hours and max_count active:
5364 Suppose max_hours = 0.2 (e.g. 12 minutes), max_count = 20, there are 55 rows in the
5365 table, 10 created/changed in the last 5 minutes, an additional 12 created/changed between
5366 5 and 10 minutes ago, the rest created/changed more then 12 minutes ago.
5367 - age based vacuum will leave the 22 rows created/changed in the last 12 minutes
5368 - count based vacuum will wipe out another 12 rows. Not just 2, otherwise each addition
5369 would immediately cause the maximum to be reached again.
5370 - the 10 rows that have been created/changed the last 5 minutes will NOT be deleted
5372 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5373 _transient_check_time = 20 # arbitrary limit on vacuum executions
5374 self._transient_check_count += 1
5375 if not force and (self._transient_check_count < _transient_check_time):
5376 return True # no vacuum cleaning this time
5377 self._transient_check_count = 0
5379 # Age-based expiration
5380 if self._transient_max_hours:
5381 self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)
5383 # Count-based expiration
5384 if self._transient_max_count:
5385 self._transient_clean_old_rows(cr, self._transient_max_count)
5389 def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None):
5390 """ Serializes one2many and many2many commands into record dictionaries
5391 (as if all the records came from the database via a read()). This
5392 method is aimed at onchange methods on one2many and many2many fields.
5394 Because commands might be creation commands, not all record dicts
5395 will contain an ``id`` field. Commands matching an existing record
5396 will have an ``id``.
5398 :param field_name: name of the one2many or many2many field matching the commands
5399 :type field_name: str
5400 :param commands: one2many or many2many commands to execute on ``field_name``
5401 :type commands: list((int|False, int|False, dict|False))
5402 :param fields: list of fields to read from the database, when applicable
5403 :type fields: list(str)
5404 :returns: records in a shape similar to that returned by ``read()``
5405 (except records may be missing the ``id`` field if they don't exist in db)
5408 result = [] # result (list of dict)
5409 record_ids = [] # ids of records to read
5410 updates = {} # {id: dict} of updates on particular records
5412 for command in commands:
5413 if not isinstance(command, (list, tuple)):
5414 record_ids.append(command)
5415 elif command[0] == 0:
5416 result.append(command[2])
5417 elif command[0] == 1:
5418 record_ids.append(command[1])
5419 updates.setdefault(command[1], {}).update(command[2])
5420 elif command[0] in (2, 3):
5421 record_ids = [id for id in record_ids if id != command[1]]
5422 elif command[0] == 4:
5423 record_ids.append(command[1])
5424 elif command[0] == 5:
5425 result, record_ids = [], []
5426 elif command[0] == 6:
5427 result, record_ids = [], list(command[2])
5429 # read the records and apply the updates
5430 other_model = self.pool[self._all_columns[field_name].column._obj]
5431 for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
5432 record.update(updates.get(record['id'], {}))
5433 result.append(record)
    # for backward compatibility: old name of resolve_2many_commands()
    resolve_o2m_commands_to_record_dicts = resolve_2many_commands
5440 def search_read(self, cr, uid, domain=None, fields=None, offset=0, limit=None, order=None, context=None):
5442 Performs a ``search()`` followed by a ``read()``.
5444 :param cr: database cursor
5445 :param user: current user id
5446 :param domain: Search domain, see ``args`` parameter in ``search()``. Defaults to an empty domain that will match all records.
5447 :param fields: List of fields to read, see ``fields`` parameter in ``read()``. Defaults to all fields.
5448 :param offset: Number of records to skip, see ``offset`` parameter in ``search()``. Defaults to 0.
5449 :param limit: Maximum number of records to return, see ``limit`` parameter in ``search()``. Defaults to no limit.
5450 :param order: Columns to sort result, see ``order`` parameter in ``search()``. Defaults to no sort.
5451 :param context: context arguments.
5452 :return: List of dictionaries containing the asked fields.
5453 :rtype: List of dictionaries.
5456 record_ids = self.search(cr, uid, domain or [], offset, limit or False, order or False, context or {})
5460 if fields and fields == ['id']:
5461 # shortcut read if we only want the ids
5462 return [{'id': id} for id in record_ids]
5464 result = self.read(cr, uid, record_ids, fields or [], context or {})
5466 if len(result) >= 1:
5470 result = [index[x] for x in record_ids if x in index]
    def _register_hook(self, cr):
        """ stuff to do right after the registry is built (hook for
        overriding models; the base implementation does nothing) """
5477 def __getattr__(self, name):
5478 if name.startswith('signal_'):
5479 signal_name = name[len('signal_'):]
5481 return (lambda *args, **kwargs:
5482 self.signal_workflow(*args, signal=signal_name, **kwargs))
5483 get = getattr(super(BaseModel, self), '__getattr__', None)
5484 if get is not None: return get(name)
5485 raise AttributeError(
5486 "'%s' object has no attribute '%s'" % (type(self).__name__, name))
# keep this import here: moving it to the top of the file would cause dependency cycle errors
class Model(BaseModel):
    """Main super-class for regular database-persisted OpenERP models.

    OpenERP models are created by inheriting from this class::

        class user(Model):
            ...

    The system will later instantiate the class once per database (on
    which the class' module is installed).
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = False # True in a TransientModel
class TransientModel(BaseModel):
    """Model super-class for transient records, meant to be temporarily
    persisted, and regularly vaccuum-cleaned.

    A TransientModel has a simplified access rights management,
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
    to all TransientModel records.
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = True # enables the transient behavior (vacuuming, simplified ACLs)
class AbstractModel(BaseModel):
    """Abstract Model super-class for creating an abstract class meant to be
    inherited by regular models (Models or TransientModels) but not meant to
    be usable on its own, or persisted.

    Technical note: we don't want to make AbstractModel the super-class of
    Model or BaseModel because it would not make sense to put the main
    definition of persistence methods such as create() in it, and still we
    should be able to override them within an AbstractModel.
    """
    _auto = False # don't create any database backend for AbstractModels
    _register = False # not visible in ORM registry, meant to be python-inherited only
def itemgetter_tuple(items):
    """ Fixes itemgetter inconsistency (useful in some cases) of not returning
    a tuple if len(items) == 1: always returns an n-tuple where n = len(items)

    :param items: sequence of keys/indices to extract
    :return: callable mapping a gettable to an n-tuple of extracted values
    """
    if len(items) == 0:
        # operator.itemgetter() with no key is invalid: return a constant ()
        return lambda a: ()
    if len(items) == 1:
        # operator.itemgetter(k) returns a bare value, not a 1-tuple
        return lambda gettable: (gettable[items[0]],)
    return operator.itemgetter(*items)
class ImportWarning(Warning):
    """ Used to send warnings upwards the stack during the import process

    NOTE: intentionally shadows the ``ImportWarning`` builtin within this module.
    """
def convert_pgerror_23502(model, fields, info, e):
    """Convert a PostgreSQL not-null violation (SQLSTATE 23502) into a
    user-friendly error dict ``{'message': ..., 'field': ...}``; falls back
    to the raw message when the error text cannot be parsed.
    """
    m = re.match(r'^null value in column "(?P<field>\w+)" violates '
                 r'not-null constraint\n',
                 str(e))
    # guard the group() access: m is None when the message does not match
    field_name = m and m.group('field')
    if not m or field_name not in fields:
        return {'message': unicode(e)}
    message = _(u"Missing required value for the field '%s'.") % field_name
    field = fields.get(field_name)
    if field:
        message = _(u"Missing required value for the field '%s' (%s)") % (field['string'], field_name)
    return {
        'message': message,
        'field': field_name,
    }
def convert_pgerror_23505(model, fields, info, e):
    """Convert a PostgreSQL unique-constraint violation (SQLSTATE 23505) into
    a user-friendly error dict ``{'message': ..., 'field': ...}``; falls back
    to the raw message when the error text cannot be parsed.
    """
    m = re.match(r'^duplicate key (?P<field>\w+) violates unique constraint',
                 str(e))
    # guard the group() access: m is None when the message does not match
    field_name = m and m.group('field')
    if not m or field_name not in fields:
        return {'message': unicode(e)}
    message = _(u"The value for the field '%s' already exists.") % field_name
    field = fields.get(field_name)
    if field:
        message = _(u"%s This might be '%s' in the current model, or a field "
                    u"of the same name in an o2m.") % (message, field['string'])
    return {
        'message': message,
        'field': field_name,
    }
# Map of SQLSTATE error codes to converter callables; unknown codes fall back
# to a converter that just wraps the raw PostgreSQL message.
PGERROR_TO_OE = collections.defaultdict(
    # shape of mapped converters
    lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), {
    # not_null_violation
    '23502': convert_pgerror_23502,
    # unique constraint error
    '23505': convert_pgerror_23505,
})
5587 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: