1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
22 #.apidoc title: Object Relational Mapping
23 #.apidoc module-mods: member-order: bysource
26 Object relational mapping to database (postgresql) module
27 * Hierarchical structure
28 * Constraints consistency, validations
29 * Object meta Data depends on its status
30 * Optimised processing by complex query (multiple actions at once)
31 * Default fields value
32 * Permissions optimisation
* Persistent object: DB postgresql
35 * Multi-level caching system
* 2 different inheritance mechanisms
38 - classicals (varchar, integer, boolean, ...)
39 - relations (one2many, many2one, many2many)
60 from lxml import etree
64 import openerp.netsvc as netsvc
65 import openerp.tools as tools
66 from openerp.tools.config import config
67 from openerp.tools.misc import CountingStream
68 from openerp.tools.safe_eval import safe_eval as eval
69 from openerp.tools.translate import _
70 from openerp import SUPERUSER_ID
71 from query import Query
73 _logger = logging.getLogger(__name__)
74 _schema = logging.getLogger(__name__ + '.schema')
76 # List of etree._Element subclasses that we choose to ignore when parsing XML.
77 from openerp.tools import SKIPPED_ELEMENT_TYPES
# Validates "order" specifications: a comma-separated list of column names
# (optionally double-quoted), each optionally followed by "asc" or "desc"
# (case-insensitive, per re.I).
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Valid model (_name) identifiers: lowercase letters, digits, "_" and ".".
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
def transfer_field_to_modifiers(field, modifiers):
    """Seed ``modifiers`` from a field descriptor (fields_get()-style dict).

    For each of ``invisible``/``readonly``/``required``, the field's own
    attribute gives the default value, and the field's ``states`` mapping is
    scanned for states overriding that default.  When such state exceptions
    exist the modifier becomes a domain on ``state``; otherwise it is the
    plain boolean default.

    :param dict field: field descriptor (anything supporting ``.get``)
    :param dict modifiers: modifiers dict, updated in place
    """
    default_values = {}
    state_exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        state_exceptions[attr] = []
        default_values[attr] = bool(field.get(attr))
    for state, modifs in (field.get("states",{})).items():
        for modif in modifs:
            # Only record states whose value actually differs from the default.
            if default_values[modif[0]] != modif[1]:
                state_exceptions[modif[0]].append(state)

    for attr, default_value in default_values.items():
        if state_exceptions[attr]:
            # "not in" when the default is True (exception states disable it),
            # "in" when the default is False (exception states enable it).
            modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
        else:
            modifiers[attr] = default_value
100 # Don't deal with groups, it is done by check_group().
101 # Need the context to evaluate the invisible attribute on tree views.
102 # For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """Fold a view node's dynamic attributes into ``modifiers`` (in place).

    Evaluates the node's ``attrs``, ``states`` and the plain
    ``invisible``/``readonly``/``required`` attributes.

    :param node: element of the view architecture (supports ``.get``)
    :param dict modifiers: modifiers dict, updated in place
    :param dict context: evaluation context for the attribute expressions
    :param bool in_tree_view: whether the node is in a tree view, where
                              ``invisible`` gets the separate column-level
                              ``tree_invisible`` meaning
    """
    if node.get('attrs'):
        modifiers.update(eval(node.get('attrs')))

    if node.get('states'):
        if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
        else:
            modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]

    for a in ('invisible', 'readonly', 'required'):
        if node.get(a):
            v = bool(eval(node.get(a), {'context': context or {}}))
            if in_tree_view and a == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = v
            elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[a] = v
def simplify_modifiers(modifiers):
    """Remove falsy modifier entries, which are implied by default.

    Alters ``modifiers`` in place.
    """
    for a in ('invisible', 'readonly', 'required'):
        if a in modifiers and not modifiers[a]:
            del modifiers[a]
def transfer_modifiers_to_node(modifiers, node):
    """Serialize ``modifiers`` as JSON into the node's ``modifiers`` attribute.

    An empty/falsy ``modifiers`` dict leaves the node untouched.
    """
    if modifiers:
        simplify_modifiers(modifiers)
        node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """ Processes node attributes and field descriptors to generate
    the ``modifiers`` node attribute and set it on the provided node.

    Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    :returns: nothing
    """
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """Development helper: compute the modifiers for ``what`` (either an XML
    arch string or a fields_get()-style dict) and assert that their JSON
    serialization equals ``expected``.
    """
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
182 # openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """Assertion-based sanity checks for the modifiers helpers; intended to
    be invoked manually (not part of an automated suite)."""
    test_modifiers('<field name="a"/>', '{}')
    test_modifiers('<field name="a" invisible="1"/>', '{"invisible": true}')
    test_modifiers('<field name="a" readonly="1"/>', '{"readonly": true}')
    test_modifiers('<field name="a" required="1"/>', '{"required": true}')
    test_modifiers('<field name="a" invisible="0"/>', '{}')
    test_modifiers('<field name="a" readonly="0"/>', '{}')
    test_modifiers('<field name="a" required="0"/>', '{}')
    test_modifiers('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}') # TODO order is not guaranteed
    test_modifiers('<field name="a" invisible="1" required="0"/>', '{"invisible": true}')
    test_modifiers('<field name="a" invisible="0" required="1"/>', '{"required": true}')
    test_modifiers("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}')

    # The dictionary is supposed to be the result of fields_get().
    test_modifiers({}, '{}')
    test_modifiers({"invisible": True}, '{"invisible": true}')
    test_modifiers({"invisible": False}, '{}')
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

        The _name attribute in osv and osv_memory object is subject to
        some restrictions. This function returns True or False whether
        the given name is allowed or not.

        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in other not, which leads to this kind
        of errors:

            psycopg2.ProgrammingError: relation "xxx" does not exist).

        The same restriction should apply to both osv and osv_memory
        objects for consistency.

    """
    if regex_object_name.match(name) is None:
        return False
    return True
def raise_on_invalid_object_name(name):
    """Raise an ``except_orm`` (after logging) if ``name`` is not a valid
    openerp object name according to check_object_name()."""
    if not check_object_name(name):
        msg = "The _name attribute %s is not valid." % name
        _logger.error(msg)
        raise except_orm('ValueError', msg)
230 POSTGRES_CONFDELTYPES = {
def intersect(la, lb):
    """Return the items of ``la`` that also appear in ``lb``, keeping the
    order (and duplicates) of ``la``."""
    return [item for item in la if item in lb]
def fix_import_export_id_paths(fieldname):
    """
    Fixes the id fields in import and exports, and splits field paths
    on '/'.

    ``foo.id`` is rewritten to ``foo/.id`` (database id) and ``foo:id``
    to ``foo/id`` (external id) before splitting.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    fixed_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    fixed_external_id = re.sub(r'([^/]):id', r'\1/id', fixed_db_id)
    return fixed_external_id.split('/')
class except_orm(Exception):
    """Base ORM exception carrying a (name, value) pair, where ``name`` is
    typically an error category and ``value`` the message."""
    def __init__(self, name, value):
        self.name = name
        self.value = value
        self.args = (name, value)
260 class BrowseRecordError(Exception):
class browse_null(object):
    """ Readonly python database object browser

    Null object returned instead of a browse_record when a relational
    field is empty: any item or attribute access yields None, and the
    object is falsy (Python 2 ``__nonzero__``).
    """

    def __init__(self):
        self.id = False

    def __getitem__(self, name):
        return None

    def __getattr__(self, name):
        return None  # XXX: return self ?

    def __int__(self):
        return 0

    def __str__(self):
        return ''

    def __nonzero__(self):
        return False

    def __unicode__(self):
        return u''
290 # TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """ Collection of browse objects

    Such an instance will be returned when doing a ``browse([ids..])``
    and will be iterable, yielding browse() objects
    """

    def __init__(self, lst, context=None):
        """
        :param lst: iterable of browse records
        :param dict context: optional context, defaulted to {} when falsy
        """
        if not context:
            context = {}
        super(browse_record_list, self).__init__(lst)
        self.context = context
class browse_record(object):
    """ An object that behaves like a row of an object's table.
        It has attributes after the columns of the corresponding object.

            uobj = pool.get('res.users')
            user_rec = uobj.browse(cr, uid, 104)
    """

    def __init__(self, cr, uid, id, table, cache, context=None,
                 list_class=browse_record_list, fields_process=None):
        """
        :param table: the browsed object (inherited from orm)
        :param dict cache: a dictionary of model->field->data to be shared
                           across browse objects, thus reducing the SQL
                           read()s. It can speed up things a lot, but also be
                           disastrous if not discarded after write()/unlink()
        :param dict context: dictionary with an optional context
        """
        if fields_process is None:
        self._list_class = list_class
        self._table = table # deprecated, use _model!
        self._table_name = self._table._name
        self.__logger = logging.getLogger('openerp.osv.orm.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        # All browse records sharing `cache` share the per-model data dict,
        # which is what makes cross-record prefetching work below.
        cache.setdefault(table._name, {})
        self._data = cache[table._name]

#        if not (id and isinstance(id, (int, long,))):
#            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))

        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        # Lazy field access: serve from the shared cache, otherwise read()
        # the field (prefetching cheap fields and other cached ids at the
        # same time) and wrap relational values into browse objects.
        if name not in self._data[self._id]:
            # build the list of fields we will fetch

            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                attr = getattr(self._table, name)
                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    def function_proxy(*args, **kwargs):
                        if 'context' not in kwargs and self._context:
                            kwargs.update(context=self._context)
                        return attr(self._cr, self._uid, [self._id], *args, **kwargs)
                    return function_proxy
                error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
                self.__logger.warning(error_msg)
                if self.__logger.isEnabledFor(logging.DEBUG):
                    self.__logger.debug(''.join(traceback.format_stack()))
                raise KeyError(error_msg)

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            if col._prefetch and not col.groups:
                # gen the list of "local" (ie not inherited) fields which are classic or many2one
                field_filter = lambda x: x[1]._classic_write and x[1]._prefetch and not x[1].groups
                fields_to_fetch = filter(field_filter, self._table._columns.items())
                # gen the list of inherited fields
                inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
                # complete the field list with the inherited fields which are classic or many2one
                fields_to_fetch += filter(field_filter, inherits)
            # otherwise we fetch only that field
                fields_to_fetch = [(name, col)]

            # fetch for every cached id that does not have this field yet
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            field_names = map(lambda x: x[0], fields_to_fetch)
                field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
            except (openerp.exceptions.AccessError, except_orm):
                # prefetching attempt failed, perhaps we're violating ACL restrictions involuntarily
                _logger.info('Prefetching attempt for fields %s on %s failed for ids %s, re-trying just for id %s', field_names, self._model._name, ids, self._id)
                field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")

            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                    raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

                # Where did those ids come from? Perhaps old entries in ir_model_data?
                _logger.warning("No field_values found for ids %s in %s", ids, self)
                raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                for field_name, field_column in fields_to_fetch:
                    if field_column._type == 'many2one':
                        if result_line[field_name]:
                            obj = self._table.pool.get(field_column._obj)
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                                value = result_line[field_name]
                                # FIXME: this happen when a _inherits object
                                #        overwrite a field of it parent. Need
                                #        testing to be sure we got the right
                                #        object and not the parent one.
                                if not isinstance(value, browse_record):
                                        # In some cases the target model is not available yet, so we must ignore it,
                                        # which is safe in most cases, this value will just be loaded later when needed.
                                        # This situation can be caused by custom fields that connect objects with m2o without
                                        # respecting module dependencies, causing relationships to be connected to soon when
                                        # the target is not loaded yet.
                                    new_data[field_name] = browse_record(self._cr,
                                        self._uid, value, obj, self._cache,
                                        context=self._context,
                                        list_class=self._list_class,
                                        fields_process=self._fields_process)
                                    new_data[field_name] = value
                                new_data[field_name] = browse_null()
                            new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type == 'reference':
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                                ref_obj, ref_id = result_line[field_name].split(',')
                                ref_id = long(ref_id)
                                obj = self._table.pool.get(ref_obj)
                                new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                            new_data[field_name] = browse_null()
                        new_data[field_name] = browse_null()
                        new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

            if not name in self._data[self._id]:
                # How did this happen? Could be a missing model due to custom fields used too soon, see above.
                self.__logger.error("Fields to fetch: %s, Field values: %s", field_names, field_values)
                self.__logger.error("Cached: %s, Table: %s", self._data[self._id], self._table)
                raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
        # Attribute access delegates to __getitem__, converting lookup
        # failures into AttributeError.
            raise AttributeError(e)

    def __contains__(self, name):
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

        raise NotImplementedError("Iteration is not allowed on %s" % self)

    def __hasattr__(self, name):

        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        # Records compare equal when they reference the same table and id.
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

        return hash((self._table_name, self._id))

        """Force refreshing this browse_record's data and all the data of the
        records that belong to the same cache, by emptying the cache completely,
        preserving only the record identifiers (for prefetching optimizations).
        """
        for model, model_cache in self._cache.iteritems():
            # only preserve the ids of the records that were in the cache
            cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
            self._cache[model].clear()
            self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """ Returns the VARCHAR declaration for the provided size:

    * If no size (or an empty or negative size is provided) return an
      'infinite' VARCHAR
    * Otherwise return a VARCHAR(n)

    :type int size: varchar size, optional
    :rtype: str
    """
    if size:
        if not isinstance(size, int):
            raise TypeError("VARCHAR parameter should be an int, got %s"
                            % type(size))
        if size > 0:
            return 'VARCHAR(%d)' % size
    return 'VARCHAR'
# Mapping of _column classes to their PostgreSQL storage type; the value is
# used both as the pg_type identifier and the DDL spelling (see get_pg_type).
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    fields.serialized: 'text',
def get_pg_type(f, type_override=None):
    """
    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification)
    :rtype: (str, str)
    """
    field_type = type_override or type(f)

    if field_type in FIELDS_TO_PGTYPES:
        pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
    elif issubclass(field_type, fields.float):
        # Fixed-precision floats map to NUMERIC, others to double precision.
        if f.digits:
            pg_type = ('numeric', 'NUMERIC')
        else:
            pg_type = ('float8', 'DOUBLE PRECISION')
    elif issubclass(field_type, (fields.char, fields.reference)):
        pg_type = ('varchar', pg_varchar(f.size))
    elif issubclass(field_type, fields.selection):
        # Integer-keyed selections (or size == -1) are stored as integers,
        # all other selections as varchar.
        if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\
                or getattr(f, 'size', None) == -1:
            pg_type = ('int4', 'INTEGER')
        else:
            pg_type = ('varchar', pg_varchar(getattr(f, 'size', None)))
    elif issubclass(field_type, fields.function):
        # Stored function fields dispatch on their declared _type.
        if f._type == 'selection':
            pg_type = ('varchar', pg_varchar())
        else:
            pg_type = get_pg_type(f, getattr(fields, f._type))
    else:
        _logger.warning('%s type not supported!', field_type)
        pg_type = None

    return pg_type
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.

    """

    # Registry of {openerp_module_name: [model classes]} filled at class
    # creation time.
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            # First non-registered class: mark it registered for subclasses
            # and skip the module bookkeeping below.
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            return

        # The (OpenERP) module name can be in the `openerp.addons` namespace
        # or not. For instance module `sale` can be imported as
        # `openerp.addons.sale` (the good way) or `sale` (for backward
        # compatibility).
        module_parts = self.__module__.split('.')
        if len(module_parts) > 2 and module_parts[0] == 'openerp' and \
                module_parts[1] == 'addons':
            module_name = self.__module__.split('.')[2]
        else:
            module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# (list() is a no-op on Python 2 and keeps the concatenation valid on
# Python 3, where dict.keys() is a view)
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys())
class BaseModel(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class' subclasses:

    * Model: for regular database-persisted models
    * TransientModel: for temporary data, stored in the database but automatically
      vacuumed every so often
    * AbstractModel: for abstract super classes meant to be shared by multiple
      _inheriting classes (usually Models or TransientModels)

    The system will later instantiate the class once per database (on
    which the class' module is installed).

    To create a class that should not be instantiated, the _register class attribute
    should be set to False.
    """
    __metaclass__ = MetaModel
    _auto = True # create database backend
    _register = False # Set to false if the model shouldn't be automatically discovered.
    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    # dict of {field:method}, with method returning the (name_get of records, {id: fold})
    # to include in the _read_group, if grouped on this field
    _transient = False # True in a TransientModel

    # { 'parent_model': 'm2o_field', ... }

    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model.
    # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #      field_column_obj, original_parent_model), ... }

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.

    _sql_constraints = []
    # Method names that must never be exposed/overridden via dynamic dispatch.
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']

    CONCURRENCY_CHECK_FIELD = '__last_update'
    def log(self, cr, uid, id, message, secondary=False, context=None):
        # Deprecated stub: only emits a deprecation warning (returns None).
        return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.")
729 def view_init(self, cr, uid, fields_list, context=None):
730 """Override this method to do specific things when a view on the object is opened."""
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).

        """
        # Ensure the model itself exists in ir_model (and, during module
        # loading, in ir_model_data too).
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
            cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
            model_id = cr.fetchone()[0]
            cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
            model_id = cr.fetchone()[0]
        if 'module' in context:
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                    (name_id, context['module'], 'ir.model', model_id)

        # Load the existing ir_model_fields rows for this model, keyed by name.
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        ir_model_fields_obj = self.pool.get('ir.model.fields')

        # sparse field should be created at the end, as it depends on its serialized field already existing
        model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0)
        for (k, f) in model_fields:
            # Build the ir_model_fields values for column `k`.
                'model_id': model_id,
                'field_description': f.string,
                'relation': f._obj or '',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': f._fields_id if isinstance(f, fields.one2many) else '',
                'serialization_field_id': None,
            if getattr(f, 'serialization_field', None):
                # resolve link to serialization_field if specified by name
                serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', f.serialization_field)])
                if not serialization_field_id:
                    raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k))
                vals['serialization_field_id'] = serialization_field_id[0]

            # When its a custom field,it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                #setting value to let the problem NOT occur next time
                    vals['select_level'] = cols[k]['select_level']

                cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
                id = cr.fetchone()[0]
                cr.execute("""INSERT INTO ir_model_fields (
                    id, model_id, model, name, field_description, ttype,
                    relation,view_load,state,select_level,relation_field, translate, serialization_field_id
                    %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                    id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                    vals['relation'], bool(vals['view_load']), 'base',
                    vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id']
                if 'module' in context:
                    name1 = 'field_' + self._table + '_' + k
                    cr.execute("select name from ir_model_data where name=%s", (name1,))
                        name1 = name1 + "_" + str(id)
                    cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                        (name1, context['module'], 'ir.model.fields', id)
                # Existing row: update it when any value differs.
                for key, val in vals.items():
                    if cols[k][key] != vals[key]:
                        cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                        cr.execute("""UPDATE ir_model_fields SET
                            model_id=%s, field_description=%s, ttype=%s, relation=%s,
                            view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s, serialization_field_id=%s
                            model=%s AND name=%s""", (
                            vals['model_id'], vals['field_description'], vals['ttype'],
                            vals['relation'], bool(vals['view_load']),
                            vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name']
848 # Goal: try to apply inheritance at the instanciation level and
849 # put objects in the pool var
    def create_instance(cls, pool, cr):
        """ Instanciate a given model.

        This class method instanciates the class of some model (i.e. a class
        deriving from osv or osv_memory). The class might be the class passed
        in argument or, if it inherits from another class, a class constructed
        by combining the two classes.

        The ``attributes`` argument specifies which parent class attributes
        are combined with those of the class being created.

        TODO: the creation of the combined class is repeated at each call of
        this method. This is probably unnecessary.

        """
        attributes = ['_columns', '_defaults', '_inherits', '_constraints',

        parent_names = getattr(cls, '_inherit', None)
            # _inherit given as a single name: the model keeps (or takes)
            # that name.
            if isinstance(parent_names, (str, unicode)):
                name = cls._name or parent_names
                parent_names = [parent_names]
                    raise TypeError('_name is mandatory in case of multiple inheritance')

            for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
                parent_model = pool.get(parent_name)
                    raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                        'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
                if not getattr(cls, '_original_module', None) and name == parent_model._name:
                    cls._original_module = parent_model._original_module
                parent_class = parent_model.__class__
                    # Copy each combined attribute from the parent, then merge
                    # in this class's own values.
                    new = copy.copy(getattr(parent_model, s, {}))
                        # Don't _inherit custom fields.

                        # Duplicate float fields because they have a .digits
                        # cache (which must be per-registry, not server-wide).
                            if new[c]._type == 'float':
                                new[c] = copy.copy(new[c])
                    if hasattr(new, 'update'):
                        new.update(cls.__dict__.get(s, {}))
                    elif s=='_constraints':
                        for c in cls.__dict__.get(s, []):
                            for c2 in range(len(new)):
                                #For _constraints, we should check field and methods as well
                                if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                        or getattr(new[c2][0],'__name__', True) == \
                                            getattr(c[0],'__name__', False)):
                                    # If new class defines a constraint with
                                    # same function name, we let it override
                                    # the parent's one.
                        new.extend(cls.__dict__.get(s, []))

            # Keep links to non-inherited constraints, e.g. useful when exporting translations
            nattr['_local_constraints'] = cls.__dict__.get('_constraints', [])
            nattr['_local_sql_constraints'] = cls.__dict__.get('_sql_constraints', [])
            cls = type(name, (cls, parent_class), dict(nattr, _register=False))
            cls._local_constraints = getattr(cls, '_constraints', [])
            cls._local_sql_constraints = getattr(cls, '_sql_constraints', [])

        if not getattr(cls, '_original_module', None):
            cls._original_module = cls._module
        obj = object.__new__(cls)
        obj.__init__(pool, cr)
940 """Register this model.
942 This doesn't create an instance but simply register the model
943 as being part of the module where it is defined.
948 # Set the module name (e.g. base, sale, accounting, ...) on the class.
949 module = cls.__module__.split('.')[0]
950 if not hasattr(cls, '_module'):
953 # Record this class in the list of models to instantiate for this module,
954 # managed by the metaclass.
955 module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
956 if cls not in module_model_list:
958 module_model_list.append(cls)
960 # Since we don't return an instance here, the __init__
961 # method won't be called.
    def __init__(self, pool, cr):
        """ Initialize a model and make it part of the given registry.

        - copy the stored fields' functions in the osv_pool,
        - update the _columns with the fields found in ir_model_fields,
        - ensure there is a many2one for each _inherits'd parent,
        - update the children's _columns,
        - give a chance to each field to initialize itself.
        """
        # Make this instance reachable through the registry under its model name.
        pool.add(self._name, self)

        # A model must be named, either directly or through _inherit.
        if not self._name and not hasattr(self, '_inherit'):
            name = type(self).__name__.split('.')[0]
            msg = "The class %s has to have a _name attribute" % name
            raise except_orm('ValueError', msg)

        if not self._description:
            self._description = self._name
        # Default SQL table name: dots in the model name become underscores.
        self._table = self._name.replace('.', '_')

        if not hasattr(self, '_log_access'):
            # If _log_access is not specified, it is the same value as _auto.
            self._log_access = getattr(self, "_auto", True)

        # Work on a per-instance copy so the shared class-level column dict
        # is left untouched.
        self._columns = self._columns.copy()
        for store_field in self._columns:
            f = self._columns[store_field]
            if hasattr(f, 'digits_change'):
                # NOTE(review): body not visible -- presumably calls
                # f.digits_change(cr) to refresh per-registry digits cache.
            # Drop any store trigger previously registered for this exact
            # (model, field) pair; it is re-registered below.
            def not_this_field(stored_func):
                x, y, z, e, f, l = stored_func
                return x != self._name or y != store_field
            self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
            if not isinstance(f, fields.function):
                # NOTE(review): only function fields carry a `store` spec;
                # a `continue` is presumably here -- confirm.
            # store=True shorthand: recompute when the record itself changes,
            # with default priority 10 and no time length.
            sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
            for object, aa in sm.items():
                # A trigger tuple is (fnct, fields, priority[, length]);
                # the two unpackings below are alternate branches on len(aa).
                (fnct, fields2, order, length) = aa
                (fnct, fields2, order) = aa
                raise except_orm('Error',
                    ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
                self.pool._store_function.setdefault(object, [])
                t = (self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length)
                if not t in self.pool._store_function[object]:
                    self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
                    # Keep triggers sorted by priority (tuple index 4).
                    self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))

        for (key, _, msg) in self._sql_constraints:
            # Map constraint name to message for friendly IntegrityError reports.
            self.pool._sql_error[self._table+'_'+key] = msg

        # Load manual fields

        # Check if the query is already done for all modules, or if we need to
        if self.pool.fields_by_model is not None:
            manual_fields = self.pool.fields_by_model.get(self._name, [])
            # else branch: query ir_model_fields directly for this model only
            cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
            manual_fields = cr.dictfetchall()
        for field in manual_fields:
            if field['name'] in self._columns:
                # field already defined in code: skip the manual definition
            # entries of the `attrs` dict rebuilt from the ir_model_fields row:
                'string': field['field_description'],
                'required': bool(field['required']),
                'readonly': bool(field['readonly']),
                # NB: eval of a stored domain string; manual fields are
                # administrator-defined, not arbitrary end-user input.
                'domain': eval(field['domain']) if field['domain'] else None,
                'size': field['size'] or None,
                'ondelete': field['on_delete'],
                'translate': (field['translate']),
                #'select': int(field['select_level'])
            if field['serialization_field_id']:
                # Sparse field: stored inside a serialized container field.
                cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
                attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
                if field['ttype'] in ['many2one', 'one2many', 'many2many']:
                    attrs.update({'relation': field['relation']})
                self._columns[field['name']] = fields.sparse(**attrs)
            elif field['ttype'] == 'selection':
                self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
            elif field['ttype'] == 'reference':
                self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
            elif field['ttype'] == 'many2one':
                self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
            elif field['ttype'] == 'one2many':
                self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
            elif field['ttype'] == 'many2many':
                # Derive a deterministic x_..._rel table name for the m2m.
                _rel1 = field['relation'].replace('.', '_')
                _rel2 = field['model'].replace('.', '_')
                _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
                self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
                # default branch: simple column type looked up by name
                self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)

        self._inherits_check()
        self._inherits_reload()
        if not self._sequence:
            self._sequence = self._table + '_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)

        # Give every column a chance to (re)initialize itself.
        for f in self._columns:
            self._columns[f].restart()

        # Transience: bookkeeping used when vacuuming transient records.
        if self.is_transient():
            self._transient_check_count = 0
            self._transient_max_count = config.get('osv_memory_count_limit')
            self._transient_max_hours = config.get('osv_memory_age_limit')
            assert self._log_access, "TransientModels must have log_access turned on, "\
                "in order to implement their access rights policy"

        # Validate rec_name: it must be a real column (or 'id').
        if self._rec_name is not None:
            assert self._rec_name in self._all_columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
            # else branch: fall back to the conventional 'name' column
            self._rec_name = 'name'
    def __export_row(self, cr, uid, row, fields, context=None):
        """Export one browse_record as a list of rows: the first row holds
        the record's own cells, extra rows carry *2many sub-records.
        `fields` is a list of field paths (lists of path components)."""

        def check_type(field_type):
            # Type-appropriate placeholder for an empty cell.
            if field_type == 'float':
            elif field_type == 'integer':
            elif field_type == 'boolean':

        def selection_field(in_field):
            # Resolve column f[i] through an _inherits mapping (recursively).
            col_obj = self.pool.get(in_field.keys()[0])
            if f[i] in col_obj._columns.keys():
                return col_obj._columns[f[i]]
            elif f[i] in col_obj._inherits.keys():
                # NOTE(review): recursive result is not returned here --
                # looks like a latent bug upstream; confirm.
                selection_field(col_obj._inherits)

        def _get_xml_id(self, cr, uid, r):
            # Return an external ID for record `r`, creating a '__export__'
            # entry when none exists yet.
            model_data = self.pool.get('ir.model.data')
            data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
            d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
            r = '%s.%s' % (d['module'], d['name'])
            # No existing xml_id: derive a unique name (postfix loop).
            n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
            if not model_data.search(cr, uid, [('name', '=', n)]):
            # Create the export xml_id as superuser in the __export__ module.
            model_data.create(cr, SUPERUSER_ID, {
                'model': self._name,
                'module': '__export__',

        # One output cell per requested field path.
        data = map(lambda x: '', range(len(fields)))
        for fpos in range(len(fields)):
                    # 'id' component: export the record's external ID
                    r = _get_xml_id(self, cr, uid, r)
                    # To display external name of selection field when it's exported
                    if f[i] in self._columns.keys():
                        cols = self._columns[f[i]]
                    elif f[i] in self._inherit_fields.keys():
                        cols = selection_field(self._inherits)
                    if cols and cols._type == 'selection':
                        sel_list = cols.selection
                        if r and type(sel_list) == type([]):
                            # Replace the stored key by its human-readable label.
                            r = [x[1] for x in sel_list if r==x[0]]
                            r = r and r[0] or False
                    # Empty value: emit a type-appropriate placeholder.
                    if f[i] in self._columns:
                        r = check_type(self._columns[f[i]]._type)
                    elif f[i] in self._inherit_fields:
                        r = check_type(self._inherit_fields[f[i]][2]._type)
                    data[fpos] = r or False
                    if isinstance(r, (browse_record_list, list)):
                        # Sub-fields of this *2many path, relative to prefix f[:i+1].
                        fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                        if [x for x in fields2 if x]:
                        done.append(fields2)
                        if cols and cols._type=='many2many' and len(fields[fpos])>(i+1) and (fields[fpos][i+1]=='id'):
                            # m2m exported by external ids: comma-separated xml_ids
                            data[fpos] = ','.join([_get_xml_id(self, cr, uid, x) for x in r])
                        # Recurse into the first sub-record; its first row is
                        # merged into the current row.
                        lines2 = row2._model.__export_row(cr, uid, row2, fields2,
                        for fpos2 in range(len(fields)):
                            if lines2 and lines2[0][fpos2]:
                                data[fpos2] = lines2[0][fpos2]
                        # No sub-fields: join the display names of the records.
                        name_relation = self.pool.get(rr._table_name)._rec_name
                        if isinstance(rr[name_relation], browse_record):
                            rr = rr[name_relation]
                        rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
                        rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
                        dt += tools.ustr(rr_name or '') + ','
                        # Strip the trailing comma added by the loop above.
                        data[fpos] = dt[:-1]
                    if isinstance(r, browse_record):
                        # many2one: export the record's display name
                        r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
                        r = r and r[0] and r[0][1] or ''
                    data[fpos] = tools.ustr(r or '')
        return [data] + lines
    def export_data(self, cr, uid, ids, fields_to_export, context=None):
        """
        Export fields for selected objects

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids
        :param fields_to_export: list of fields
        :param context: context arguments, like lang, time zone
        :rtype: dictionary with a *datas* matrix

        This method is used when exporting data via client menu
        """
        # Gather own and inherited column objects under their field names.
        cols = self._columns.copy()
        for f in self._inherit_fields:
            cols.update({f: self._inherit_fields[f][2]})
        # Normalize 'a/b/c' field paths into lists of path components.
        fields_to_export = map(fix_import_export_id_paths, fields_to_export)
        # NOTE(review): `datas` accumulator is presumably initialized to []
        # just before this loop -- confirm.
        for row in self.browse(cr, uid, ids, context):
            datas += self.__export_row(cr, uid, row, fields_to_export, context)
        return {'datas': datas}
    def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
        """
        .. deprecated:: 7.0
            Use :meth:`~load` instead

        Import given data in given module

        This method is used when importing data via client menu.

        Example of fields to import for a sale.order::

            partner_id,                  (=name_search)
            order_line/.id,              (=database_id)
            order_line/product_id/id,    (=xml id)
            order_line/price_unit,
            order_line/product_uom_qty,
            order_line/product_uom/id    (=xml_id)

        This method returns a 4-tuple with the following structure::

            (return_code, errored_resource, error_message, unused)

        * The first item is a return code, it is ``-1`` in case of
          import error, or the last imported row number in case of success
        * The second item contains the record data dict that failed to import
          in case of error, otherwise it's 0
        * The third item contains an error message string in case of error,
          otherwise it's 0
        * The last item is currently unused, with no specific semantics

        :param fields: list of fields to import
        :param datas: data to import
        :param mode: 'init' or 'update' for record creation
        :param current_module: module name
        :param noupdate: flag for record creation
        :param filename: optional file to store partial import state for recovery
        :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
        :rtype: (int, dict or 0, str or 0, str or 0)
        """
        context = dict(context) if context is not None else {}
        context['_import_current_module'] = current_module

        fields = map(fix_import_export_id_paths, fields)
        ir_model_data_obj = self.pool.get('ir.model.data')

        # body of the inner `log` callback (def header not visible here):
        # escalate importer 'error' messages into hard exceptions
            if m['type'] == 'error':
                raise Exception(m['message'])

        # Resume a previously interrupted import of the same file, if any.
        if config.get('import_partial') and filename:
            with open(config.get('import_partial'), 'rb') as partial_import_file:
                data = pickle.load(partial_import_file)
                position = data.get(filename, 0)

        # Convert and write each extracted record through ir.model.data.
        for res_id, xml_id, res, info in self._convert_records(cr, uid,
                        self._extract_records(cr, uid, fields, datas,
                                              context=context, log=log),
                        context=context, log=log):
            ir_model_data_obj._update(cr, uid, self._name,
                 current_module, res, mode=mode, xml_id=xml_id,
                 noupdate=noupdate, res_id=res_id, context=context)
            position = info.get('rows', {}).get('to', 0) + 1
            # Checkpoint partial-import progress every 100 rows so a crash
            # can resume from the last committed position.
            if config.get('import_partial') and filename and (not (position%100)):
                with open(config.get('import_partial'), 'rb') as partial_import:
                    data = pickle.load(partial_import)
                data[filename] = position
                with open(config.get('import_partial'), 'wb') as partial_import:
                    pickle.dump(data, partial_import)
                if context.get('defer_parent_store_computation'):
                    self._parent_store_compute(cr)
        # NOTE(review): the matching `try:` header for this handler is not
        # visible in this copy -- confirm structure against upstream.
        except Exception, e:
            return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), ''

        if context.get('defer_parent_store_computation'):
            self._parent_store_compute(cr)
        return position, 0, 0, 0
    def load(self, cr, uid, fields, data, context=None):
        """
        Attempts to load the data matrix, and returns a list of ids (or
        ``False`` if there was an error and no id could be generated) and a
        list of messages.

        The ids are those of the records created and saved (in database), in
        the same order they were extracted from the file. They can be passed
        directly to :meth:`~read`

        :param fields: list of fields to import, at the same index as the corresponding data
        :type fields: list(str)
        :param data: row-major matrix of data to import
        :type data: list(list(str))
        :param dict context:
        :returns: {ids: list(int)|False, messages: [Message]}
        """
        # Outer savepoint: lets us undo the entire load atomically on error.
        cr.execute('SAVEPOINT model_load')

        fields = map(fix_import_export_id_paths, fields)
        # NOTE(review): clear_caches() most likely returns None, yet
        # `ModelData._update(...)` is called below -- verify this assignment
        # (upstream binds the model first, then clears caches separately).
        ModelData = self.pool['ir.model.data'].clear_caches()

        fg = self.fields_get(cr, uid, context=context)

        for id, xid, record, info in self._convert_records(cr, uid,
                self._extract_records(cr, uid, fields, data,
                                      context=context, log=messages.append),
                context=context, log=messages.append):
            # Per-record savepoint: one bad record must not poison the rest.
            cr.execute('SAVEPOINT model_load_save')
        except psycopg2.InternalError, e:
            # broken transaction, exit and hope the source error was
            # already logged
            if not any(message['type'] == 'error' for message in messages):
                messages.append(dict(info, type='error',message=
                    u"Unknown database error: '%s'" % e))
                ids.append(ModelData._update(cr, uid, self._name,
                      current_module, record, mode=mode, xml_id=xid,
                      noupdate=noupdate, res_id=id, context=context))
                cr.execute('RELEASE SAVEPOINT model_load_save')
            except psycopg2.Warning, e:
                # Non-fatal: record the warning and undo only this record.
                messages.append(dict(info, type='warning', message=str(e)))
                cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
            except psycopg2.Error, e:
                # Translate known pgcodes into structured importer messages.
                messages.append(dict(
                    **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
                # Failed to write, log to messages, rollback savepoint (to
                # avoid broken transaction) and keep going
                cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
        if any(message['type'] == 'error' for message in messages):
            # Any hard error: roll the whole load back, report no ids.
            cr.execute('ROLLBACK TO SAVEPOINT model_load')
        return {'ids': ids, 'messages': messages}
    def _extract_records(self, cr, uid, fields_, data,
                         context=None, log=lambda a: None):
        """ Generates record dicts from the data sequence.

        The result is a generator of dicts mapping field names to raw
        (unconverted, unvalidated) values.

        For relational fields, if sub-fields were provided the value will be
        a list of sub-records

        The following sub-fields may be set on the record (by key):
        * None is the name_get for the record (to use with name_create/name_search)
        * "id" is the External ID for the record
        * ".id" is the Database ID for the record
        """
        columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
        # Fake columns to avoid special cases in extractor
        columns[None] = fields.char('rec_name')
        columns['id'] = fields.char('External ID')
        columns['.id'] = fields.integer('Database ID')

        # m2o fields can't be on multiple lines so exclude them from the
        # is_relational field rows filter, but special-case it later on to
        # be handled with relational fields (as it can have subfields)
        is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
        get_o2m_values = itemgetter_tuple(
            [index for index, field in enumerate(fields_)
                   if columns[field[0]]._type == 'one2many'])
        get_nono2m_values = itemgetter_tuple(
            [index for index, field in enumerate(fields_)
                   if columns[field[0]]._type != 'one2many'])
        # Checks if the provided row has any non-empty non-relational field
        def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
            return any(g(row)) and not any(f(row))

        # Main generator loop (header with `index` initialization/advance is
        # not visible in this copy); stops when the data rows are exhausted.
            if index >= len(data): return

            # copy non-relational fields to record dict
            record = dict((field[0], value)
                          for field, value in itertools.izip(fields_, row)
                          if not is_relational(field[0]))

            # Get all following rows which have relational values attached to
            # the current record (no non-relational values)
            record_span = itertools.takewhile(
                only_o2m_values, itertools.islice(data, index + 1, None))
            # stitch record row back on for relational fields
            record_span = list(itertools.chain([row], record_span))
            for relfield in set(
                field[0] for field in fields_
                         if is_relational(field[0])):
                column = columns[relfield]
                # FIXME: how to not use _obj without relying on fields_get?
                Model = self.pool[column._obj]

                # get only cells for this sub-field, should be strictly
                # non-empty, field path [None] is for name_get column
                indices, subfields = zip(*((index, field[1:] or [None])
                                          for index, field in enumerate(fields_)
                                          if field[0] == relfield))

                # return all rows which have at least one value for the
                # subfields of relfield
                relfield_data = filter(any, map(itemgetter_tuple(indices), record_span))
                record[relfield] = [subrecord
                    for subrecord, _subinfo in Model._extract_records(
                        cr, uid, subfields, relfield_data,
                        context=context, log=log)]

            # Report the span of source rows this record was built from.
            yield record, {'rows': {
                'to': index + len(record_span) - 1
            index += len(record_span)
    def _convert_records(self, cr, uid, records,
                         context=None, log=lambda a: None):
        """ Converts records from the source iterable (recursive dicts of
        strings) into forms which can be written to the database (via
        self.create or (ir.model.data)._update)

        :returns: a list of triplets of (id, xid, record)
        :rtype: list((int|None, str|None, dict))
        """
        if context is None: context = {}
        Converter = self.pool['ir.fields.converter']
        columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
        Translation = self.pool['ir.translation']
        # Translated field labels used to build human-readable messages
        # (the `field_names = dict(` header is not visible in this copy):
            (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
                                         context.get('lang'))
            for f, column in columns.iteritems())

        convert = Converter.for_model(cr, uid, self, context=context)

        def _log(base, field, exception):
            # Warnings stay warnings; any other exception is an error.
            type = 'warning' if isinstance(exception, Warning) else 'error'
            # logs the logical (not human-readable) field name for automated
            # processing of response, but injects human readable in message
            record = dict(base, type=type, field=field,
                          message=unicode(exception.args[0]) % base)
            if len(exception.args) > 1 and exception.args[1]:
                record.update(exception.args[1])

        # Wrap the source so each record knows its stream index (row number).
        stream = CountingStream(records)
        for record, extras in stream:
            # name_get/name_create
            if None in record: pass
            # Database id may be overridden to a non-integer column.
                    dbid = int(record['.id'])
                    # in case of overridden id column
                    dbid = record['.id']
                if not self.search(cr, uid, [('id', '=', dbid)], context=context):
                    # .id points at a record that does not exist: report it
                        record=stream.index,
                        message=_(u"Unknown database identifier '%s'") % dbid))

            converted = convert(record, lambda field, err:\
                _log(dict(extras, record=stream.index, field=field_names[field]), field, err))

            yield dbid, xid, converted, dict(extras, record=stream.index)
1527 def get_invalid_fields(self, cr, uid):
1528 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        """Run every python _constraint on `ids`; collect a translated
        message per failed constraint and raise a single ValidateError."""
        context = context or {}
        lng = context.get('lang')
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            fun, msg, fields = constraint
            # We don't pass around the context here: validation code
            # must always yield the same results.
            if not fun(self, cr, uid, ids):
                # Check presence of __call__ directly instead of using
                # callable() because it will be deprecated as of Python 3.0
                if hasattr(msg, '__call__'):
                    tmp_msg = msg(self, cr, uid, ids, context=context)
                    if isinstance(tmp_msg, tuple):
                        # (message, params) pair: interpolate before display
                        tmp_msg, params = tmp_msg
                        translated_msg = tmp_msg % params
                        # else branch: plain message string
                        translated_msg = tmp_msg
                    # static message: look up its stored translation
                    translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg)
                # appended to the accumulated error messages:
                    _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                # Remember offending fields so clients can highlight them.
                self._invalids.update(fields)
        # NOTE(review): presumably guarded by `if error_msgs:` -- confirm.
        raise except_orm('ValidateError', '\n'.join(error_msgs))
        self._invalids.clear()
    def default_get(self, cr, uid, fields_list, context=None):
        """
        Returns default values for the fields in fields_list.

        :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
        :type fields_list: list
        :param context: optional context dictionary - it may contains keys for specifying certain options
                        like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
                        It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
                        or override a default value for a field.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
        """
        # trigger view init hook
        self.view_init(cr, uid, fields_list, context)

        # get the default values for the inherited fields
        for t in self._inherits.keys():
            defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
                context))

        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                if callable(self._defaults[f]):
                    defaults[f] = self._defaults[f](self, cr, uid, context)
                    # else branch: a plain (non-callable) default value
                    defaults[f] = self._defaults[f]
            # Resolve the field definition: own column, or inherited one.
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
            if isinstance(fld_def, fields.property):
                # Property fields: their default comes from ir.property.
                property_obj = self.pool.get('ir.property')
                prop_value = property_obj.get(cr, uid, f, self._name, context=context)
                if isinstance(prop_value, (browse_record, browse_null)):
                    defaults[f] = prop_value.id
                    # else branch: scalar property value
                    defaults[f] = prop_value

            if f not in defaults:

        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                if fld_def._type == 'many2one':
                    # Skip stale defaults pointing at deleted records.
                    obj = self.pool.get(fld_def._obj)
                    if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                if fld_def._type == 'many2many':
                    # Keep only the target ids that still exist.
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value or [])):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type == 'one2many':
                    # Sanitize nested defaults: drop values of m2o sub-fields
                    # whose target record no longer exists.
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value or [])):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if field2 in obj._columns.keys() and obj._columns[field2]._type == 'many2one':
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type == 'many2one':
                                obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                        field_value = field_value2
                defaults[field] = field_value

        # get the default values from the context
        for key in context or {}:
            if key.startswith('default_') and (key[8:] in fields_list):
                defaults[key[8:]] = context[key]
        # NOTE(review): a `return defaults` is expected to follow -- confirm.
    def fields_get_keys(self, cr, user, context=None):
        """Return the names of all fields: own columns plus those obtained
        recursively from every _inherits parent."""
        res = self._columns.keys()
        # TODO I believe this loop can be replaced by
        # res.extend(self._inherit_fields.keys())
        for parent in self._inherits:
            res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
        # NOTE(review): `return res` expected here -- confirm.
    def _rec_name_fallback(self, cr, uid, context=None):
        """Return a field name usable as the record display name: _rec_name
        when it is an actual column, otherwise the first column, or "id"."""
        rec_name = self._rec_name
        if rec_name not in self._columns:
            rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"
        # NOTE(review): `return rec_name` expected here -- confirm.
    # Overload this method if you need a window title which depends on the context
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
        """Hook returning a window title/header for the given view; models
        override this when the title must depend on the context."""
1680 def user_has_groups(self, cr, uid, groups, context=None):
1681 """Return true if the user is at least member of one of the groups
1682 in groups_str. Typically used to resolve ``groups`` attribute
1683 in view and model definitions.
1685 :param str groups: comma-separated list of fully-qualified group
1686 external IDs, e.g.: ``base.group_user,base.group_system``
1687 :return: True if the current user is a member of one of the
1690 return any([self.pool.get('res.users').has_group(cr, uid, group_ext_id)
1691 for group_ext_id in groups.split(',')])
    def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
        """Return the description of the fields in the node.

        In a normal call to this method, node is a complete view architecture
        but it is actually possible to give some sub-node (this is used so
        that the method can call itself recursively).

        Originally, the field descriptions are drawn from the node itself.
        But there is now some code calling fields_get() in order to merge some
        of those information in the architecture.
        """
        # body of the inner `encode` helper (def header not visible here):
        # force unicode to utf-8 bytes for lxml consumption
            if isinstance(s, unicode):
                return s.encode('utf8')

        def check_group(node):
            """Apply group restrictions, may be set at view level or model level::
               * at view level this means the element should be made invisible to
                 people who are not members
               * at model level (exclusively for fields, obviously), this means
                 the field should be completely removed from the view, as it is
                 completely unavailable for non-members

               :return: True if field should be included in the result of fields_view_get
            """
            if node.tag == 'field' and node.get('name') in self._all_columns:
                column = self._all_columns[node.get('name')].column
                if column.groups and not self.user_has_groups(cr, user,
                                                              groups=column.groups,
                    # model-level restriction: strip the field entirely
                    node.getparent().remove(node)
                    fields.pop(node.get('name'), None)
                    # no point processing view-level ``groups`` anymore, return
            if node.get('groups'):
                can_see = self.user_has_groups(cr, user,
                                               groups=node.get('groups'),
                    # view-level restriction: keep the node but hide it
                    node.set('invisible', '1')
                    modifiers['invisible'] = True
                    if 'attrs' in node.attrib:
                        del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                # diagram node/arrow: build a pseudo-view from its <field>s
                    if f.tag == 'field':
                        xml += etree.tostring(f, encoding="utf-8")
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
                attrs = {'views': views}
            if node.get('name'):
                # resolve the column object, own or inherited
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                    # else branch: inherited column
                    column = self._inherit_fields[node.get('name')][2]
                    relation = self.pool.get(column._obj)
                    # embed sub-views (form/tree/...) declared inline
                        if f.tag in ('form', 'tree', 'graph', 'kanban'):
                            ctx = context.copy()
                            ctx['base_model_name'] = self._name
                            xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                            views[str(f.tag)] = {
                    attrs = {'views': views}
                    if node.get('widget') and node.get('widget') == 'selection':
                        # Prepare the cached selection list for the client. This needs to be
                        # done even when the field is invisible to the current user, because
                        # other events could need to change its value to any of the selectable ones
                        # (such as on_change events, refreshes, etc.)

                        # If domain and context are strings, we keep them for client-side, otherwise
                        # we evaluate them server-side to consider them when generating the list of
                        # values
                        # TODO: find a way to remove this hack, by allow dynamic domains
                        if column._domain and not isinstance(column._domain, basestring):
                            dom = list(column._domain)
                        # merge the node-level domain, evaluated with uid/time
                        dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                        search_context = dict(context)
                        if column._context and not isinstance(column._context, basestring):
                            search_context.update(column._context)
                        attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                        if (node.get('required') and not int(node.get('required'))) or not column.required:
                            # optional field: offer an explicit empty choice
                            attrs['selection'].append((False, ''))
            fields[node.get('name')] = attrs

            field = model_fields.get(node.get('name'))
                transfer_field_to_modifiers(field, modifiers)

        elif node.tag in ('form', 'tree'):
            result = self.view_header_get(cr, user, False, node.tag, context)
                node.set('string', result)
            in_tree_view = node.tag == 'tree'

        elif node.tag == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if not check_group(node):
            # node must be removed, no need to proceed further with its children

        # The view architecture overrides the python model.
        # Get the attrs before they are (possibly) deleted by check_group below
        transfer_node_to_modifiers(node, modifiers, context, in_tree_view)

        # TODO remove attrs counterpart in modifiers when invisible is true ?

        # Translate view strings when a lang is present in the context.
        if 'lang' in context:
            if node.text and node.text.strip():
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.text.strip())
                    node.text = node.text.replace(node.text.strip(), trans)
            if node.tail and node.tail.strip():
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.tail.strip())
                    node.tail = node.tail.replace(node.tail.strip(), trans)

            if node.get('string') and not result:
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
                if trans == node.get('string') and ('base_model_name' in context):
                    # If translation is same as source, perhaps we'd have more luck with the alternative model name
                    # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
                    node.set('string', trans)

            for attr_name in ('confirm', 'sum', 'avg', 'help', 'placeholder'):
                attr_value = node.get(attr_name)
                    trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], attr_value)
                        node.set(attr_name, trans)

        # Recurse into child nodes (the `for f in node:` header is not
        # visible in this copy).
            if children or (node.tag == 'field' and f.tag in ('filter','separator')):
                fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))

        transfer_modifiers_to_node(modifiers, node)
        # NOTE(review): a `return fields` is expected to follow -- confirm.
    def _disable_workflow_buttons(self, cr, user, node):
        """ Set the buttons in node to readonly if the user can't activate them. """
        # admin user can always activate workflow buttons
        # NOTE(review): an early return for the superuser is expected right
        # here -- confirm against upstream.

        # TODO handle the case of more than one workflow for a model or multiple
        # transitions with different groups and same signal
        usersobj = self.pool.get('res.users')
        # Only workflow buttons (type != 'object') are access-checked here.
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            # Groups of the current user (re-read per button; could be hoisted).
            user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
            # Groups allowed to fire this button's workflow signal.
            cr.execute("""SELECT DISTINCT t.group_id
                  INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                  INNER JOIN wkf_transition t ON (t.act_to = a.id)
                         AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
            group_ids = [x[0] for x in cr.fetchall() if x[0]]
            # Clickable when the signal is unrestricted or groups intersect.
            can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
            # readonly='1' blocks the click; '0' leaves the button active.
            button.set('readonly', str(int(not can_click)))
        # NOTE(review): expected to return the (mutated) node -- confirm.
# Serialize a processed view node back to an XML arch string and collect the
# description of every field it references; raises a detailed error listing
# the views that mention a field unknown to the model.
# NOTE(review): numbering jumps (1903, 1905-1906, 1910-1912, 1923, 1934, 1936,
# 1941, 1944, 1948, 1950-1951 missing) -- e.g. the `fields = {}` init, the
# field-pruning branch and the final `return arch, fields` are elided here.
1901 def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
1902 """ Return an architecture and a description of all the fields.
1904 The field description combines the result of fields_get() and
1907 :param node: the architecture as an etree
1908 :return: a tuple (arch, fields) where arch is the given node as a
1909 string and fields is the description of all the fields.
# Diagram views describe two sub-models: children[0] is the 'node' model,
# children[1] the 'arrow' model; merge both models' field descriptions.
1913 if node.tag == 'diagram':
1914 if node.getchildren()[0].tag == 'node':
1915 node_model = self.pool.get(node.getchildren()[0].get('object'))
1916 node_fields = node_model.fields_get(cr, user, None, context)
1917 fields.update(node_fields)
1918 if not node.get("create") and not node_model.check_access_rights(cr, user, 'create', raise_exception=False):
1919 node.set("create", 'false')
1920 if node.getchildren()[1].tag == 'arrow':
1921 arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
1922 fields.update(arrow_fields)
1924 fields = self.fields_get(cr, user, None, context)
1925 fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
1926 node = self._disable_workflow_buttons(cr, user, node)
# Reflect missing create/unlink/write rights as create/delete/edit='false'
# attributes on the view root, unless the arch already sets them.
1927 if node.tag in ('kanban', 'tree', 'form', 'gantt'):
1928 for action, operation in (('create', 'create'), ('delete', 'unlink'), ('edit', 'write')):
1929 if not node.get(action) and not self.check_access_rights(cr, user, operation, raise_exception=False):
1930 node.set(action, 'false')
1931 arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
1932 for k in fields.keys():
1933 if k not in fields_def:
1935 for field in fields_def:
1937 # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
1938 fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
1939 elif field in fields:
1940 fields[field].update(fields_def[field])
# Unknown field referenced by the arch: look up every view (root or
# inheriting) whose arch mentions it, to build a helpful error message.
1942 cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
1943 res = cr.fetchall()[:]
1945 res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
1946 msg = "\n * ".join([r[0] for r in res])
1947 msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
1949 raise except_orm('View error', msg)
# Build a minimal <form> etree listing every simple field of the model.
# NOTE(review): lines 1955, 1961, 1966 and 1970 are elided -- most likely a
# `continue` after the o2m/m2m check and the final `return view`; confirm
# against the upstream file before relying on this excerpt.
1952 def _get_default_form_view(self, cr, user, context=None):
1953 """ Generates a default single-line form view using all fields
1954 of the current model except the m2m and o2m ones.
1956 :param cr: database cursor
1957 :param int user: user id
1958 :param dict context: connection context
1959 :returns: a form view as an lxml document
1960 :rtype: etree._Element
1962 view = etree.Element('form', string=self._description)
1963 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
1964 for field, descriptor in self.fields_get(cr, user, context=context).iteritems():
# one2many/many2many columns are skipped in the default form.
1965 if descriptor['type'] in ('one2many', 'many2many'):
1967 etree.SubElement(view, 'field', name=field)
# Text fields take a full row: force a line break after them.
1968 if descriptor['type'] == 'text':
1969 etree.SubElement(view, 'newline')
# Default <search> view containing only the record-name field.
# NOTE(review): lines 1974, 1980, 1983-1984 elided -- the trailing
# `return view` is not visible in this excerpt.
1972 def _get_default_search_view(self, cr, user, context=None):
1973 """ Generates a single-field search view, based on _rec_name.
1975 :param cr: database cursor
1976 :param int user: user id
1977 :param dict context: connection context
1978 :returns: a tree view as an lxml document
1979 :rtype: etree._Element
1981 view = etree.Element('search', string=self._description)
1982 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
# Default <tree> view containing only the record-name field.
# NOTE(review): lines 1987, 1993, 1996-1997 elided -- the trailing
# `return view` is not visible in this excerpt.
1985 def _get_default_tree_view(self, cr, user, context=None):
1986 """ Generates a single-field tree view, based on _rec_name.
1988 :param cr: database cursor
1989 :param int user: user id
1990 :param dict context: connection context
1991 :returns: a tree view as an lxml document
1992 :rtype: etree._Element
1994 view = etree.Element('tree', string=self._description)
1995 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
# Build a default <calendar> view by probing conventional column names for
# start date, color and stop/duration; raises when no usable date column.
# NOTE(review): numbering jumps (2001, 2007, 2011, 2014-2020, 2023, 2025,
# 2029-2032, 2035, 2038, 2043, 2046-2049 missing) -- the body of the
# `set_first_of` closure, parts of its date-probing loop and the final
# `return view` are elided from this excerpt; verify upstream.
1998 def _get_default_calendar_view(self, cr, user, context=None):
1999 """ Generates a default calendar view by trying to infer
2000 calendar fields from a number of pre-set attribute names
2002 :param cr: database cursor
2003 :param int user: user id
2004 :param dict context: connection context
2005 :returns: a calendar view
2006 :rtype: etree._Element
2008 def set_first_of(seq, in_, to):
2009 """Sets the first value of ``seq`` also found in ``in_`` to
2010 the ``to`` attribute of the view being closed over.
2012 Returns whether it's found a suitable value (and set it on
2013 the attribute) or not
2021 view = etree.Element('calendar', string=self._description)
2022 etree.SubElement(view, 'field', self._rec_name_fallback(cr, user, context))
# Fall back to conventional column names when _date_name is not a column.
2024 if self._date_name not in self._columns:
2026 for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
2027 if dt in self._columns:
2028 self._date_name = dt
2033 raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
2034 view.set('date_start', self._date_name)
# Color attribute is optional: first matching user/partner column wins.
2036 set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
2037 self._columns, 'color')
# A calendar needs either an end date or a duration; fail if neither exists.
2039 if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
2040 self._columns, 'date_stop'):
2041 if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
2042 self._columns, 'date_delay'):
2044 _('Invalid Object Architecture!'),
2045 _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % self._name))
2050 # if view_id, view_type is not required
# Resolve the requested view (explicit id, context *_view_ref, best root view
# by priority, or a generated default), apply the full view-inheritance chain,
# post-process the arch (translations, access rights, workflow buttons) and
# optionally attach the toolbar actions.
# NOTE(review): the embedded numbering jumps throughout (e.g. 2068-2073, 2076,
# 2096-2097, 2107-2109, 2118-2121, 2133-2134, 2139-2141, 2143, 2146, 2153-2154,
# 2160-2164, 2166-2173, 2177, 2179-2180, 2190-2192, 2200-2202, 2207-2208,
# 2210-2211, 2215, 2217, 2221-2224, 2226, 2228, 2230, 2234-2237, 2241-2242,
# 2244, 2246, 2252, 2254, 2256, 2260-2266, 2270-2271, 2275-2278, 2282-2284,
# 2288, 2291-2292, 2295, 2303, 2306, 2308, 2310-2313) -- the `encode` def
# line, several loop/else bodies, the toolbar `clean` helper and the final
# `return result` are elided from this excerpt; verify against upstream.
2052 def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
2054 Get the detailed composition of the requested view like fields, model, view architecture
2056 :param cr: database cursor
2057 :param user: current user id
2058 :param view_id: id of the view or None
2059 :param view_type: type of the view to return if view_id is None ('form', tree', ...)
2060 :param context: context arguments, like lang, time zone
2061 :param toolbar: true to include contextual actions
2062 :param submenu: deprecated
2063 :return: dictionary describing the composition of the requested view (including inherited views and extensions)
2064 :raise AttributeError:
2065 * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
2066 * if some tag other than 'position' is found in parent view
2067 :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
# -- helper: coerce unicode arch strings to utf-8 bytes for lxml parsing --
2074 if isinstance(s, unicode):
2075 return s.encode('utf8')
# -- helper: raise AttributeError naming both parent and child view xml_ids --
2078 def raise_view_error(error_msg, child_view_id):
2079 view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
2080 error_msg = error_msg % {'parent_xml_id': view.xml_id}
2081 raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
2082 % (child_view.xml_id, self._name, error_msg))
# -- helper: find the node in a parent arch matched by an inheritance spec --
2084 def locate(source, spec):
2085 """ Locate a node in a source (parent) architecture.
2087 Given a complete source (parent) architecture (i.e. the field
2088 `arch` in a view), and a 'spec' node (a node in an inheriting
2089 view that specifies the location in the source view of what
2090 should be changed), return (if it exists) the node in the
2091 source view matching the specification.
2093 :param source: a parent architecture to modify
2094 :param spec: a modifying node in an inheriting view
2095 :return: a node in the source matching the spec
2098 if spec.tag == 'xpath':
2099 nodes = source.xpath(spec.get('expr'))
2100 return nodes[0] if nodes else None
2101 elif spec.tag == 'field':
2102 # Only compare the field name: a field can be only once in a given view
2103 # at a given level (and for multilevel expressions, we should use xpath
2104 # inheritance spec anyway).
2105 for node in source.getiterator('field'):
2106 if node.get('name') == spec.get('name'):
# Generic tag match: every spec attribute except position/version must agree.
2110 for node in source.getiterator(spec.tag):
2111 if isinstance(node, SKIPPED_ELEMENT_TYPES):
2113 if all(node.get(attr) == spec.get(attr) \
2114 for attr in spec.attrib
2115 if attr not in ('position','version')):
2116 # Version spec should match parent's root element's version
2117 if spec.get('version') and spec.get('version') != source.get('version'):
# -- helper: apply one inheriting view's spec nodes to the parent arch --
2122 def apply_inheritance_specs(source, specs_arch, inherit_id=None):
2123 """ Apply an inheriting view.
2125 Apply to a source architecture all the spec nodes (i.e. nodes
2126 describing where and what changes to apply to some parent
2127 architecture) given by an inheriting view.
2129 :param source: a parent architecture to modify
2130 :param specs_arch: a modifying architecture in an inheriting view
2131 :param inherit_id: the database id of the inheriting view
2132 :return: a modified source where the specs are applied
2135 specs_tree = etree.fromstring(encode(specs_arch))
2136 # Queue of specification nodes (i.e. nodes describing where and
2137 # changes to apply to some parent architecture).
2138 specs = [specs_tree]
2142 if isinstance(spec, SKIPPED_ELEMENT_TYPES):
# A <data> wrapper just contributes its children as further specs.
2144 if spec.tag == 'data':
2145 specs += [ c for c in specs_tree ]
2147 node = locate(source, spec)
2148 if node is not None:
2149 pos = spec.get('position', 'inside')
2150 if pos == 'replace':
# Replacing the root node swaps the whole source arch.
2151 if node.getparent() is None:
2152 source = copy.deepcopy(spec[0])
2155 node.addprevious(child)
2156 node.getparent().remove(node)
2157 elif pos == 'attributes':
# <attribute name="x">value</attribute>: empty text deletes the attribute.
2158 for child in spec.getiterator('attribute'):
2159 attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
2161 node.set(attribute[0], attribute[1])
2163 del(node.attrib[attribute[0]])
2165 sib = node.getnext()
2169 elif pos == 'after':
2174 sib.addprevious(child)
2175 elif pos == 'before':
2176 node.addprevious(child)
2178 raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
# Spec matched nothing: rebuild the spec tag as text for the error message.
2181 ' %s="%s"' % (attr, spec.get(attr))
2182 for attr in spec.attrib
2183 if attr != 'position'
2185 tag = "<%s%s>" % (spec.tag, attrs)
2186 if spec.get('version') and spec.get('version') != source.get('version'):
2187 raise_view_error("Mismatching view API version for element '%s': %r vs %r in parent view '%%(parent_xml_id)s'" % \
2188 (tag, spec.get('version'), source.get('version')), inherit_id)
2189 raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
# -- helper: recursively apply every view inheriting (directly or not) --
2193 def apply_view_inheritance(cr, user, source, inherit_id):
2194 """ Apply all the (directly and indirectly) inheriting views.
2196 :param source: a parent architecture to modify (with parent
2197 modifications already applied)
2198 :param inherit_id: the database view_id of the parent view
2199 :return: a modified source where all the modifying architecture
2203 sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name, context=context)
2204 for (view_arch, view_id) in sql_inherit:
2205 source = apply_inheritance_specs(source, view_arch, view_id)
2206 source = apply_view_inheritance(cr, user, source, view_id)
# -- main flow: pick the view record, then build the result dict --
2209 result = {'type': view_type, 'model': self._name}
2212 parent_view_model = None
2213 view_ref = context.get(view_type + '_view_ref')
2214 # Search for a root (i.e. without any parent) view.
# A context '<type>_view_ref' ('module.name') overrides the default lookup.
2216 if view_ref and not view_id:
2218 module, view_ref = view_ref.split('.', 1)
2219 cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
2220 view_ref_res = cr.fetchone()
2222 view_id = view_ref_res[0]
2225 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2227 WHERE id=%s""", (view_id,))
# No explicit view_id: take the highest-priority root view of this type.
2229 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2231 WHERE model=%s AND type=%s AND inherit_id IS NULL
2232 ORDER BY priority""", (self._name, view_type))
2233 sql_res = cr.dictfetchone()
# Climb to the root view when the found view is itself an extension.
2238 view_id = sql_res['inherit_id'] or sql_res['id']
2239 parent_view_model = sql_res['model']
2240 if not sql_res['inherit_id']:
2243 # if a view was found
2245 source = etree.fromstring(encode(sql_res['arch']))
2247 arch=apply_view_inheritance(cr, user, source, sql_res['id']),
2248 type=sql_res['type'],
2249 view_id=sql_res['id'],
2250 name=sql_res['name'],
2251 field_parent=sql_res['field_parent'] or False)
2253 # otherwise, build some kind of default view
2255 view = getattr(self, '_get_default_%s_view' % view_type)(
2257 except AttributeError:
2258 # what happens here, graph case?
2259 raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
# Pass the parent view's model along so translations can also be looked up
# under the alternative model name (mixed-inheritance situations).
2267 if parent_view_model != self._name:
2268 ctx = context.copy()
2269 ctx['base_model_name'] = parent_view_model
2272 xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
2273 result['arch'] = xarch
2274 result['fields'] = xfields
# -- toolbar: gather print/action/relate entries bound to this model --
2279 for key in ('report_sxw_content', 'report_rml_content',
2280 'report_sxw', 'report_rml',
2281 'report_sxw_content_data', 'report_rml_content_data'):
2285 ir_values_obj = self.pool.get('ir.values')
2286 resprint = ir_values_obj.get(cr, user, 'action',
2287 'client_print_multi', [(self._name, False)], False,
2289 resaction = ir_values_obj.get(cr, user, 'action',
2290 'client_action_multi', [(self._name, False)], False,
2293 resrelate = ir_values_obj.get(cr, user, 'action',
2294 'client_action_relate', [(self._name, False)], False,
# multi-actions only make sense on tree views; filter the rest out.
2296 resaction = [clean(action) for action in resaction
2297 if view_type == 'tree' or not action[2].get('multi')]
2298 resprint = [clean(print_) for print_ in resprint
2299 if view_type == 'tree' or not print_[2].get('multi')]
2300 #When multi="True" set it will display only in More of the list view
2301 resrelate = [clean(action) for action in resrelate
2302 if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')]
2304 for x in itertools.chain(resprint, resaction, resrelate):
2305 x['string'] = x['name']
2307 result['toolbar'] = {
2309 'action': resaction,
# Public alias so subclasses/callers can reach the name-mangled
# __view_look_dom_arch without triggering Python name mangling.
2314 _view_look_dom_arch = __view_look_dom_arch
# Count records matching a search domain by delegating to search(count=True).
# NOTE(review): lines 2317-2318 and 2321-2323 are elided -- the docstring and
# both return statements (list length vs. raw count) are missing from this
# excerpt; verify against upstream.
2316 def search_count(self, cr, user, args, context=None):
2319 res = self.search(cr, user, args, context=context, count=True)
# Defensive: some overrides of search() may return a list even with count=True.
2320 if isinstance(res, list):
# Public search entry point: documentation wrapper delegating to _search().
2324 def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
2326 Search for records based on a search domain.
2328 :param cr: database cursor
2329 :param user: current user id
2330 :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
2331 :param offset: optional number of results to skip in the returned values (default: 0)
2332 :param limit: optional max number of records to return (default: **None**)
2333 :param order: optional columns to sort by (default: self._order=id )
2334 :param context: optional context arguments, like lang, time zone
2335 :type context: dictionary
2336 :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
2337 :return: id or list of ids of records matching the criteria
2338 :rtype: integer or list of integers
2339 :raise AccessError: * if user tries to bypass access rules for read on the requested object.
2341 **Expressing a search domain (args)**
2343 Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:
2345 * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
2346 * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
2347 The semantics of most of these operators are obvious.
2348 The ``child_of`` operator will look for records who are children or grand-children of a given record,
2349 according to the semantics of this model (i.e following the relationship field named by
2350 ``self._parent_name``, by default ``parent_id``.
2351 * **value** must be a valid value to compare with the values of **field_name**, depending on its type.
2353 Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
2354 These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
2355 Be very careful about this when you combine them the first time.
2357 Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::
2359 [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))
2361 The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::
2363 (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
# Actual work (access rules, SQL generation) happens in _search().
2366 return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
# Return (id, display_name) pairs; falls back to "model,id" when the model
# has no usable _rec_name column.
# NOTE(review): lines 2373-2375, 2377-2379, 2381-2382 are elided -- parts of
# the docstring and the single-id normalization body are missing here.
2368 def name_get(self, cr, user, ids, context=None):
2369 """Returns the preferred display value (text representation) for the records with the
2370 given ``ids``. By default this will be the value of the ``name`` column, unless
2371 the model implements a custom behavior.
2372 Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
2376 :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
# Accept a scalar id for convenience (normalization body elided above).
2380 if isinstance(ids, (int, long)):
2383 if self._rec_name in self._all_columns:
2384 rec_name_column = self._all_columns[self._rec_name].column
# Delegate rendering to the column type (e.g. m2o display names).
2385 return [(r['id'], rec_name_column.as_display_name(cr, user, self, r[self._rec_name], context=context))
2386 for r in self.read(cr, user, ids, [self._rec_name],
2387 load='_classic_write', context=context)]
2388 return [(id, "%s,%s" % (self._name, id)) for id in ids]
# Public name-based search: documentation wrapper over _name_search().
2390 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
2391 """Search for records that have a display name matching the given ``name`` pattern if compared
2392 with the given ``operator``, while also matching the optional search domain (``args``).
2393 This is used for example to provide suggestions based on a partial value for a relational
2395 Sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
2398 This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
2399 and then :meth:`~.name_get` on the result of the search.
2401 :param list args: optional search domain (see :meth:`~.search` for syntax),
2402 specifying further restrictions
2403 :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
2405 :param int limit: optional max number of records to return
2407 :return: list of pairs ``(id,text_repr)`` for all matching records.
2409 return self._name_search(cr, user, name, args, operator, context, limit)
# Create a record from just its display name and return its name_get pair.
2411 def name_create(self, cr, uid, name, context=None):
2412 """Creates a new record by calling :meth:`~.create` with only one
2413 value provided: the name of the new record (``_rec_name`` field).
2414 The new record will also be initialized with any default values applicable
2415 to this model, or provided through the context. The usual behavior of
2416 :meth:`~.create` applies.
2417 Similarly, this method may raise an exception if the model has multiple
2418 required fields and some do not have default values.
2420 :param name: name of the record to create
2423 :return: the :meth:`~.name_get` pair value for the newly-created record.
2425 rec_id = self.create(cr, uid, {self._rec_name: name}, context)
2426 return self.name_get(cr, uid, [rec_id], context)[0]
2428 # private implementation of name_search, allows passing a dedicated user for the name_get part to
2429 # solve some access rights issues
# NOTE(review): lines 2431-2435 and 2442-2443 are elided -- args
# normalization and the final return are missing from this excerpt.
2430 def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
2436 # optimize out the default criterion of ``ilike ''`` that matches everything
2437 if not (name == '' and operator == 'ilike'):
2438 args += [(self._rec_name, operator, name)]
# Search runs as `user`; name_get may run as a different uid to work
# around access-rights issues on the display-name side.
2439 access_rights_uid = name_get_uid or user
2440 ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
2441 res = self.name_get(cr, access_rights_uid, ids, context)
# Read translated field labels (per language) for this model, merging in the
# labels of _inherits parent models.
# NOTE(review): lines 2445-2446, 2448, 2451, 2453, 2456, 2458, 2463-2464,
# 2468-2469 are elided -- the res dict init, the language/field loop headers
# and the final return are missing from this excerpt.
2444 def read_string(self, cr, uid, id, langs, fields=None, context=None):
2447 self.pool.get('ir.translation').check_access_rights(cr, uid, 'read')
# Default to every own + inherited column when no field list is given.
2449 fields = self._columns.keys() + self._inherit_fields.keys()
2450 #FIXME: collect all calls to _get_source into one SQL call.
2452 res[lang] = {'code': lang}
2454 if f in self._columns:
2455 res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
2457 res[lang][f] = res_trans
# Fall back to the untranslated column label.
2459 res[lang][f] = self._columns[f].string
# Merge labels coming from _inherits parents.
2460 for table in self._inherits:
2461 cols = intersect(self._inherit_fields.keys(), fields)
2462 res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
2465 res[lang]['code'] = lang
2466 for f in res2[lang]:
2467 res[lang][f] = res2[lang][f]
# Write translated field labels for this model and propagate the values to
# _inherits parent models.
# NOTE(review): lines 2473-2474, 2480, 2482-2483 are elided -- the
# language/field loop headers and the final return are missing here.
2470 def write_string(self, cr, uid, id, langs, vals, context=None):
2471 self.pool.get('ir.translation').check_access_rights(cr, uid, 'write')
2472 #FIXME: try to only call the translation in one SQL
2475 if field in self._columns:
2476 src = self._columns[field].string
2477 self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
# Forward the inherited-field labels to each _inherits parent model.
2478 for table in self._inherits:
2479 cols = intersect(self._inherit_fields.keys(), vals)
2481 self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
# Complete a create()/copy() values dict with defaults for every column not
# already provided, normalizing m2m/o2m defaults to command-tuple form.
# NOTE(review): lines 2496, 2500, 2510-2512 are elided -- the early-return
# for no missing defaults, the defaults loop header and the final return are
# missing from this excerpt.
2484 def _add_missing_default_values(self, cr, uid, values, context=None):
2485 missing_defaults = []
2486 avoid_tables = [] # avoid overriding inherited values when parent is set
# When the parent link field itself is provided, the parent record supplies
# its own values: skip defaults for that parent table's fields.
2487 for tables, parent_field in self._inherits.items():
2488 if parent_field in values:
2489 avoid_tables.append(tables)
2490 for field in self._columns.keys():
2491 if not field in values:
2492 missing_defaults.append(field)
2493 for field in self._inherit_fields.keys():
2494 if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
2495 missing_defaults.append(field)
2497 if len(missing_defaults):
2498 # override defaults with the provided values, never allow the other way around
2499 defaults = self.default_get(cr, uid, missing_defaults, context)
# m2m default given as a plain id list -> wrap in a (6, 0, ids) command.
2501 if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
2502 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
2503 and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
2504 defaults[dv] = [(6, 0, defaults[dv])]
# o2m default given as dicts -> wrap each in a (0, 0, values) command.
2505 if (dv in self._columns and self._columns[dv]._type == 'one2many' \
2506 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
2507 and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
2508 defaults[dv] = [(0, 0, x) for x in defaults[dv]]
2509 defaults.update(values)
# Flag the registry's ormcache as cleared; silently a no-op when the model
# has no _ormcache attribute.
# NOTE(review): lines 2515, 2518-2519, 2521, 2524-2526 are elided -- the
# try statement, the actual cache-clearing call and the except body are
# missing from this excerpt; verify against upstream.
2513 def clear_caches(self):
2514 """ Clear the caches
2516 This clears the caches associated to methods decorated with
2517 ``tools.ormcache`` or ``tools.ormcache_multi``.
2520 getattr(self, '_ormcache')
2522 self.pool._any_cache_cleared = True
2523 except AttributeError:
# Merge read_group results with the full list of groups declared by
# _group_by_full[groupby], so empty groups still appear (e.g. kanban columns).
# NOTE(review): lines 2531, 2537, 2543-2544, 2549, 2553-2554, 2560, 2569,
# 2589, 2591-2593, 2595 are elided -- result/known_values initialization,
# parts of append_left/append_right and the final return are missing here.
2527 def _read_group_fill_results(self, cr, uid, domain, groupby, groupby_list, aggregated_fields,
2528 read_group_result, read_group_order=None, context=None):
2529 """Helper method for filling in empty groups for all possible values of
2530 the field being grouped by"""
2532 # self._group_by_full should map groupable fields to a method that returns
2533 # a list of all aggregated values that we want to display for this field,
2534 # in the form of a m2o-like pair (key,label).
2535 # This is useful to implement kanban views for instance, where all columns
2536 # should be displayed even if they don't contain any record.
2538 # Grab the list of all groups that should be displayed, including all present groups
2539 present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
# Run as superuser so group headers are visible even without read access.
2540 all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
2541 read_group_order=read_group_order,
2542 access_rights_uid=openerp.SUPERUSER_ID,
# Template row for groups that have no records: all aggregates False, count 0.
2545 result_template = dict.fromkeys(aggregated_fields, False)
2546 result_template[groupby + '_count'] = 0
2547 if groupby_list and len(groupby_list) > 1:
2548 result_template['__context'] = {'group_by': groupby_list[1:]}
2550 # Merge the left_side (current results as dicts) with the right_side (all
2551 # possible values as m2o pairs). Both lists are supposed to be using the
2552 # same ordering, and can be merged in one pass.
# Emit an actual read_group row, deduplicated on its group key.
2555 def append_left(left_side):
2556 grouped_value = left_side[groupby] and left_side[groupby][0]
2557 if not grouped_value in known_values:
2558 result.append(left_side)
2559 known_values[grouped_value] = left_side
2561 count_attr = groupby + '_count'
2562 known_values[grouped_value].update({count_attr: left_side[count_attr]})
# Emit an empty group row built from the template for an absent m2o pair.
2563 def append_right(right_side):
2564 grouped_value = right_side[0]
2565 if not grouped_value in known_values:
2566 line = dict(result_template)
2567 line[groupby] = right_side
2568 line['__domain'] = [(groupby,'=',grouped_value)] + domain
2570 known_values[grouped_value] = line
# Classic two-pointer merge over both (identically ordered) lists.
2571 while read_group_result or all_groups:
2572 left_side = read_group_result[0] if read_group_result else None
2573 right_side = all_groups[0] if all_groups else None
2574 assert left_side is None or left_side[groupby] is False \
2575 or isinstance(left_side[groupby], (tuple,list)), \
2576 'M2O-like pair expected, got %r' % left_side[groupby]
2577 assert right_side is None or isinstance(right_side, (tuple,list)), \
2578 'M2O-like pair expected, got %r' % right_side
2579 if left_side is None:
2580 append_right(all_groups.pop(0))
2581 elif right_side is None:
2582 append_left(read_group_result.pop(0))
2583 elif left_side[groupby] == right_side:
2584 append_left(read_group_result.pop(0))
2585 all_groups.pop(0) # discard right_side
2586 elif not left_side[groupby] or not left_side[groupby][0]:
2587 # left side == "Undefined" entry, not present on right_side
2588 append_left(read_group_result.pop(0))
2590 append_right(all_groups.pop(0))
# Annotate each row with the fold state reported by _group_by_full.
2594 r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
# Grouped read: builds a GROUP BY SQL query over the (rule-filtered) domain,
# aggregates numeric fields, post-processes date groups into month buckets,
# and optionally fills in empty groups via _read_group_fill_results.
# NOTE(review): the embedded numbering jumps throughout (e.g. 2621-2622, 2625,
# 2627, 2630, 2634, 2638-2639, 2643, 2646, 2648, 2657, 2659, 2663, 2665,
# 2671-2672, 2675, 2677, 2683, 2685-2686, 2691-2692, 2695, 2698-2699, 2704,
# 2706-2708, 2717, 2725-2726, 2730-2733) -- the non-group_by_no_leaf SQL
# branch, parts of the flist assembly and the final return are elided from
# this excerpt; verify against upstream.
2597 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
2599 Get the list of records in list view grouped by the given ``groupby`` fields
2601 :param cr: database cursor
2602 :param uid: current user id
2603 :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
2604 :param list fields: list of fields present in the list view specified on the object
2605 :param list groupby: fields by which the records will be grouped
2606 :param int offset: optional number of records to skip
2607 :param int limit: optional max number of records to return
2608 :param dict context: context arguments, like lang, time zone
2609 :param list orderby: optional ``order by`` specification, for
2610 overriding the natural sort ordering of the
2611 groups, see also :py:meth:`~osv.osv.osv.search`
2612 (supported only for many2one fields currently)
2613 :return: list of dictionaries(one dictionary for each record) containing:
2615 * the values of fields grouped by the fields in ``groupby`` argument
2616 * __domain: list of tuples specifying the search criteria
2617 * __context: dictionary with argument like ``groupby``
2618 :rtype: [{'field_name_1': value, ...]
2619 :raise AccessError: * if user has no read rights on the requested object
2620 * if user tries to bypass access rules for read on the requested object
2623 context = context or {}
2624 self.check_access_rights(cr, uid, 'read')
2626 fields = self._columns.keys()
# Build the WHERE from the domain, then layer record rules on top.
2628 query = self._where_calc(cr, uid, domain, context=context)
2629 self._apply_ir_rules(cr, uid, query, 'read', context=context)
2631 # Take care of adding join(s) if groupby is an '_inherits'ed field
2632 groupby_list = groupby
2633 qualified_groupby_field = groupby
# Only the first groupby level is processed here; the rest is passed down
# via __context['group_by'] for the client to drill into.
2635 if isinstance(groupby, list):
2636 groupby = groupby[0]
2637 qualified_groupby_field = self._inherits_join_calc(groupby, query)
2640 assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
2641 groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
2642 assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
2644 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
2645 fget = self.fields_get(cr, uid, fields)
2647 group_count = group_by = groupby
# Build the SELECT expression for the group key; dates collapse to months,
# booleans coalesce NULL to false.
2649 if fget.get(groupby):
2650 groupby_type = fget[groupby]['type']
2651 if groupby_type in ('date', 'datetime'):
2652 qualified_groupby_field = "to_char(%s,'yyyy-mm')" % qualified_groupby_field
2653 flist = "%s as %s " % (qualified_groupby_field, groupby)
2654 elif groupby_type == 'boolean':
2655 qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
2656 flist = "%s as %s " % (qualified_groupby_field, groupby)
2658 flist = qualified_groupby_field
2660 # Don't allow arbitrary values, as this would be a SQL injection vector!
2661 raise except_orm(_('Invalid group_by'),
2662 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
# Aggregate only plain stored numeric columns (never id/sequence).
2664 aggregated_fields = [
2666 if f not in ('id', 'sequence')
2667 if fget[f]['type'] in ('integer', 'float')
2668 if (f in self._columns and getattr(self._columns[f], '_classic_write'))]
2669 for f in aggregated_fields:
2670 group_operator = fget[f].get('group_operator', 'sum')
2673 qualified_field = '"%s"."%s"' % (self._table, f)
2674 flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)
2676 gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''
2678 from_clause, where_clause, where_clause_params = query.get_sql()
2679 where_clause = where_clause and ' WHERE ' + where_clause
2680 limit_str = limit and ' limit %d' % limit or ''
2681 offset_str = offset and ' offset %d' % offset or ''
2682 if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
# The SELECT list (flist) is composed of safe identifiers assembled above,
# not user input; the domain values go through where_clause_params.
2684 cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
# Normalize SQL NULLs to False, keyed by each group's representative min(id).
2687 for r in cr.dictfetchall():
2688 for fld, val in r.items():
2689 if val is None: r[fld] = False
2690 alldata[r['id']] = r
2693 order = orderby or groupby
2694 data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=order, context=context)
2696 # the IDs of records that have groupby field value = False or '' should be included too
2697 data_ids += set(alldata.keys()).difference(data_ids)
2700 data = self.read(cr, uid, data_ids, [groupby], context=context)
2701 # restore order of the search as read() uses the default _order (this is only for groups, so the footprint of data should be small):
2702 data_dict = dict((d['id'], d[groupby] ) for d in data)
2703 result = [{'id': i, groupby: data_dict[i]} for i in data_ids]
2705 result = [{'id': i} for i in data_ids]
# Attach per-group drill-down __domain/__context and pretty-print date groups.
2709 d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
2710 if not isinstance(groupby_list, (str, unicode)):
2711 if groupby or not context.get('group_by_no_leaf', False):
2712 d['__context'] = {'group_by': groupby_list[1:]}
2713 if groupby and groupby in fget:
2714 if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
# Expand the 'yyyy-mm' group key into a localized month label and a
# [first day, last day] date range for the drill-down domain.
2715 dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
2716 days = calendar.monthrange(dt.year, dt.month)[1]
2718 date_value = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d')
2719 d[groupby] = babel.dates.format_date(
2720 date_value, format='MMMM yyyy', locale=context.get('lang', 'en_US'))
2721 d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
2722 (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
2723 del alldata[d['id']][groupby]
2724 d.update(alldata[d['id']])
# Models opting into _group_by_full get their empty groups filled in.
2727 if groupby and groupby in self._group_by_full:
2728 result = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
2729 aggregated_fields, result, read_group_order=order,
2734 def _inherits_join_add(self, current_model, parent_model_name, query):
2736 Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
2737 :param current_model: current model object
2738 :param parent_model_name: name of the parent model for which the clauses should be added
2739 :param query: query object on which the JOIN should be added
2741 inherits_field = current_model._inherits[parent_model_name]
2742 parent_model = self.pool.get(parent_model_name)
2743 parent_alias, parent_alias_statement = query.add_join((current_model._table, parent_model._table, inherits_field, 'id', inherits_field), implicit=True)
def _inherits_join_calc(self, field, query):
    """
    Adds missing table select and join clause(s) to ``query`` for reaching
    the field coming from an '_inherits' parent table (no duplicates).

    :param field: name of inherited field to reach
    :param query: query object on which the JOIN should be added
    :return: qualified name of field, to be used in SELECT clause
    """
    current_table = self
    parent_alias = '"%s"' % current_table._table
    # Walk up the _inherits chain until the model that actually owns the
    # column is reached, adding one JOIN per hop (de-duplicated by add_join).
    while field in current_table._inherit_fields and not field in current_table._columns:
        parent_model_name = current_table._inherit_fields[field][0]
        parent_table = self.pool.get(parent_model_name)
        parent_alias = self._inherits_join_add(current_table, parent_model_name, query)
        current_table = parent_table
    return '%s."%s"' % (parent_alias, field)
def _parent_store_compute(self, cr):
    """Recompute the parent_left/parent_right nested-set values of the whole
    table, used to make hierarchical (child_of) searches fast.

    :return: True once done; None when _parent_store is disabled
    """
    if not self._parent_store:
        return
    _logger.info('Computing parent left and right for table %s...', self._table)
    def browse_rec(root, pos=0):
        # Depth-first traversal: record ``root`` with interval [pos, pos2].
        where = self._parent_name+'='+str(root)
        if not root:
            where = self._parent_name+' IS NULL'
        if self._parent_order:
            where += ' order by '+self._parent_order
        cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
        # BUG FIX: pos2 was used without being initialized
        pos2 = pos + 1
        for id in cr.fetchall():
            pos2 = browse_rec(id[0], pos2)
        cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
        # BUG FIX: the recursive accumulator must be returned to the caller
        return pos2 + 1
    query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
    if self._parent_order:
        query += ' order by ' + self._parent_order
    # BUG FIX: the driver loop never initialized pos nor ran the root query
    pos = 0
    cr.execute(query)
    for (root,) in cr.fetchall():
        pos = browse_rec(root, pos)
    return True
def _update_store(self, cr, f, k):
    """Recompute and persist the values of stored function field ``k`` for
    every existing record, working in chunks of 40 ids to bound memory use.

    :param f: the fields.function column object (provides .get())
    :param k: name of the column in self._columns
    """
    _logger.info("storing computed values of fields.function '%s'", k)
    ss = self._columns[k]._symbol_set
    update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
    cr.execute('select id from '+self._table)
    ids_lst = map(lambda x: x[0], cr.fetchall())
    # BUG FIX: the chunking loop was lost -- ids were sliced away without
    # ever being processed. Process 40 ids at a time until exhausted.
    while ids_lst:
        iids = ids_lst[:40]
        ids_lst = ids_lst[40:]
        res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
        for key, val in res.items():
            # if val is a many2one, just write the ID
            if type(val) == tuple:
                # BUG FIX: the (id, name) tuple must be unwrapped to the id
                val = val[0]
            if val is not False:
                cr.execute(update_query, (ss[1](val), key))
2809 def _check_selection_field_value(self, cr, uid, field, value, context=None):
2810 """Raise except_orm if value is not among the valid values for the selection field"""
2811 if self._columns[field]._type == 'reference':
2812 val_model, val_id_str = value.split(',', 1)
2815 val_id = long(val_id_str)
2819 raise except_orm(_('ValidateError'),
2820 _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
2824 if isinstance(self._columns[field].selection, (tuple, list)):
2825 if val in dict(self._columns[field].selection):
2827 elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
2829 raise except_orm(_('ValidateError'),
2830 _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
def _check_removed_columns(self, cr, log=False):
    """Drop the NOT NULL constraint of database columns that no longer have a
    corresponding stored field in ``_columns``.

    :param log: when True, log every column found in the table but absent
                from the model (previously this parameter was ignored)
    """
    # iterate on the database columns to drop the NOT NULL constraints
    # of fields which were required but have been removed (or will be added by another module)
    columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
    columns += MAGIC_COLUMNS
    # BUG FIX: removed the stray trailing comma that turned this statement
    # into a pointless one-element tuple expression
    cr.execute("SELECT a.attname, a.attnotnull"
               " FROM pg_class c, pg_attribute a"
               " WHERE c.relname=%s"
               " AND c.oid=a.attrelid"
               " AND a.attisdropped=%s"
               " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
               " AND a.attname NOT IN %s", (self._table, False, tuple(columns)))

    for column in cr.dictfetchall():
        if log:
            _logger.debug("column %s is in the table %s but not in the corresponding object %s",
                          column['attname'], self._table, self._name)
        if column['attnotnull']:
            cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
            _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                          self._table, column['attname'])
2854 def _save_constraint(self, cr, constraint_name, type):
2856 Record the creation of a constraint for this model, to make it possible
2857 to delete it later when the module is uninstalled. Type can be either
2858 'f' or 'u' depending on the constraint being a foreign key or not.
2860 if not self._module:
2861 # no need to save constraints for custom models as they're not part
2864 assert type in ('f', 'u')
2866 SELECT 1 FROM ir_model_constraint, ir_module_module
2867 WHERE ir_model_constraint.module=ir_module_module.id
2868 AND ir_model_constraint.name=%s
2869 AND ir_module_module.name=%s
2870 """, (constraint_name, self._module))
2873 INSERT INTO ir_model_constraint
2874 (name, date_init, date_update, module, model, type)
2875 VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
2876 (SELECT id FROM ir_module_module WHERE name=%s),
2877 (SELECT id FROM ir_model WHERE model=%s), %s)""",
2878 (constraint_name, self._module, self._name, type))
2880 def _save_relation_table(self, cr, relation_table):
2882 Record the creation of a many2many for this model, to make it possible
2883 to delete it later when the module is uninstalled.
2886 SELECT 1 FROM ir_model_relation, ir_module_module
2887 WHERE ir_model_relation.module=ir_module_module.id
2888 AND ir_model_relation.name=%s
2889 AND ir_module_module.name=%s
2890 """, (relation_table, self._module))
2892 cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
2893 VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
2894 (SELECT id FROM ir_module_module WHERE name=%s),
2895 (SELECT id FROM ir_model WHERE model=%s))""",
2896 (relation_table, self._module, self._name))
2898 # checked version: for direct m2o starting from `self`
def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
    """Queue creation of a FK from ``self._table.source_field`` towards
    ``dest_model``'s table, enforcing the transient-model rules.
    """
    src_transient = self.is_transient()
    dst_transient = dest_model.is_transient()
    assert src_transient or not dst_transient, \
        'Many2One relationships from non-transient Model to TransientModel are forbidden'
    on_delete_rule = ondelete
    if src_transient and not dst_transient:
        # TransientModel relationships to regular Models are annoying
        # usually because they could block deletion due to the FKs.
        # So unless stated otherwise we default them to ondelete=cascade.
        on_delete_rule = on_delete_rule or 'cascade'
    fk_def = (self._table, source_field, dest_model._table, on_delete_rule or 'set null')
    self._foreign_keys.add(fk_def)
    _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
2911 # unchecked version: for custom cases, such as m2m relationships
def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
    """Queue creation of a FK without the transient-model checks (used for
    m2m relation tables whose source is not ``self._table``).
    """
    delete_rule = ondelete or 'set null'
    fk_def = (source_table, source_field, dest_model._table, delete_rule)
    self._foreign_keys.add(fk_def)
    _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
2917 def _drop_constraint(self, cr, source_table, constraint_name):
2918 cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
    """Verify the FK constraint(s) on ``source_table.source_field`` and drop
    and re-create them when they do not match the expected target table or
    ON DELETE rule.
    """
    # Find FK constraint(s) currently established for the m2o field,
    # and see whether they are stale or not
    cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
                         cl2.relname as foreign_table
                  FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                       pg_attribute as att1, pg_attribute as att2
                  WHERE con.conrelid = cl1.oid
                  AND cl1.relname = %s
                  AND con.confrelid = cl2.oid
                  AND array_lower(con.conkey, 1) = 1
                  AND con.conkey[1] = att1.attnum
                  AND att1.attrelid = cl1.oid
                  AND att1.attname = %s
                  AND array_lower(con.confkey, 1) = 1
                  AND con.confkey[1] = att2.attnum
                  AND att2.attrelid = cl2.oid
                  AND att2.attname = %s
                  AND con.contype = 'f'""", (source_table, source_field, 'id'))
    constraints = cr.dictfetchall()
    if constraints:
        if len(constraints) == 1:
            # Is it the right constraint?
            # BUG FIX: ``cons`` was referenced without being assigned
            cons = constraints[0]
            if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
                    or cons['foreign_table'] != dest_model._table:
                # Wrong FK: drop it and recreate
                _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
                              source_table, cons['constraint_name'])
                self._drop_constraint(cr, source_table, cons['constraint_name'])
            else:
                # it's all good, nothing to do!
                # BUG FIX: without this early return a correct FK was
                # needlessly dropped and re-created on every init
                return
        else:
            # Multiple FKs found for the same field, drop them all, and re-create
            for cons in constraints:
                _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
                              source_table, cons['constraint_name'])
                self._drop_constraint(cr, source_table, cons['constraint_name'])

    # (re-)create the FK
    self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
def _auto_init(self, cr, context=None):
    """

    Call _field_create and, unless _auto is False:

    - create the corresponding table in database for the model,
    - possibly add the parent columns in database,
    - possibly add the columns 'create_uid', 'create_date', 'write_uid',
      'write_date' in database if _log_access is True (the default),
    - report on database columns no more existing in _columns,
    - remove no more existing not null constraints,
    - alter existing database columns to match _columns,
    - create database tables to match _columns,
    - add database indices to match _columns,
    - save in self._foreign_keys a list of foreign keys to create (see
      _auto_end).

    """
    self._foreign_keys = set()
    raise_on_invalid_object_name(self._name)
    if context is None:
        context = {}
    store_compute = False
    # (order, callable, args) triples returned to the caller, which runs
    # them once every model has been initialized
    todo_end = []
    update_custom_fields = context.get('update_custom_fields', False)
    self._field_create(cr, context=context)
    create = not self._table_exist(cr)
    if getattr(self, '_auto', True):

        if create:
            self._create_table(cr)

        cr.commit()
        if self._parent_store:
            if not self._parent_columns_exist(cr):
                self._create_parent_columns(cr)
                store_compute = True

        # Create the create_uid, create_date, write_uid, write_date, columns if desired.
        if self._log_access:
            self._add_log_columns(cr)

        self._check_removed_columns(cr, log=False)

        # iterate on the "object columns"
        column_data = self._select_column_data(cr)

        for k, f in self._columns.iteritems():
            if k in MAGIC_COLUMNS:
                continue
            # Don't update custom (also called manual) fields
            if f.manual and not update_custom_fields:
                continue

            if isinstance(f, fields.one2many):
                self._o2m_raise_on_missing_reference(cr, f)

            elif isinstance(f, fields.many2many):
                self._m2m_raise_or_create_relation(cr, f)

            else:
                res = column_data.get(k)

                # The field is not found as-is in database, try if it
                # exists with an old name.
                if not res and hasattr(f, 'oldname'):
                    res = column_data.get(f.oldname)
                    if res:
                        cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
                        res['attname'] = k
                        column_data[k] = res
                        _schema.debug("Table '%s': renamed column '%s' to '%s'",
                                      self._table, f.oldname, k)

                # The field already exists in database. Possibly
                # change its type, rename it, drop it or change its
                # constraints.
                if res:
                    f_pg_type = res['typname']
                    f_pg_size = res['size']
                    f_pg_notnull = res['attnotnull']
                    if isinstance(f, fields.function) and not f.store and\
                            not getattr(f, 'nodrop', False):
                        _logger.info('column %s (%s) in table %s removed: converted to a function !\n',
                                     k, f.string, self._table)
                        cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
                        cr.commit()
                        _schema.debug("Table '%s': dropped column '%s' with cascade",
                                      self._table, k)
                        f_obj_type = None
                    else:
                        f_obj_type = get_pg_type(f) and get_pg_type(f)[0]

                    if f_obj_type:
                        # (pg_type, field_type, new_db_type, cast_expr) tuples of
                        # type changes that can be performed in-place via a cast
                        ok = False
                        casts = [
                            ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
                            ('varchar', 'text', 'TEXT', ''),
                            ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                            ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
                            ('timestamp', 'date', 'date', '::date'),
                            ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                            ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                        ]
                        if f_pg_type == 'varchar' and f._type == 'char' and ((f.size is None and f_pg_size) or f_pg_size < f.size):
                            # size changed: migrate data through a temporary column
                            cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
                            cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
                            cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                            cr.commit()
                            _schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
                                          self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited')
                        for c in casts:
                            if (f_pg_type==c[0]) and (f._type==c[1]):
                                if f_pg_type != f_obj_type:
                                    ok = True
                                    cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                                    cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
                                    cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
                                    cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                                    cr.commit()
                                    _schema.debug("Table '%s': column '%s' changed type from %s to %s",
                                                  self._table, k, c[0], c[1])
                                break

                        if f_pg_type != f_obj_type:
                            if not ok:
                                # incompatible type change: move the data aside into
                                # a free "<col>_movedN" column and create a fresh one
                                i = 0
                                while True:
                                    newname = k + '_moved' + str(i)
                                    cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
                                        "WHERE c.relname=%s " \
                                        "AND a.attname=%s " \
                                        "AND c.oid=a.attrelid ", (self._table, newname))
                                    if not cr.fetchone()[0]:
                                        break
                                    i += 1
                                if f_pg_notnull:
                                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                                cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
                                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                                cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                                _schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
                                              self._table, k, f_pg_type, f._type, newname)

                        # if the field is required and hasn't got a NOT NULL constraint
                        if f.required and f_pg_notnull == 0:
                            # set the field to the default value if any
                            if k in self._defaults:
                                if callable(self._defaults[k]):
                                    default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                                else:
                                    default = self._defaults[k]

                                if default is not None:
                                    ss = self._columns[k]._symbol_set
                                    query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
                                    cr.execute(query, (ss[1](default),))
                            # add the NOT NULL constraint
                            cr.commit()
                            try:
                                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                                cr.commit()
                                _schema.debug("Table '%s': column '%s': added NOT NULL constraint",
                                              self._table, k)
                            except Exception:
                                msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
                                    "If you want to have it, you should update the records and execute manually:\n"\
                                    "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                                _schema.warning(msg, self._table, k, self._table, k)
                            cr.commit()
                        elif not f.required and f_pg_notnull == 1:
                            cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                            cr.commit()
                            _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                                          self._table, k)
                        # Verify index
                        indexname = '%s_%s_index' % (self._table, k)
                        cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
                        res2 = cr.dictfetchall()
                        if not res2 and f.select:
                            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                            cr.commit()
                            if f._type == 'text':
                                # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
                                msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\
                                    "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
                                    " because there is a length limit for indexable btree values!\n"\
                                    "Use a search view instead if you simply want to make the field searchable."
                                _schema.warning(msg, self._table, f._type, k)
                        if res2 and not f.select:
                            cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
                            cr.commit()
                            msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
                            _schema.debug(msg, self._table, k, f._type)

                        if isinstance(f, fields.many2one):
                            dest_model = self.pool.get(f._obj)
                            if dest_model._table != 'ir_actions':
                                self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete)

                # The field doesn't exist in database. Create it if necessary.
                else:
                    if not isinstance(f, fields.function) or f.store:
                        # add the missing field
                        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                        cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                        _schema.debug("Table '%s': added column '%s' with definition=%s",
                                      self._table, k, get_pg_type(f)[1])

                        # initialize it
                        if not create and k in self._defaults:
                            if callable(self._defaults[k]):
                                default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                            else:
                                default = self._defaults[k]

                            ss = self._columns[k]._symbol_set
                            query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
                            cr.execute(query, (ss[1](default),))
                            cr.commit()
                            _logger.debug("Table '%s': setting default value of new column %s", self._table, k)

                        # remember the functions to call for the stored fields
                        if isinstance(f, fields.function):
                            order = 10
                            if f.store is not True: # i.e. if f.store is a dict
                                order = f.store[f.store.keys()[0]][2]
                            todo_end.append((order, self._update_store, (f, k)))

                        # and add constraints if needed
                        if isinstance(f, fields.many2one):
                            if not self.pool.get(f._obj):
                                raise except_orm('Programming Error', 'There is no reference available for %s' % (f._obj,))
                            dest_model = self.pool.get(f._obj)
                            ref = dest_model._table
                            # ir_actions is inherited so foreign key doesn't work on it
                            if ref != 'ir_actions':
                                self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
                        if f.select:
                            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                        if f.required:
                            try:
                                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                                _schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
                                              self._table, k)
                            except Exception:
                                msg = "WARNING: unable to set column %s of table %s not null !\n"\
                                    "Try to re-run: openerp-server --update=module\n"\
                                    "If it doesn't work, update records and execute manually:\n"\
                                    "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                                _logger.warning(msg, k, self._table, self._table, k)
                        cr.commit()

    else:
        # _auto is False: the table is managed manually (e.g. SQL views),
        # only detect whether it already exists
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
        create = not bool(cr.fetchone())

    cr.commit() # start a new transaction

    self._add_sql_constraints(cr)

    if create:
        self._execute_sql(cr)

    if store_compute:
        self._parent_store_compute(cr)
        cr.commit()
    return todo_end
def _auto_end(self, cr, context=None):
    """ Create the foreign keys recorded by _auto_init. """
    # Each entry is a (table, column, referenced_table, on_delete_rule)
    # tuple accumulated by the _m2o_add_foreign_key_* helpers.
    for t, k, r, d in self._foreign_keys:
        cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
        # record the constraint so it can be removed at module uninstall
        self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
    # the set is only valid for one _auto_init/_auto_end cycle
    del self._foreign_keys
3246 def _table_exist(self, cr):
3247 cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
def _create_table(self, cr):
    """Create the model's table with its SERIAL primary key and attach the
    model description as the table comment.
    """
    table = self._table
    cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id))' % (table,))
    cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % table), (self._description,))
    _schema.debug("Table '%s': created", table)
3257 def _parent_columns_exist(self, cr):
3258 cr.execute("""SELECT c.relname
3259 FROM pg_class c, pg_attribute a
3260 WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
3261 """, (self._table, 'parent_left'))
3265 def _create_parent_columns(self, cr):
3266 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
3267 cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
3268 if 'parent_left' not in self._columns:
3269 _logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
3271 _schema.debug("Table '%s': added column '%s' with definition=%s",
3272 self._table, 'parent_left', 'INTEGER')
3273 elif not self._columns['parent_left'].select:
3274 _logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
3276 if 'parent_right' not in self._columns:
3277 _logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
3279 _schema.debug("Table '%s': added column '%s' with definition=%s",
3280 self._table, 'parent_right', 'INTEGER')
3281 elif not self._columns['parent_right'].select:
3282 _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
3284 if self._columns[self._parent_name].ondelete not in ('cascade', 'restrict'):
3285 _logger.error("The column %s on object %s must be set as ondelete='cascade' or 'restrict'",
3286 self._parent_name, self._name)
def _add_log_columns(self, cr):
    """Add any of the create_uid/create_date/write_uid/write_date audit
    columns (from LOG_ACCESS_COLUMNS) that are missing from the table.
    """
    for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
        # BUG FIX: this existence probe was dangling (no cr.execute) and the
        # ALTER below ran unconditionally, failing on already-present columns.
        cr.execute("""
            SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, field))
        if not cr.rowcount:
            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
            cr.commit()
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, field, field_def)
3305 def _select_column_data(self, cr):
3306 # attlen is the number of bytes necessary to represent the type when
3307 # the type has a fixed size. If the type has a varying size attlen is
3308 # -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
3309 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \
3310 "FROM pg_class c,pg_attribute a,pg_type t " \
3311 "WHERE c.relname=%s " \
3312 "AND c.oid=a.attrelid " \
3313 "AND a.atttypid=t.oid", (self._table,))
3314 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
3317 def _o2m_raise_on_missing_reference(self, cr, f):
3318 # TODO this check should be a method on fields.one2many.
3320 other = self.pool.get(f._obj)
3322 # TODO the condition could use fields_get_keys().
3323 if f._fields_id not in other._columns.keys():
3324 if f._fields_id not in other._inherit_fields.keys():
3325 raise except_orm('Programming Error', "There is no reference field '%s' found for '%s'" % (f._fields_id, f._obj,))
def _m2m_raise_or_create_relation(self, cr, f):
    """Ensure the many2many relation table for field ``f`` exists, creating
    it (with its FKs, indexes and table comment) when missing.

    :raise except_orm: if the destination model of the m2m is not in the pool
    """
    m2m_tbl, col1, col2 = f._sql_names(self)
    # record the relation table so it can be dropped at module uninstall
    self._save_relation_table(cr, m2m_tbl)
    cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
    if not cr.dictfetchall():
        if not self.pool.get(f._obj):
            raise except_orm('Programming Error', 'Many2Many destination model does not exist: `%s`' % (f._obj,))
        dest_model = self.pool.get(f._obj)
        ref = dest_model._table
        cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s"))' % (m2m_tbl, col1, col2, col1, col2))
        # create foreign key references with ondelete=cascade, unless the targets are SQL views
        cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
        if not cr.fetchall():
            self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
        cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
        if not cr.fetchall():
            self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')
        # one index per side of the relation for fast lookups in either direction
        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
        cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
        _schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
3352 def _add_sql_constraints(self, cr):
3355 Modify this model's database table constraints so they match the one in
3359 def unify_cons_text(txt):
3360 return txt.lower().replace(', ',',').replace(' (','(')
3362 for (key, con, _) in self._sql_constraints:
3363 conname = '%s_%s' % (self._table, key)
3365 self._save_constraint(cr, conname, 'u')
3366 cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
3367 existing_constraints = cr.dictfetchall()
3371 'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
3372 'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
3373 self._table, conname, con),
3374 'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
3379 'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
3380 'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
3381 'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
3387 if not existing_constraints:
3388 # constraint does not exists:
3389 sql_actions['add']['execute'] = True
3390 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
3391 elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
3392 # constraint exists but its definition has changed:
3393 sql_actions['drop']['execute'] = True
3394 sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
3395 sql_actions['add']['execute'] = True
3396 sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
3398 # we need to add the constraint:
3399 sql_actions = [item for item in sql_actions.values()]
3400 sql_actions.sort(key=lambda x: x['order'])
3401 for sql_action in [action for action in sql_actions if action['execute']]:
3403 cr.execute(sql_action['query'])
3405 _schema.debug(sql_action['msg_ok'])
3407 _schema.warning(sql_action['msg_err'])
3411 def _execute_sql(self, cr):
3412 """ Execute the SQL code from the _sql attribute (if any)."""
3413 if hasattr(self, "_sql"):
3414 for line in self._sql.split(';'):
3415 line2 = line.replace('\n', '').strip()
# Update objects that use this one to update their _inherits fields
3424 def _inherits_reload_src(self):
3425 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3426 for obj in self.pool.models.values():
3427 if self._name in obj._inherits:
3428 obj._inherits_reload()
3431 def _inherits_reload(self):
3432 """ Recompute the _inherit_fields mapping.
3434 This will also call itself on each inherits'd child model.
3438 for table in self._inherits:
3439 other = self.pool.get(table)
3440 for col in other._columns.keys():
3441 res[col] = (table, self._inherits[table], other._columns[col], table)
3442 for col in other._inherit_fields.keys():
3443 res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
3444 self._inherit_fields = res
3445 self._all_columns = self._get_column_infos()
3446 self._inherits_reload_src()
def _get_column_infos(self):
    """Returns a dict mapping all fields names (direct fields and
    inherited field via _inherits) to a ``column_info`` struct
    giving detailed columns """
    # BUG FIX: ``result`` was never initialized nor returned, although the
    # caller (_inherits_reload) assigns the return value to _all_columns.
    result = {}
    for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
        result[k] = fields.column_info(k, col, parent, m2o, original_parent)
    # local columns are added last so they take precedence over inherited ones
    for k, col in self._columns.iteritems():
        result[k] = fields.column_info(k, col)
    return result
3461 def _inherits_check(self):
3462 for table, field_name in self._inherits.items():
3463 if field_name not in self._columns:
3464 _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, self._name)
3465 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
3466 required=True, ondelete="cascade")
3467 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() not in ("cascade", "restrict"):
3468 _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, self._name)
3469 self._columns[field_name].required = True
3470 self._columns[field_name].ondelete = "cascade"
3472 #def __getattr__(self, name):
3474 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3475 # (though inherits doesn't use Python inheritance).
3476 # Handles translating between local ids and remote ids.
3477 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3478 # when you have inherits.
3480 # for model, field in self._inherits.iteritems():
3481 # proxy = self.pool.get(model)
3482 # if hasattr(proxy, name):
3483 # attribute = getattr(proxy, name)
3484 # if not hasattr(attribute, '__call__'):
3488 # return super(orm, self).__getattr__(name)
3490 # def _proxy(cr, uid, ids, *args, **kwargs):
3491 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3492 # lst = [obj[field].id for obj in objects if obj[field]]
3493 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
    """ Return the definition of each field.

    The returned value is a dictionary (indiced by field name) of
    dictionaries. The _inherits'd fields are included. The string, help,
    and selection (if present) attributes are translated.

    :param cr: database cursor
    :param user: current user id
    :param allfields: list of fields
    :param context: context arguments, like lang, time zone
    :param write_access: ignored on input; recomputed below from the
                         user's actual write/create rights on the model
    :return: dictionary of field dictionaries, each one describing a field of the business object
    :raise AccessError: * if user has no create/write rights on the requested object
    """
    if context is None:
        context = {}
    res = {}

    # Users without write/create rights get all fields as readonly, with
    # the dynamic 'states' cleared so clients do not re-enable them.
    write_access = self.check_access_rights(cr, user, 'write', raise_exception=False) \
        or self.check_access_rights(cr, user, 'create', raise_exception=False)

    translation_obj = self.pool.get('ir.translation')
    for parent in self._inherits:
        res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

    for f, field in self._columns.iteritems():
        # Skip fields not requested, and fields restricted to groups the
        # user does not belong to.
        if (allfields and f not in allfields) or \
            (field.groups and not self.user_has_groups(cr, user, groups=field.groups, context=context)):
            continue

        res[f] = fields.field_to_dict(self, cr, user, field, context=context)

        if not write_access:
            res[f]['readonly'] = True
            res[f]['states'] = {}

        # Translate label, tooltip and selection values when a language
        # is set in the context.
        if 'lang' in context:
            if 'string' in res[f]:
                res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context['lang'])
                if res_trans:
                    res[f]['string'] = res_trans
            if 'help' in res[f]:
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context['lang'])
                if help_trans:
                    res[f]['help'] = help_trans
            if 'selection' in res[f]:
                if isinstance(field.selection, (tuple, list)):
                    sel = field.selection
                    sel2 = []
                    for key, val in sel:
                        val2 = None
                        if val:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context['lang'], val)
                        sel2.append((key, val2 or val))
                    res[f]['selection'] = sel2

    return res
def check_field_access_rights(self, cr, user, operation, fields, context=None):
    """
    Check the user access rights on the given fields. This raises Access
    Denied if the user does not have the rights. Otherwise it returns the
    fields (as is if the fields is not falsy, or the readable/writable
    fields if fields is falsy).
    """
    def p(field_name):
        """Predicate to test if the user has access to the given field name."""
        # Ignore requested field if it doesn't exist. This is ugly but
        # it seems to happen at least with 'name_alias' on res.partner.
        if field_name not in self._all_columns:
            return True
        field = self._all_columns[field_name].column
        if user != SUPERUSER_ID and field.groups:
            return self.user_has_groups(cr, user, groups=field.groups, context=context)
        return True

    if not fields:
        # No explicit field list: return every field the user may access.
        fields = filter(p, self._all_columns.keys())
    else:
        # Explicit field list: refuse the whole request if any requested
        # field is group-restricted for this user.
        filtered_fields = filter(lambda a: not p(a), fields)
        if filtered_fields:
            _logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s, fields: %s', operation, user, self._name, ', '.join(filtered_fields))
            raise except_orm(
                _('Access Denied'),
                _('The requested operation cannot be completed due to security restrictions. '
                  'Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
                (self._description, operation))
    return fields
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
    """ Read records with given ids with the given fields

    :param cr: database cursor
    :param user: current user id
    :param ids: id or list of the ids of the records to read
    :param fields: optional list of field names to return (default: all fields would be returned)
    :type fields: list (example ['field_name_1', ...])
    :param context: optional context dictionary - it may contains keys for specifying certain options
                    like ``context_lang``, ``context_tz`` to alter the results of the call.
                    A special ``bin_size`` boolean flag may also be passed in the context to request the
                    value of all fields.binary columns to be returned as the size of the binary instead of its
                    contents. This can also be selectively overriden by passing a field-specific flag
                    in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                    Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
    :return: list of dictionaries((dictionary per record asked)) with requested field values
    :rtype: [{'name_of_the_field': value, ...}, ...]
    :raise AccessError: * if user has no read rights on the requested object
                        * if user tries to bypass access rules for read on the requested object
    """
    if not context:
        context = {}
    self.check_access_rights(cr, user, 'read')
    fields = self.check_field_access_rights(cr, user, 'read', fields)
    # Accept a single id, a list of ids, or browse_record-like dicts.
    if isinstance(ids, (int, long)):
        select = [ids]
    else:
        select = ids
    select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
    result = self._read_flat(cr, user, select, fields, context, load)

    # Normalize SQL NULLs to False, as expected by clients.
    for r in result:
        for key, v in r.items():
            if v is None:
                r[key] = False

    # Mirror the input shape: single id in, single record (or False) out.
    if isinstance(ids, (int, long, dict)):
        return result and result[0] or False
    return result
def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
    # Low-level read: fetches the requested columns for `ids` via direct
    # SQL, applies ir.rule security clauses, merges _inherits parent
    # values and computes non-classic (function) fields.
    # NOTE(review): several statements appear to be missing from this body
    # (context default, `res` accumulation from `results`, several loop
    # headers) -- confirm against the upstream revision before relying on it.
    if fields_to_read is None:
        fields_to_read = self._columns.keys()

    # Construct a clause for the security rules.
    # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
    # or will at least contain self._table.
    rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

    # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
    fields_pre = [f for f in fields_to_read if
                  f == self.CONCURRENCY_CHECK_FIELD
                  or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                 ] + self._inherits.values()

    def convert_field(f):
        # Render one field name as a SQL select expression.
        f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
        if f in ('create_date', 'write_date'):
            return "date_trunc('second', %s) as %s" % (f_qual, f)
        if f == self.CONCURRENCY_CHECK_FIELD:
            if self._log_access:
                return "COALESCE(%s.write_date, %s.create_date, (now() at time zone 'UTC'))::timestamp AS %s" % (self._table, self._table, f,)
            return "(now() at time zone 'UTC')::timestamp AS %s" % (f,)
        if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
            # bin_size context flag: return the size of the binary, not its content
            return 'length(%s) as "%s"' % (f_qual, f)
        # NOTE(review): a fallthrough `return f_qual` appears to be missing here

    fields_pre2 = map(convert_field, fields_pre)
    order_by = self._parent_order or self._order
    select_fields = ','.join(fields_pre2 + ['%s.id' % self._table])
    query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
    # NOTE(review): presumably guarded by `if rule_clause:` upstream
    query += " AND " + (' OR '.join(rule_clause))
    query += " ORDER BY " + order_by
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute(query, [tuple(sub_ids)] + rule_params)
        results = cr.dictfetchall()
        result_ids = [x['id'] for x in results]
        # raises if record rules hid some of the requested ids
        self._check_record_rules_result_count(cr, user, sub_ids, result_ids, 'read', context=context)
    # NOTE(review): `res` is presumably accumulated from `results` when
    # columns were selected; the line below looks like the no-column branch
    res = map(lambda x: {'id': x}, ids)

    if context.get('lang'):
        for f in fields_pre:
            # NOTE(review): a `continue` appears to be missing under this
            # guard (the concurrency pseudo-field has nothing to translate)
            if f == self.CONCURRENCY_CHECK_FIELD:
            if self._columns[f].translate:
                ids = [x['id'] for x in res]
                #TODO: optimize out of this loop
                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context['lang'], ids)
                # NOTE(review): a `for r in res:` loop header appears to be missing
                r[f] = res_trans.get(r['id'], False) or r[f]

    # Merge values coming from _inherits parent records.
    for table in self._inherits:
        col = self._inherits[table]
        cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
        # NOTE(review): an early `continue` for empty `cols` appears to be missing
        res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
        # NOTE(review): the res2-by-id index (`res3`) and the per-record
        # merge loop appear to be partially missing below
        if not record[col]: # if the record is deleted from _inherits table?
            record.update(res3[record[col]])
        if col not in fields_to_read:

    # all fields which need to be post-processed by a simple function (symbol_get)
    fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
    # NOTE(review): a guard and a `for r in res:` loop header appear to be missing
    for f in fields_post:
        r[f] = self._columns[f]._symbol_get(r[f])
    ids = [x['id'] for x in res]

    # all non inherited fields for which the attribute whose name is in load is False
    fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

    # Compute POST fields
    # group function fields by their `multi` attribute so grouped fields
    # are computed with a single call per group
    for f in fields_post:
        todo.setdefault(self._columns[f]._multi, [])
        todo[self._columns[f]._multi].append(f)
    for key, val in todo.items():
        # NOTE(review): an `if key:` branch header appears to be missing
        res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
        assert res2 is not None, \
            'The function field "%s" on the "%s" model returned None\n' \
            '(a dictionary was expected).' % (val[0], self._name)
        # NOTE(review): `for pos in val:` / `for record in res:` loop
        # headers appear to be missing around the next three statements
        if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
        multi_fields = res2.get(record['id'],{})
        record[pos] = multi_fields.get(pos,[])
        # NOTE(review): `else:` branch (single, non-multi function fields)
        # appears to be missing around the next two statements
        res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
        record[f] = res2[record['id']]

    # Warn about deprecated fields now that fields_pre and fields_post are computed
    # Explicitly use list() because we may receive tuples
    for f in list(fields_pre) + list(fields_post):
        field_column = self._all_columns.get(f) and self._all_columns.get(f).column
        if field_column and field_column.deprecated:
            _logger.warning('Field %s.%s is deprecated: %s', self._name, f, field_column.deprecated)
3756 for field in vals.copy():
3758 if field in self._columns:
3759 fobj = self._columns[field]
3766 for group in groups:
3767 module = group.split(".")[0]
3768 grp = group.split(".")[1]
3769 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
3770 (grp, module, 'res.groups', user))
3771 readonly = cr.fetchall()
3772 if readonly[0][0] >= 1:
3775 elif readonly[0][0] == 0:
3781 if type(vals[field]) == type([]):
3783 elif type(vals[field]) == type(0.0):
3785 elif type(vals[field]) == type(''):
3786 vals[field] = '=No Permission='
# TODO check READ access
def perm_read(self, cr, user, ids, context=None, details=True):
    """
    Returns some metadata about the given records.

    :param details: if True, \*_uid fields are replaced with the name of the user
    :return: list of ownership dictionaries for each requested record
    :rtype: list of dictionaries with the following keys:

        * id: object id
        * create_uid: user who created the record
        * create_date: date when the record was created
        * write_uid: last user who changed the record
        * write_date: date of the last change to the record
        * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
    """
    if not context:
        context = {}
    if not ids:
        return []
    if isinstance(ids, (int, long)):
        ids = [ids]
    fields = ['id']
    if self._log_access:
        fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
    quoted_table = '"%s"' % self._table
    fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
    # LEFT JOIN on ir_model_data fetches the record's XML ID (if any)
    # within the same query.
    query = '''SELECT %s, __imd.module, __imd.name
               FROM %s LEFT JOIN ir_model_data __imd
               ON (__imd.model = %%s and __imd.res_id = %s.id)
               WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
    cr.execute(query, (self._name, tuple(ids)))
    res = cr.dictfetchall()
    for r in res:
        for key in r:
            r[key] = r[key] or False
            if details and key in ('write_uid', 'create_uid') and r[key]:
                try:
                    r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
                except Exception:
                    pass # Leave the numeric uid there
        r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
        del r['name'], r['module']
    return res
def _check_concurrency(self, cr, ids, context):
    """Optimistic concurrency check: raise a ConcurrencyException if any
    of `ids` was modified in the database after the timestamp the client
    recorded in ``context[CONCURRENCY_CHECK_FIELD]``."""
    if not context:
        return
    if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
        return
    check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)"
    for sub_ids in cr.split_for_in_conditions(ids):
        # Build (id, client_timestamp) parameter pairs for the records the
        # client actually supplied a timestamp for.
        ids_to_check = []
        for id in sub_ids:
            id_ref = "%s,%s" % (self._name, id)
            update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
            if update_date:
                ids_to_check.extend([id, update_date])
        if not ids_to_check:
            continue
        cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
        res = cr.fetchone()
        if res:
            # mention the first one only to keep the error message readable
            raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
def _check_record_rules_result_count(self, cr, uid, ids, result_ids, operation, context=None):
    """Verify the returned rows after applying record rules matches
    the length of `ids`, and raise an appropriate exception if it does not.
    """
    ids, result_ids = set(ids), set(result_ids)
    missing_ids = ids - result_ids
    if not missing_ids:
        return
    # Attempt to distinguish record rule restriction vs deleted records,
    # to provide a more specific error message - check if the missing
    # ids are still present in the database.
    cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(missing_ids),))
    if cr.rowcount:
        # the missing ids are (at least partially) hidden by access rules
        if uid == SUPERUSER_ID:
            return
        _logger.warning('Access Denied by record rules for operation: %s, uid: %s, model: %s', operation, uid, self._name)
        raise except_orm(_('Access Denied'),
                         _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
                         (self._description, operation))
    # If we get here, the missing_ids are not in the database
    if operation in ('read','unlink'):
        # No need to warn about deleting an already deleted record.
        # And no error when reading a record that was deleted, to prevent spurious
        # errors for non-transactional search/read sequences coming from clients
        return
    _logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name)
    raise except_orm(_('Missing document(s)'),
                     _('One of the documents you are trying to access has been deleted, please try again after refreshing.'))
def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
    """Verifies that the operation given by ``operation`` is allowed for the user
    according to the access rights."""
    ir_access = self.pool.get('ir.model.access')
    return ir_access.check(cr, uid, self._name, operation, raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
    """Verifies that the operation given by ``operation`` is allowed for the user
    according to ir.rules.

    :param operation: one of ``write``, ``unlink``
    :raise except_orm: * if current ir.rules do not permit this operation.
    :return: None if the operation is allowed
    """
    if uid == SUPERUSER_ID:
        return

    if self.is_transient():
        # Only one single implicit access rule for transient models: owner only!
        # This is ok to hardcode because we assert that TransientModels always
        # have log_access enabled so that the create_uid column is always there.
        # And even with _inherits, these fields are always present in the local
        # table too, so no need for JOINs.
        cr.execute("""SELECT distinct create_uid
                      FROM %s
                      WHERE id IN %%s""" % self._table, (tuple(ids),))
        uids = [x[0] for x in cr.fetchall()]
        if len(uids) != 1 or uids[0] != uid:
            raise except_orm(_('Access Denied'),
                             _('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,))
    else:
        where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
        if where_clause:
            where_clause = ' and ' + ' and '.join(where_clause)
            for sub_ids in cr.split_for_in_conditions(ids):
                cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                           ' WHERE ' + self._table + '.id IN %s' + where_clause,
                           [sub_ids] + where_params)
                returned_ids = [x['id'] for x in cr.dictfetchall()]
                self._check_record_rules_result_count(cr, uid, sub_ids, returned_ids, operation, context=context)
def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
    """Call given workflow trigger as a result of a CRUD operation"""
    wf_service = netsvc.LocalService("workflow")
    # fire the trigger (e.g. 'trg_create', 'trg_write', 'trg_delete')
    # once per affected record
    for res_id in ids:
        getattr(wf_service, trigger)(uid, self._name, res_id, cr)
def _workflow_signal(self, cr, uid, ids, signal, context=None):
    """Send given workflow signal and return a dict mapping ids to workflow results"""
    wf_service = netsvc.LocalService("workflow")
    result = {}
    for res_id in ids:
        result[res_id] = wf_service.trg_validate(uid, self._name, res_id, signal, cr)
    return result
def unlink(self, cr, uid, ids, context=None):
    """
    Delete records with given ids

    :param cr: database cursor
    :param uid: current user id
    :param ids: id or list of ids
    :param context: (optional) context arguments, like lang, time zone
    :raise AccessError: * if user has no unlink rights on the requested object
                        * if user tries to bypass access rules for unlink on the requested object
    :raise UserError: if the record is default property for other records
    """
    # NOTE(review): a few statements appear to be missing from this body
    # (the `ids = [ids]` normalization under the isinstance guard, the
    # closing bracket of `domain`, two `if` guards and the final return)
    # -- confirm against the upstream revision.
    if isinstance(ids, (int, long)):
    result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context)

    self._check_concurrency(cr, ids, context)

    self.check_access_rights(cr, uid, 'unlink')

    ir_property = self.pool.get('ir.property')

    # Check if the records are used as default properties.
    domain = [('res_id', '=', False),
              ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
    if ir_property.search(cr, uid, domain, context=context):
        raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))

    # Delete the records' properties.
    property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context)
    ir_property.unlink(cr, uid, property_ids, context=context)

    self._workflow_trigger(cr, uid, ids, 'trg_delete', context=context)

    self.check_access_rule(cr, uid, ids, 'unlink', context=context)
    pool_model_data = self.pool.get('ir.model.data')
    ir_values_obj = self.pool.get('ir.values')
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('delete from ' + self._table + ' ' \
                   'where id IN %s', (sub_ids,))

        # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
        # as these are not connected with real database foreign keys, and would be dangling references.
        # Note: following steps performed as admin to avoid access rights restrictions, and with no context
        # to avoid possible side-effects during admin calls.
        # Step 1. Calling unlink of ir_model_data only for the affected IDS
        reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
        # Step 2. Marching towards the real deletion of referenced records
        # NOTE(review): presumably guarded by `if reference_ids:` upstream
        pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)

        # For the same reason, removing the record relevant to ir_values
        # NOTE(review): the closing `context=context)` of this search call
        # and an `if ir_value_ids:` guard appear to be missing
        ir_value_ids = ir_values_obj.search(cr, uid,
            ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
        ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)

    # Recompute stored function fields on other models that referenced
    # the deleted records.
    for order, object, store_ids, fields in result_store:
        if object != self._name:
            obj = self.pool.get(object)
            cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
            rids = map(lambda x: x[0], cr.fetchall())
            # NOTE(review): presumably guarded by `if rids:` upstream
            obj._store_set_values(cr, uid, rids, fields, context)
def write(self, cr, user, ids, vals, context=None):
    """
    Update records with given ids with the given field values

    :param cr: database cursor
    :param user: current user id
    :param ids: object id or list of object ids to update according to **vals**
    :param vals: field values to update, e.g {'field_name': new_field_value, ...}
    :type vals: dictionary
    :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
    :type context: dictionary
    :raise AccessError: * if user has no write rights on the requested object
                        * if user tries to bypass access rules for write on the requested object
    :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
    :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

    **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:

        + For a many2many field, a list of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
             (3, ID)                cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
             (4, ID)                link to existing record with id = ID (adds a relationship)
             (5)                    unlink all (like using (3,ID) for all linked records)
             (6, 0, [IDs])          replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)

             Example:
                [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]

        + For a one2many field, a lits of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)

             Example:
                [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]

        + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
        + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
    """
    # NOTE(review): many statements appear to be missing throughout this
    # body (guards, accumulator initializations, loop headers); the hedged
    # notes below mark the obvious gaps -- confirm against the upstream
    # revision before relying on this code.
    self.check_field_access_rights(cr, user, 'write', vals.keys())
    # Filter out vals entries whose field is write-restricted to groups
    # the user does not belong to.
    for field in vals.copy():
        if field in self._columns:
            fobj = self._columns[field]
        elif field in self._inherit_fields:
            fobj = self._inherit_fields[field][2]
        # NOTE(review): the derivation of `groups` (the field's write
        # groups) and its guard appear to be missing here
        for group in groups:
            module = group.split(".")[0]
            grp = group.split(".")[1]
            # does the user belong to a group allowed to write this field?
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
    # NOTE(review): the `ids = [ids]` normalization under this guard
    # appears to be missing
    if isinstance(ids, (int, long)):
    self._check_concurrency(cr, ids, context)
    self.check_access_rights(cr, user, 'write')

    result = self._store_get_values(cr, user, ids, vals.keys(), context) or []

    # No direct update of parent_left/right
    vals.pop('parent_left', None)
    vals.pop('parent_right', None)

    parents_changed = []
    parent_order = self._parent_order or self._order
    if self._parent_store and (self._parent_name in vals):
        # The parent_left/right computation may take up to
        # 5 seconds. No need to recompute the values if the
        # parent is the same.
        # Note: to respect parent_order, nodes must be processed in
        # order, so ``parents_changed`` must be ordered properly.
        parent_val = vals[self._parent_name]
        # NOTE(review): the `if parent_val:` / `else:` branch headers
        # around the two queries below appear to be missing
        query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
                (self._table, self._parent_name, self._parent_name, parent_order)
        cr.execute(query, (tuple(ids), parent_val))
        query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
                (self._table, self._parent_name, parent_order)
        cr.execute(query, (tuple(ids),))
        parents_changed = map(operator.itemgetter(0), cr.fetchall())

    # NOTE(review): initialization of upd0/upd1/direct/upd_todo/updend and
    # the `for field in vals:` loop header appear to be missing below
    totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
    field_column = self._all_columns.get(field) and self._all_columns.get(field).column
    if field_column and field_column.deprecated:
        _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
    if field in self._columns:
        if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
            if (not totranslate) or not self._columns[field].translate:
                # classic column: updated directly via SQL below
                upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                upd1.append(self._columns[field]._symbol_set[1](vals[field]))
            direct.append(field)
        # NOTE(review): the `else:` branches expected around the next two
        # appends appear to be missing
        upd_todo.append(field)
        updend.append(field)
    # NOTE(review): the trailing `and vals[field]:` of this condition
    # appears to be missing (backslash continuation is truncated)
    if field in self._columns \
        and hasattr(self._columns[field], 'selection') \
        self._check_selection_field_value(cr, user, field, vals[field], context=context)

    if self._log_access:
        upd0.append('write_uid=%s')
        upd0.append("write_date=(now() at time zone 'UTC')")
        # NOTE(review): `upd1.append(user)` and an `if len(upd0):` guard
        # appear to be missing after this branch

    self.check_access_rule(cr, user, ids, 'write', context=context)
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
                   'where id IN %s', upd1 + [sub_ids])
        if cr.rowcount != len(sub_ids):
            # some requested records were deleted by a concurrent transaction
            raise except_orm(_('AccessError'),
                             _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)

    # NOTE(review): `if totranslate:` / `for f in direct:` headers appear
    # to be missing above the translation handling below
    if self._columns[f].translate:
        src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
        # Inserting value to DB
        self.write(cr, user, ids, {f: vals[f]})
        self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)

    # call the 'set' method of fields which are not classic_write
    upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

    # default element in context must be removed when call a one2many or many2many
    rel_context = context.copy()
    for c in context.items():
        if c[0].startswith('default_'):
            del rel_context[c[0]]

    for field in upd_todo:
        # NOTE(review): a `for id in ids:` loop header appears to be missing
        result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []

    # propagate values written on _inherits fields to the parent models
    unknown_fields = updend[:]
    for table in self._inherits:
        col = self._inherits[table]
        # collect the parent record ids linked to `ids`
        # NOTE(review): `nids = []` initialization appears to be missing
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                       'where id IN %s', (sub_ids,))
            nids.extend([x[0] for x in cr.fetchall()])

        # NOTE(review): `v = {}` and the `for val in updend:` loop header
        # appear to be missing here
        if self._inherit_fields[val][0] == table:
            # NOTE(review): `v[val] = vals[val]` appears to be missing
            unknown_fields.remove(val)
        # NOTE(review): presumably guarded by `if v:` upstream
        self.pool.get(table).write(cr, user, nids, v, context)

    # NOTE(review): the `if unknown_fields:` guard and the
    # `_logger.warning(` call head for the next two lines appear missing
        'No such field(s) in model %s: %s.',
        self._name, ', '.join(unknown_fields))
    self._validate(cr, user, ids, context)

    # TODO: use _order to set dest at the right position and not first node of parent
    # We can't defer parent_store computation because the stored function
    # fields that are computer may refer (directly or indirectly) to
    # parent_left/right (via a child_of domain)
    # NOTE(review): `if parents_changed:` / `if self.pool._init:` headers
    # appear to be missing around the parent_store recomputation below
        self.pool._init_parent[self._name] = True

    order = self._parent_order or self._order
    parent_val = vals[self._parent_name]
    # NOTE(review): `if parent_val:` / `else:` headers appear to be
    # missing around the two clause assignments below
    clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
    clause, params = '%s IS NULL' % (self._parent_name,), ()

    for id in parents_changed:
        cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
        pleft, pright = cr.fetchone()
        distance = pright - pleft + 1

        # Positions of current siblings, to locate proper insertion point;
        # this can _not_ be fetched outside the loop, as it needs to be refreshed
        # after each update, in case several nodes are sequentially inserted one
        # next to the other (i.e computed incrementally)
        cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
        parents = cr.fetchall()

        # Find Position of the element
        for (parent_pright, parent_id) in parents:
            position = parent_pright + 1

        # It's the first node of the parent
        # NOTE(review): the `if not position:` / `if not parent_val:`
        # guards appear to be missing before this fallback lookup
        cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
        position = cr.fetchone()[0] + 1

        if pleft < position <= pright:
            raise except_orm(_('UserError'), _('Recursivity Detected.'))

        if pleft < position:
            cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
        # NOTE(review): an `else:` appears to be missing before the next
        # three statements (the mirror-image shift)
        cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
        cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
        cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))

    result += self._store_get_values(cr, user, ids, vals.keys(), context)
    # recompute stored function fields, deduplicating per (model, fields, id)
    # NOTE(review): `result.sort()` and `done = {}` appear to be missing
    for order, object, ids_to_update, fields_to_recompute in result:
        key = (object, tuple(fields_to_recompute))
        done.setdefault(key, {})
        # avoid to do several times the same computation
        # NOTE(review): `todo = []` and `todo.append(id)` appear missing
        for id in ids_to_update:
            if id not in done[key]:
                done[key][id] = True
        self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)

    self._workflow_trigger(cr, user, ids, 'trg_write', context=context)
4292 # TODO: Should set perm to user.xxx
    def create(self, cr, user, vals, context=None):
        """
        Create a new record for the model.

        The values for the new record are initialized using the ``vals``
        argument, and if necessary the result of ``default_get()``.

        :param cr: database cursor
        :param user: current user id
        :param vals: field values for new record, e.g {'field_name': field_value, ...}
        :type vals: dictionary
        :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
        :type context: dictionary
        :return: id of new record created
        :raise AccessError: * if user has no create rights on the requested object
                            * if user tries to bypass access rules for create on the requested object
        :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
        :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

        **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
        Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
        """
        # NOTE(review): this listing is elided in several places — the context
        # guard, the tocreate/upd_todo/unknown_fields/result/done initializations,
        # the try/except around the sequence query, several else-branches and the
        # final `return id_new` are not shown. Verify against the full source.

        # opportunistically vacuum old transient rows on each create
        if self.is_transient():
            self._transient_vacuum(cr, user)

        self.check_access_rights(cr, user, 'create')

        if self._log_access:
            # callers may never supply the audit-trail columns themselves
            for f in LOG_ACCESS_COLUMNS:
                if vals.pop(f, None) is not None:
                    'Field `%s` is not allowed when creating the model `%s`.',
        vals = self._add_missing_default_values(cr, user, vals, context)

        # split `vals` between this table and the _inherits parent tables
        for v in self._inherits:
            if self._inherits[v] not in vals:
                tocreate[v] = {'id': vals[self._inherits[v]]}
        (upd0, upd1, upd2) = ('', '', [])
        for v in vals.keys():
            if v in self._inherit_fields and v not in self._columns:
                # inherited-only field: route the value to the parent record
                (table, col, col_detail, original_parent) = self._inherit_fields[v]
                tocreate[table][v] = vals[v]
                if (v not in self._inherit_fields) and (v not in self._columns):
                    unknown_fields.append(v)
                    'No such field(s) in model %s: %s.',
                    self._name, ', '.join(unknown_fields))

        # Try-except added to filter the creation of those records whose filds are readonly.
        # Example : any dashboard which has all the fields readonly.(due to Views(database views))
        cr.execute("SELECT nextval('"+self._sequence+"')")
        raise except_orm(_('UserError'),
            _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))

        id_new = cr.fetchone()[0]
        for table in tocreate:
            # an explicitly-passed parent id wins over creating a fresh parent
            if self._inherits[table] in vals:
                del vals[self._inherits[table]]

            record_id = tocreate[table].pop('id', None)

            # When linking/creating parent records, force context without 'no_store_function' key that
            # defers stored functions computing, as these won't be computed in batch at the end of create().
            parent_context = dict(context)
            parent_context.pop('no_store_function', None)

            if record_id is None or not record_id:
                record_id = self.pool.get(table).create(cr, user, tocreate[table], context=parent_context)
                self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=parent_context)

            upd0 += ',' + self._inherits[table]
            upd2.append(record_id)

        #Start : Set bool fields to be False if they are not touched(to make search more powerful)
        bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
        for bool_field in bool_fields:
            if bool_field not in vals:
                vals[bool_field] = False

        # drop values the user has no group-based write access to
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
                fobj = self._inherit_fields[field][2]
            for group in groups:
                module = group.split(".")[0]
                grp = group.split(".")[1]
                cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
                           (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
                    elif readonly[0][0] == 0:

        for field in vals:
            if self._columns[field]._classic_write:
                upd0 = upd0 + ',"' + field + '"'
                upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
                upd2.append(self._columns[field]._symbol_set[1](vals[field]))
                #for the function fields that receive a value, we set them directly in the database
                #(they may be required), but we also need to trigger the _fct_inv()
                if (hasattr(self._columns[field], '_fnct_inv')) and not isinstance(self._columns[field], fields.related):
                    #TODO: this way to special case the related fields is really creepy but it shouldn't be changed at
                    #one week of the release candidate. It seems the only good way to handle correctly this is to add an
                    #attribute to make a field `really readonly´ and thus totally ignored by the create()... otherwise
                    #if, for example, the related has a default value (for usability) then the fct_inv is called and it
                    #may raise some access rights error. Changing this is a too big change for now, and is thus postponed
                    #after the release but, definitively, the behavior shouldn't be different for related and function
                    upd_todo.append(field)
                #TODO: this `if´ statement should be removed because there is no good reason to special case the fields
                #related. See the above TODO comment for further explanations.
                if not isinstance(self._columns[field], fields.related):
                    upd_todo.append(field)
            if field in self._columns \
                    and hasattr(self._columns[field], 'selection') \
                self._check_selection_field_value(cr, user, field, vals[field], context=context)
        if self._log_access:
            upd0 += ',create_uid,create_date,write_uid,write_date'
            upd1 += ",%s,(now() at time zone 'UTC'),%s,(now() at time zone 'UTC')"
            upd2.extend((user, user))
        cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
        # stored function fields must be written in dependency (priority) order
        upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

        if self._parent_store and not context.get('defer_parent_store_computation'):
            self.pool._init_parent[self._name] = True
            parent = vals.get(self._parent_name, False)
            cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
            result_p = cr.fetchall()
            for (pleft,) in result_p:
                cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
            pleft_old = cr.fetchone()[0]
            cr.execute('select max(parent_right) from '+self._table)
            pleft = cr.fetchone()[0] or 0
            # open a 2-wide gap in the nested-set tree and slot the new node in
            cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
            cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
            cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))

        # default element in context must be remove when call a one2many or many2many
        rel_context = context.copy()
        for c in context.items():
            if c[0].startswith('default_'):
                del rel_context[c[0]]

        for field in upd_todo:
            result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
        self._validate(cr, user, [id_new], context)

        if not context.get('no_store_function', False):
            result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
            for order, object, ids, fields2 in result:
                # avoid recomputing the same (model, ids, fields) batch twice
                if not (object, ids, fields2) in done:
                    self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
                    done.append((object, ids, fields2))

        if self._log_create and not (context and context.get('no_store_function', False)):
            message = self._description + \
                self.name_get(cr, user, [id_new], context=context)[0][1] + \
                "' " + _("created.")
            self.log(cr, user, id_new, message, True, context=context)
        self.check_access_rule(cr, user, [id_new], 'create', context=context)
        self._workflow_trigger(cr, user, [id_new], 'trg_create', context=context)
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
        """Fetch records as objects allowing to use dot notation to browse fields and relations

        :param cr: database cursor
        :param uid: current user id
        :param select: id or list of ids.
        :param context: context arguments, like lang, time zone
        :rtype: object or list of objects requested
        """
        self._list_class = list_class or browse_record_list
        # NOTE(review): the `cache = {}` initialization appears elided from
        # this listing; `cache` is passed to every browse_record below.
        # need to accepts ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        if isinstance(select, (int, long)):
            # single id -> a single browse_record
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            # list of ids -> a browse_record_list of browse_records
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
        # anything else (e.g. False/None) -> null placeholder object
        return browse_null()
    def _store_get_values(self, cr, uid, ids, fields, context):
        """Returns an ordered list of fields.functions to call due to
        an update operation on ``fields`` of records with ``ids``,
        obtained by calling the 'store' functions of these fields,
        as setup by their 'store' attribute.

        :return: [(priority, model_name, [record_ids,], [function_fields,])]
        """
        # NOTE(review): this listing is elided — the initializations of
        # `fresults`, `mapping` and `call_map`, the sorting of `ordered_keys`
        # and the final `return result` are not shown here.
        if fields is None: fields = []
        stored_functions = self.pool._store_function.get(self._name, [])

        # use indexed names for the details of the stored_functions:
        model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)

        # only keep functions that should be triggered for the ``fields``
        to_compute = [f for f in stored_functions \
                if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]

        for function in to_compute:
            # memoize each id-mapping function's result by its identity
            fid = id(function[id_mapping_fnct_])
            if not fid in fresults:
                # use admin user for accessing objects having rules defined on store fields
                fresults[fid] = [id2 for id2 in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id2]
            target_ids = fresults[fid]

            # the compound key must consider the priority and model name
            key = (function[priority_], function[model_name_])
            for target_id in target_ids:
                mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))

        # Here mapping looks like:
        # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
        #   (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
        #   (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... } }

        # Now we need to generate the batch function calls list
        # call_map =
        #   { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
        for ((priority,model), id_map) in mapping.iteritems():
            functions_ids_maps = {}
            # function_ids_maps =
            #   { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
            for fid, functions in id_map.iteritems():
                functions_ids_maps.setdefault(tuple(functions), []).append(fid)
            for functions, ids in functions_ids_maps.iteritems():
                call_map.setdefault((priority,model),[]).append((priority, model, ids,
                                                                [f[func_field_to_compute_] for f in functions]))
        ordered_keys = call_map.keys()
        # flatten the per-key call lists into one ordered list
        result = reduce(operator.add, (call_map[k] for k in ordered_keys))
    def _store_set_values(self, cr, uid, ids, fields, context):
        """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
           respecting ``multi`` attributes), and stores the resulting values in the database directly."""
        # NOTE(review): this listing is heavily elided — loop headers (e.g. the
        # cursor-result loop, the loop over `fields`/`todo`), the
        # `field_dict`/`keys`/`todo`/`upd0`/`upd1` initializations and several
        # branches are missing. Comments only cover the visible statements.
        if self._log_access:
            cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
            field_dict.setdefault(r[0], [])
            # parse the stored write_date (second precision only)
            res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
            write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
            for i in self.pool._store_function.get(self._name, []):
                # i[5] is an hour-based freshness delay: fields whose stored
                # value is still recent enough are excluded from recomputation
                up_write_date = write_date + datetime.timedelta(hours=i[5])
                if datetime.datetime.now() < up_write_date:
                    field_dict[r[0]].append(i[1])

        # group requested fields by their `_multi` attribute so that
        # multi-fields are computed with a single get() call
        if self._columns[f]._multi not in keys:
            keys.append(self._columns[f]._multi)
            todo.setdefault(self._columns[f]._multi, [])
        todo[self._columns[f]._multi].append(f)

        # use admin user for accessing objects having rules defined on store fields
        result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
        for id, value in result.items():
            for f in value.keys():
                # skip fields whose stored value was deemed fresh above
                if f in field_dict[id]:
                    if self._columns[v]._type == 'many2one':
                        # many2one results come back as (id, name) pairs
                        value[v] = value[v][0]
                    upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
                    upd1.append(self._columns[v]._symbol_set[1](value[v]))
            cr.execute('update "' + self._table + '" set ' + \
                ','.join(upd0) + ' where id = %s', upd1)

        # use admin user for accessing objects having rules defined on store fields
        result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
        for r in result.keys():
            if r in field_dict.keys():
                if f in field_dict[r]:
        for id, value in result.items():
            if self._columns[f]._type == 'many2one':
                cr.execute('update "' + self._table + '" set ' + \
                    '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
4666 def perm_write(self, cr, user, ids, fields, context=None):
4667 raise NotImplementedError(_('This method does not exist anymore'))
    # TODO: ameliorer avec NULL
    def _where_calc(self, cr, user, domain, active_test=True, context=None):
        """Computes the WHERE clause needed to implement an OpenERP domain.

        :param domain: the domain to compute
        :param active_test: whether the default filtering of records with ``active``
                            field set to ``False`` should be applied.
        :return: the query expressing the given domain as provided in domain
        :rtype: osv.query.Query
        """
        # NOTE(review): elided listing — the context default guard and the
        # if/else branches wrapping the statements below are not shown
        # (e.g. the `domain = [('active','=',1)]` line belongs to an
        # empty-domain branch in the full source).

        # if the object has a field named 'active', filter out all inactive
        # records unless they were explicitely asked for
        if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
            # the item[0] trick below works for domain items and '&'/'|'/'!'
            if not any(item[0] == 'active' for item in domain):
                domain.insert(0, ('active', '=', 1))
            domain = [('active', '=', 1)]

        # translate the domain into SQL fragments
        e = expression.expression(cr, user, domain, self, context)
        tables = e.get_tables()
        where_clause, where_params = e.to_sql()
        where_clause = where_clause and [where_clause] or []
        # empty domain: select everything from the model's own table
        where_clause, where_params, tables = [], [], ['"%s"' % self._table]
        return Query(tables, where_clause, where_params)
4703 def _check_qorder(self, word):
4704 if not regex_order.match(word):
4705 raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
    def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
        """Add what's missing in ``query`` to implement all appropriate ir.rules
          (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)

        :param query: the current query object
        """
        def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
            """ :param string parent_model: string of the parent model
                :param model child_object: model object, base of the rule application
            """
            # NOTE(review): the surrounding `if added_clause:` guard and the
            # `new_tables = []` / `else:` lines appear elided from this listing.
            if parent_model and child_object:
                # as inherited rules are being applied, we need to add the missing JOIN
                # to reach the parent table (if it was not JOINed yet in the query)
                parent_alias = child_object._inherits_join_add(child_object, parent_model, query)
                # inherited rules are applied on the external table -> need to get the alias and replace
                parent_table = self.pool.get(parent_model)._table
                added_clause = [clause.replace('"%s"' % parent_table, '"%s"' % parent_alias) for clause in added_clause]
                # change references to parent_table to parent_alias, because we now use the alias to refer to the table
                for table in added_tables:
                    # table is just a table name -> switch to the full alias
                    if table == '"%s"' % parent_table:
                        new_tables.append('"%s" as "%s"' % (parent_table, parent_alias))
                    # table is already a full statement -> replace reference to the table to its alias, is correct with the way aliases are generated
                    new_tables.append(table.replace('"%s"' % parent_table, '"%s"' % parent_alias))
                added_tables = new_tables
            query.where_clause += added_clause
            query.where_clause_params += added_params
            for table in added_tables:
                if table not in query.tables:
                    query.tables.append(table)

        # apply main rules on the object
        rule_obj = self.pool.get('ir.rule')
        rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, self._name, mode, context=context)
        apply_rule(rule_where_clause, rule_where_clause_params, rule_tables)

        # apply ir.rules from the parents (through _inherits)
        for inherited_model in self._inherits:
            rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, inherited_model, mode, context=context)
            apply_rule(rule_where_clause, rule_where_clause_params, rule_tables,
                       parent_model=inherited_model, child_object=self)
    def _generate_m2o_order_by(self, order_field, query):
        """
        Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
        either native m2o fields or function/related fields that are stored, including
        intermediate JOINs for inheritance if required.

        :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
        """
        # NOTE(review): elided listing — the else-branches, the early bail-out
        # return after the debug message and the `m2o_order_list = []`
        # initialization are not shown here.
        if order_field not in self._columns and order_field in self._inherit_fields:
            # also add missing joins for reaching the table containing the m2o field
            qualified_field = self._inherits_join_calc(order_field, query)
            order_field_column = self._inherit_fields[order_field][2]
            # local column: qualify the field with our own table
            qualified_field = '"%s"."%s"' % (self._table, order_field)
            order_field_column = self._columns[order_field]

        assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
        if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
            _logger.debug("Many2one function/related fields must be stored " \
                "to be used as ordering fields! Ignoring sorting for %s.%s",
                self._name, order_field)

        # figure out the applicable order_by for the m2o
        dest_model = self.pool.get(order_field_column._obj)
        m2o_order = dest_model._order
        if not regex_order.match(m2o_order):
            # _order is complex, can't use it here, so we default to _rec_name
            m2o_order = dest_model._rec_name

        # extract the field names, to be able to qualify them and add desc/asc
        for order_part in m2o_order.split(","):
            m2o_order_list.append(order_part.strip().split(" ", 1)[0].strip())
        m2o_order = m2o_order_list

        # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
        # as we don't want to exclude results that have NULL values for the m2o
        src_table, src_field = qualified_field.replace('"', '').split('.', 1)
        dst_alias, dst_alias_statement = query.add_join((src_table, dest_model._table, src_field, 'id', src_field), implicit=False, outer=True)
        qualify = lambda field: '"%s"."%s"' % (dst_alias, field)
        return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
    def _generate_order_by(self, order_spec, query):
        """
        Attempt to consruct an appropriate ORDER BY clause based on order_spec, which must be
        a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.

        :raise: except_orm in case order_spec is malformed
        """
        # NOTE(review): elided listing — the `if order_spec:` guard around the
        # loop and several `else:` lines (before the `continue`s, the
        # ValueError and the final append) are not shown; indentation below is
        # reconstructed.
        order_by_clause = ''
        order_spec = order_spec or self._order
        order_by_elements = []
        self._check_qorder(order_spec)
        for order_part in order_spec.split(','):
            order_split = order_part.strip().split(' ')
            order_field = order_split[0].strip()
            order_direction = order_split[1].strip() if len(order_split) == 2 else ''
            # 'id' and the audit columns always live on our own table
            if order_field == 'id' or (self._log_access and order_field in LOG_ACCESS_COLUMNS.keys()):
                order_by_elements.append('"%s"."%s" %s' % (self._table, order_field, order_direction))
            elif order_field in self._columns:
                order_column = self._columns[order_field]
                if order_column._classic_read:
                    inner_clause = '"%s"."%s"' % (self._table, order_field)
                elif order_column._type == 'many2one':
                    inner_clause = self._generate_m2o_order_by(order_field, query)
                    continue # ignore non-readable or "non-joinable" fields
            elif order_field in self._inherit_fields:
                parent_obj = self.pool.get(self._inherit_fields[order_field][3])
                order_column = parent_obj._columns[order_field]
                if order_column._classic_read:
                    inner_clause = self._inherits_join_calc(order_field, query)
                elif order_column._type == 'many2one':
                    inner_clause = self._generate_m2o_order_by(order_field, query)
                    continue # ignore non-readable or "non-joinable" fields
            raise ValueError( _("Sorting field %s not found on model %s") %( order_field, self._name))
            # m2o ordering can expand into several clauses (one per _order part)
            if isinstance(inner_clause, list):
                for clause in inner_clause:
                    order_by_elements.append("%s %s" % (clause, order_direction))
            order_by_elements.append("%s %s" % (inner_clause, order_direction))
        if order_by_elements:
            order_by_clause = ",".join(order_by_elements)

        return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
        This is ok at the security level because this method is private and not callable through XML-RPC.

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        # NOTE(review): elided listing — the context guard, the `if count:`
        # branch returning the COUNT result, and `res = cr.fetchall()` before
        # the final return are not shown here.
        self.check_access_rights(cr, access_rights_uid or user, 'read')

        # For transient models, restrict acces to the current user, except for the super-user
        if self.is_transient() and self._log_access and user != SUPERUSER_ID:
            args = expression.AND(([('create_uid', '=', user)], args or []))

        query = self._where_calc(cr, user, args, context=context)
        self._apply_ir_rules(cr, user, query, 'read', context=context)
        order_by = self._generate_order_by(order, query)
        from_clause, where_clause, where_clause_params = query.get_sql()

        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        where_str = where_clause and (" WHERE %s" % where_clause) or ''

        # count query (used when count=True) and the id-selection query
        cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
        cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)

        # TDE note: with auto_join, we could have several lines about the same result
        # i.e. a lead with several unread messages; we uniquify the result using
        # a fast way to do it while preserving order (http://www.peterbe.com/plog/uniqifiers-benchmark)
        def _uniquify_list(seq):
            # set.add() returns None, so `not seen.add(x)` is True exactly when
            # x is being recorded for the first time (order-preserving dedup)
            return [x for x in seq if x not in seen and not seen.add(x)]
        return _uniquify_list([x[0] for x in res])
    # returns the different values ever entered for one field
    # this is used, for example, in the client when the user hits enter on
    # a field
    def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
        # NOTE(review): the `args` default guard appears elided from this listing.
        # inherited field: delegate to the model that actually owns the column
        if field in self._inherit_fields:
            return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
        # local column: let the field object perform the search itself
        return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
    def copy_data(self, cr, uid, id, default=None, context=None):
        """
        Copy given record's data with all its fields values

        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to copy
        :param default: field values to override in the original values of the copied record
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: dictionary containing all the field values
        """
        # NOTE(review): elided listing — the context/default guards, the
        # `res = {}` initialization, several `pass` branches of the elif chain
        # below and the final return are not shown; indentation is reconstructed.

        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_data_seen',{})
        if id in seen_map.setdefault(self._name,[]):
        seen_map[self._name].append(id)

        # recompute the 'state' default rather than copying the old state
        if 'state' not in default:
            if 'state' in self._defaults:
                if callable(self._defaults['state']):
                    default['state'] = self._defaults['state'](self, cr, uid, context)
                    default['state'] = self._defaults['state']

        data = self.read(cr, uid, [id,], context=context)
        raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))

        # build a black list of fields that should not be copied
        blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
        def blacklist_given_fields(obj):
            # blacklist the fields that are given by inheritance
            for other, field_to_other in obj._inherits.items():
                blacklist.add(field_to_other)
                if field_to_other in default:
                    # all the fields of 'other' are given by the record: default[field_to_other],
                    # except the ones redefined in self
                    blacklist.update(set(self.pool.get(other)._all_columns) - set(self._columns))
                blacklist_given_fields(self.pool.get(other))
        blacklist_given_fields(self)

        for f, colinfo in self._all_columns.items():
            field = colinfo.column
            elif f in blacklist:
            elif isinstance(field, fields.function):
            elif field._type == 'many2one':
                # m2o values read as (id, name) pairs; keep only the id
                res[f] = data[f] and data[f][0]
            elif field._type == 'one2many':
                other = self.pool.get(field._obj)
                # duplicate following the order of the ids because we'll rely on
                # it later for copying translations in copy_translation()!
                lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
                # the lines are duplicated using the wrong (old) parent, but then
                # are reassigned to the correct one thanks to the (0, 0, ...)
                res[f] = [(0, 0, line) for line in lines if line]
            elif field._type == 'many2many':
                # (6, 0, ids) replaces the whole relation with the copied ids
                res[f] = [(6, 0, data[f])]
    def copy_translations(self, cr, uid, old_id, new_id, context=None):
        # Duplicate the ir.translation rows of record `old_id` onto `new_id`,
        # recursing through one2many children.
        # NOTE(review): elided listing — the context guard, the early return on
        # recursion, the `])` closing the search domain and the non-inherited
        # target_id/source_id assignments are not shown here.

        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_translations_seen',{})
        if old_id in seen_map.setdefault(self._name,[]):
        seen_map[self._name].append(old_id)

        trans_obj = self.pool.get('ir.translation')
        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fields = self.fields_get(cr, uid, context=context)

        for field_name, field_def in fields.items():
            # removing the lang to compare untranslated values
            context_wo_lang = dict(context, lang=None)
            old_record, new_record = self.browse(cr, uid, [old_id, new_id], context=context_wo_lang)
            # we must recursively copy the translations for o2o and o2m
            if field_def['type'] == 'one2many':
                target_obj = self.pool.get(field_def['relation'])
                # here we rely on the order of the ids to match the translations
                # as foreseen in copy_data()
                old_children = sorted(r.id for r in old_record[field_name])
                new_children = sorted(r.id for r in new_record[field_name])
                for (old_child, new_child) in zip(old_children, new_children):
                    target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
            # and for translatable fields we keep them for copy
            elif field_def.get('translate'):
                if field_name in self._columns:
                    trans_name = self._name + "," + field_name
                elif field_name in self._inherit_fields:
                    trans_name = self._inherit_fields[field_name][0] + "," + field_name
                    # get the id of the parent record to set the translation
                    inherit_field_name = self._inherit_fields[field_name][1]
                    target_id = new_record[inherit_field_name].id
                    source_id = old_record[inherit_field_name].id

                trans_ids = trans_obj.search(cr, uid, [
                        ('name', '=', trans_name),
                        ('res_id', '=', source_id)
                user_lang = context.get('lang')
                for record in trans_obj.read(cr, uid, trans_ids, context=context):
                    # remove source to avoid triggering _set_src
                    del record['source']
                    record.update({'res_id': target_id})
                    if user_lang and user_lang == record['lang']:
                        # 'source' to force the call to _set_src
                        # 'value' needed if value is changed in copy(), want to see the new_value
                        record['source'] = old_record[field_name]
                        record['value'] = new_record[field_name]
                    trans_obj.create(cr, uid, record, context=context)
    def copy(self, cr, uid, id, default=None, context=None):
        """
        Duplicate record with given id updating it with default values

        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to copy
        :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: id of the newly created record
        """
        # NOTE(review): the context default guard and the final
        # `return new_id` appear elided from this listing.

        # work on a private copy so the '__copy_data_seen'/'__copy_translations_seen'
        # bookkeeping done by the helpers does not leak to the caller's context
        context = context.copy()
        data = self.copy_data(cr, uid, id, default, context)
        new_id = self.create(cr, uid, data, context)
        self.copy_translations(cr, uid, id, new_id, context)
5061 def exists(self, cr, uid, ids, context=None):
5062 """Checks whether the given id or ids exist in this model,
5063 and return the list of ids that do. This is simple to use for
5064 a truth test on a browse_record::
5069 :param ids: id or list of ids to check for existence
5070 :type ids: int or [int]
5071 :return: the list of ids that currently exist, out of
5074 if type(ids) in (int, long):
5076 query = 'SELECT id FROM "%s"' % self._table
5077 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
5078 return [x[0] for x in cr.fetchall()]
    def check_recursion(self, cr, uid, ids, context=None, parent=None):
        # Deprecated public wrapper kept for backward compatibility: logs a
        # warning, validates `parent`, then delegates to _check_recursion().
        # NOTE(review): the continuation line closing the warning call
        # (presumably `self._name)`) appears elided from this listing.
        _logger.warning("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
        assert parent is None or parent in self._columns or parent in self._inherit_fields,\
            "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
        return self._check_recursion(cr, uid, ids, context, parent)
    def _check_recursion(self, cr, uid, ids, context=None, parent=None):
        """
        Verifies that there is no loop in a hierarchical structure of records,
        by following the parent relationship using the **parent** field until a loop
        is detected or until a top-level record is found.

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids of records to check
        :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
        :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
        """
        # NOTE(review): elided listing — the `if not parent:` guard, the outer
        # `for id in ids:` loop, the `current_id = id` seed and the
        # True/False returns are not shown here.
        parent = self._parent_name

        # must ignore 'active' flag, ir.rules, etc. => direct SQL query
        query = 'SELECT "%s" FROM "%s" WHERE id = %%s' % (parent, self._table)
        # walk up the parent chain; reaching the starting id again means a cycle
        while current_id is not None:
            cr.execute(query, (current_id,))
            result = cr.fetchone()
            current_id = result[0] if result else None
            if current_id == id:
    def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
        """Retrieve the External ID(s) of any database record.

        **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``

        :return: map of ids to the list of their fully qualified External IDs
                 in the form ``module.key``, or an empty list when there's no External
                 ID for a record, e.g.::

                     { 'id': ['module.ext_id', 'module.ext_id_bis'],
                     }
        """
        # NOTE(review): the per-id initialization of `result` (one fresh empty
        # list per id) and the final `return result` are elided here.
        ir_model_data = self.pool.get('ir.model.data')
        data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
        data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
        # can't use dict.fromkeys() as the list would be shared!
        for record in data_results:
            result[record['res_id']].append('%(module)s.%(name)s' % record)
    def get_external_id(self, cr, uid, ids, *args, **kwargs):
        """Retrieve the External ID of any database record, if there
        is one. This method works as a possible implementation
        for a function field, to be able to add it to any
        model object easily, referencing it as ``Model.get_external_id``.

        When multiple External IDs exist for a record, only one
        of them is returned (randomly).

        :return: map of ids to their fully qualified XML ID,
                 defaulting to an empty string when there's none
                 (to be usable as a function field),
                 e.g.::

                     { 'id': 'module.ext_id',
                     }
        """
        # NOTE(review): the loop body (reducing each id's External ID list to
        # a single value or '') and the final return are elided here.
        results = self._get_xml_ids(cr, uid, ids)
        for k, v in results.iteritems():
    # backwards compatibility: legacy method names kept as aliases so existing
    # callers of get_xml_id()/_get_xml_ids() keep working
    get_xml_id = get_external_id
    _get_xml_ids = _get_external_ids
5167 def is_transient(self):
5168 """ Return whether the model is transient.
5170 See :class:`TransientModel`.
5173 return self._transient
5175 def _transient_clean_rows_older_than(self, cr, seconds):
5176 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5177 # Never delete rows used in last 5 minutes
5178 seconds = max(seconds, 300)
5179 query = ("SELECT id FROM " + self._table + " WHERE"
5180 " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
5181 " < ((now() at time zone 'UTC') - interval %s)")
5182 cr.execute(query, ("%s seconds" % seconds,))
5183 ids = [x[0] for x in cr.fetchall()]
5184 self.unlink(cr, SUPERUSER_ID, ids)
5186 def _transient_clean_old_rows(self, cr, max_count):
5187 # Check how many rows we have in the table
5188 cr.execute("SELECT count(*) AS row_count FROM " + self._table)
5190 if res[0][0] <= max_count:
5191 return # max not reached, nothing to do
5192 self._transient_clean_rows_older_than(cr, 300)
5194 def _transient_vacuum(self, cr, uid, force=False):
5195 """Clean the transient records.
5197 This unlinks old records from the transient model tables whenever the
5198 "_transient_max_count" or "_max_age" conditions (if any) are reached.
5199 Actual cleaning will happen only once every "_transient_check_time" calls.
5200 This means this method can be called frequently called (e.g. whenever
5201 a new record is created).
5202 Example with both max_hours and max_count active:
5203 Suppose max_hours = 0.2 (e.g. 12 minutes), max_count = 20, there are 55 rows in the
5204 table, 10 created/changed in the last 5 minutes, an additional 12 created/changed between
5205 5 and 10 minutes ago, the rest created/changed more then 12 minutes ago.
5206 - age based vacuum will leave the 22 rows created/changed in the last 12 minutes
5207 - count based vacuum will wipe out another 12 rows. Not just 2, otherwise each addition
5208 would immediately cause the maximum to be reached again.
5209 - the 10 rows that have been created/changed the last 5 minutes will NOT be deleted
5211 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5212 _transient_check_time = 20 # arbitrary limit on vacuum executions
5213 self._transient_check_count += 1
5214 if not force and (self._transient_check_count < _transient_check_time):
5215 return True # no vacuum cleaning this time
5216 self._transient_check_count = 0
5218 # Age-based expiration
5219 if self._transient_max_hours:
5220 self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)
5222 # Count-based expiration
5223 if self._transient_max_count:
5224 self._transient_clean_old_rows(cr, self._transient_max_count)
5228 def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None):
5229 """ Serializes one2many and many2many commands into record dictionaries
5230 (as if all the records came from the database via a read()). This
5231 method is aimed at onchange methods on one2many and many2many fields.
5233 Because commands might be creation commands, not all record dicts
5234 will contain an ``id`` field. Commands matching an existing record
5235 will have an ``id``.
5237 :param field_name: name of the one2many or many2many field matching the commands
5238 :type field_name: str
5239 :param commands: one2many or many2many commands to execute on ``field_name``
5240 :type commands: list((int|False, int|False, dict|False))
5241 :param fields: list of fields to read from the database, when applicable
5242 :type fields: list(str)
5243 :returns: records in a shape similar to that returned by ``read()``
5244 (except records may be missing the ``id`` field if they don't exist in db)
5247 result = [] # result (list of dict)
5248 record_ids = [] # ids of records to read
5249 updates = {} # {id: dict} of updates on particular records
5251 for command in commands:
5252 if not isinstance(command, (list, tuple)):
5253 record_ids.append(command)
5254 elif command[0] == 0:
5255 result.append(command[2])
5256 elif command[0] == 1:
5257 record_ids.append(command[1])
5258 updates.setdefault(command[1], {}).update(command[2])
5259 elif command[0] in (2, 3):
5260 record_ids = [id for id in record_ids if id != command[1]]
5261 elif command[0] == 4:
5262 record_ids.append(command[1])
5263 elif command[0] == 5:
5264 result, record_ids = [], []
5265 elif command[0] == 6:
5266 result, record_ids = [], list(command[2])
5268 # read the records and apply the updates
5269 other_model = self.pool.get(self._all_columns[field_name].column._obj)
5270 for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
5271 record.update(updates.get(record['id'], {}))
5272 result.append(record)
    # for backward compatibility: old name of resolve_2many_commands(),
    # kept as an alias for existing callers
    resolve_o2m_commands_to_record_dicts = resolve_2many_commands
    def _register_hook(self, cr):
        """ Hook executed right after the registry is built; meant to be
        overridden by models that need extra setup at that point. The
        default implementation does nothing.
        """
# keep this import here; importing it at the top of the file would cause
# dependency cycle errors
class Model(BaseModel):
    """Main super-class for regular database-persisted OpenERP models.

    OpenERP models are created by inheriting from this class::

        class user(Model):
            ...

    The system will later instantiate the class once per database (on
    which the class' module is installed).
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = False # True in a TransientModel
class TransientModel(BaseModel):
    """Model super-class for transient records, meant to be temporarily
    persisted, and regularly vacuum-cleaned.

    A TransientModel has a simplified access rights management,
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
    to all TransientModel records.
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    # marks the model as transient so is_transient()/_transient_vacuum() apply
    _transient = True
class AbstractModel(BaseModel):
    """Abstract Model super-class for creating an abstract class meant to be
    inherited by regular models (Models or TransientModels) but not meant to
    be usable on its own, or persisted.

    Technical note: we don't want to make AbstractModel the super-class of
    Model or BaseModel because it would not make sense to put the main
    definition of persistence methods such as create() in it, and still we
    should be able to override them within an AbstractModel.
    """
    _auto = False # don't create any database backend for AbstractModels
    _register = False # not visible in ORM registry, meant to be python-inherited only
def itemgetter_tuple(items):
    """ Fixes itemgetter inconsistency (useful in some cases) of not returning
    a tuple if len(items) == 1: always returns an n-tuple where n = len(items)

    :param items: sequence of keys/indices to extract
    :return: callable mapping a subscriptable object to an n-tuple of values
    """
    if len(items) == 0:
        # operator.itemgetter() with no args is an error; return () instead
        return lambda a: ()
    if len(items) == 1:
        # operator.itemgetter(k) returns a bare value; force a 1-tuple
        return lambda gettable: (gettable[items[0]],)
    return operator.itemgetter(*items)
# NOTE(review): this deliberately shadows the ``ImportWarning`` builtin
# within this module.
class ImportWarning(Warning):
    """ Used to send warnings upwards the stack during the import process """
def convert_pgerror_23502(model, fields, info, e):
    """ Convert a PostgreSQL not_null_violation (SQLSTATE 23502) into a
    user-friendly error dict ``{'message': ..., 'field': ...}``, falling back
    to the raw message when the column cannot be matched to a known field.
    """
    m = re.match(r'^null value in column "(?P<field>\w+)" violates '
                 r'not-null constraint\n',
                 str(e))
    # check the match BEFORE dereferencing it: m is None when the error
    # message has an unexpected shape
    if not m or m.group('field') not in fields:
        return {'message': unicode(e)}
    field_name = m.group('field')
    message = _(u"Missing required value for the field '%s'.") % field_name
    field = fields.get(field_name)
    if field:
        message = _(u"%s This might be '%s' in the current model, or a field "
                    u"of the same name in an o2m.") % (message, field['string'])
    return {
        'message': message,
        'field': field_name,
    }
def convert_pgerror_23505(model, fields, info, e):
    """ Convert a PostgreSQL unique_violation (SQLSTATE 23505) into a
    user-friendly error dict ``{'message': ..., 'field': ...}``, falling back
    to the raw message when the key cannot be matched to a known field.
    """
    m = re.match(r'^duplicate key (?P<field>\w+) violates unique constraint',
                 str(e))
    # check the match BEFORE dereferencing it: m is None when the error
    # message has an unexpected shape
    if not m or m.group('field') not in fields:
        return {'message': unicode(e)}
    field_name = m.group('field')
    message = _(u"The value for the field '%s' already exists.") % field_name
    field = fields.get(field_name)
    if field:
        message = _(u"%s This might be '%s' in the current model, or a field "
                    u"of the same name in an o2m.") % (message, field['string'])
    return {
        'message': message,
        'field': field_name,
    }
# Mapping of PostgreSQL SQLSTATE error codes to converter functions that
# produce user-friendly error dicts; unknown codes fall back to the raw
# message via the defaultdict factory.
PGERROR_TO_OE = collections.defaultdict(
    # shape of mapped converters
    lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), {
    # not_null_violation
    '23502': convert_pgerror_23502,
    # unique constraint error
    '23505': convert_pgerror_23505,
})
5383 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: