1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
22 #.apidoc title: Object Relational Mapping
23 #.apidoc module-mods: member-order: bysource
26 Object relational mapping to database (postgresql) module
27 * Hierarchical structure
28 * Constraints consistency, validations
29 * Object meta Data depends on its status
30 * Optimised processing by complex query (multiple actions at once)
31 * Default fields value
32 * Permissions optimisation
 * Persistent object: DB postgresql
35 * Multi-level caching system
 * 2 different inheritance mechanisms
38 - classicals (varchar, integer, boolean, ...)
39 - relations (one2many, many2one, many2many)
60 from lxml import etree
65 import openerp.netsvc as netsvc
66 import openerp.tools as tools
67 from openerp.tools.config import config
68 from openerp.tools.misc import CountingStream
69 from openerp.tools.safe_eval import safe_eval as eval
70 from openerp.tools.translate import _
71 from openerp import SUPERUSER_ID
72 from query import Query
74 _logger = logging.getLogger(__name__)
75 _schema = logging.getLogger(__name__ + '.schema')
77 # List of etree._Element subclasses that we choose to ignore when parsing XML.
78 from openerp.tools import SKIPPED_ELEMENT_TYPES
# Pattern accepted for an _order specification: comma-separated column names
# (optionally double-quoted), each with an optional ASC/DESC suffix.
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Valid model (_name) identifiers: lowercase alphanumerics, '_' and '.'.
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
def transfer_field_to_modifiers(field, modifiers):
    """ Derive the ``invisible``/``readonly``/``required`` modifiers from a
    field descriptor (as returned by fields_get()) and store them in
    ``modifiers`` (updated in place).

    The static attribute value is used as the default; the ``states`` dict
    may override it for specific states, in which case the modifier becomes
    a domain on the ``state`` field.

    :param dict field: field descriptor (``invisible``, ``readonly``,
                       ``required`` flags and optional ``states`` mapping)
    :param dict modifiers: accumulator updated in place
    """
    default_values = {}
    state_exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        state_exceptions[attr] = []
        default_values[attr] = bool(field.get(attr))
    for state, modifs in (field.get("states",{})).items():
        for modif in modifs:
            # record the states in which the attribute differs from its default
            if default_values[modif[0]] != modif[1]:
                state_exceptions[modif[0]].append(state)

    for attr, default_value in default_values.items():
        if state_exceptions[attr]:
            # express the exceptions as a domain on the 'state' field
            modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
        else:
            modifiers[attr] = default_value
101 # Don't deal with groups, it is done by check_group().
102 # Need the context to evaluate the invisible attribute on tree views.
103 # For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """ Merge a view node's dynamic attributes (``attrs``, ``states`` and the
    literal ``invisible``/``readonly``/``required`` attributes) into
    ``modifiers`` (updated in place).

    :param node: view node (anything exposing a dict-like ``get``)
    :param dict modifiers: accumulator updated in place
    :param dict context: evaluation context for the boolean attributes
    :param bool in_tree_view: route ``invisible`` to ``tree_invisible``
                              (column-level invisibility)
    """
    if node.get('attrs'):
        modifiers.update(eval(node.get('attrs')))

    if node.get('states'):
        if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
        else:
            modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]

    for a in ('invisible', 'readonly', 'required'):
        if node.get(a):
            v = bool(eval(node.get(a), {'context': context or {}}))
            if in_tree_view and a == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = v
            elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[a] = v
def simplify_modifiers(modifiers):
    """ Remove the falsy ``invisible``/``readonly``/``required`` entries from
    ``modifiers`` (in place), keeping the serialized form minimal. """
    for a in ('invisible', 'readonly', 'required'):
        if a in modifiers and not modifiers[a]:
            del modifiers[a]
def transfer_modifiers_to_node(modifiers, node):
    """ Serialize ``modifiers`` as JSON into the node's ``modifiers``
    attribute (after simplification). No-op when ``modifiers`` is empty or
    ``node`` is None. """
    if modifiers and node is not None:
        simplify_modifiers(modifiers)
        node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """ Processes node attributes and field descriptors to generate
    the ``modifiers`` node attribute and set it on the provided node.

    Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    :returns: nothing
    """
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """ Compute the modifiers of ``what`` — either an XML node source string
    or a fields_get() field descriptor dict — and assert that their JSON
    serialization equals ``expected``. """
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
183 # openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """ Run test_modifiers() over a table of XML nodes / field descriptors
    and their expected JSON modifiers. """
    xml_cases = [
        ('<field name="a"/>', '{}'),
        ('<field name="a" invisible="1"/>', '{"invisible": true}'),
        ('<field name="a" readonly="1"/>', '{"readonly": true}'),
        ('<field name="a" required="1"/>', '{"required": true}'),
        ('<field name="a" invisible="0"/>', '{}'),
        ('<field name="a" readonly="0"/>', '{}'),
        ('<field name="a" required="0"/>', '{}'),
        # TODO order is not guaranteed
        ('<field name="a" invisible="1" required="1"/>',
         '{"invisible": true, "required": true}'),
        ('<field name="a" invisible="1" required="0"/>', '{"invisible": true}'),
        ('<field name="a" invisible="0" required="1"/>', '{"required": true}'),
        ("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""",
         '{"invisible": [["b", "=", "c"]]}'),
    ]
    for source, expected in xml_cases:
        test_modifiers(source, expected)

    # The dictionary is supposed to be the result of fields_get().
    field_cases = [
        ({}, '{}'),
        ({"invisible": True}, '{"invisible": true}'),
        ({"invisible": False}, '{}'),
    ]
    for descriptor, expected in field_cases:
        test_modifiers(descriptor, expected)
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

        The _name attribute in osv and osv_memory object is subject to
        some restrictions. This function returns True or False whether
        the given name is allowed or not.

        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in other not, which leads to this kind
        of errors:

            psycopg2.ProgrammingError: relation "xxx" does not exist).

        The same restriction should apply to both osv and osv_memory
        objects for consistency.

    """
    if regex_object_name.match(name) is None:
        return False
    return True
def raise_on_invalid_object_name(name):
    """ Raise except_orm (and log an error) if ``name`` is not a valid
    model name per check_object_name(). """
    if not check_object_name(name):
        msg = "The _name attribute %s is not valid." % name
        _logger.error(msg)
        raise except_orm('ValueError', msg)
# Mapping from the ON DELETE action names usable on many2one fields to the
# single-letter `confdeltype` codes stored by PostgreSQL in pg_constraint.
POSTGRES_CONFDELTYPES = {
    'RESTRICT': 'r',
    'NO ACTION': 'a',
    'CASCADE': 'c',
    'SET NULL': 'n',
    'SET DEFAULT': 'd',
}
def intersect(la, lb):
    """ Return the elements of ``la`` that also occur in ``lb``, preserving
    ``la``'s ordering and duplicates. """
    return [item for item in la if item in lb]
def fix_import_export_id_paths(fieldname):
    """
    Fixes the id fields in import and exports, and splits field paths
    on '/'.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    # 'foo.id' -> 'foo/.id' (database id) and 'foo:id' -> 'foo/id'
    # (external id), then break the normalized path on '/'.
    normalized = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    normalized = re.sub(r'([^/]):id', r'\1/id', normalized)
    return normalized.split('/')
class except_orm(Exception):
    """ Base exception for ORM errors, carrying a short ``name`` (title) and
    a ``value`` (detailed message). Both are also exposed via ``args``. """
    def __init__(self, name, value):
        self.name = name
        self.value = value
        self.args = (name, value)
class BrowseRecordError(Exception):
    """ Raised when a browse_record is constructed or used incorrectly. """
    pass
class browse_null(object):
    """ Readonly python database object browser: null placeholder returned
    for empty relational fields. Any attribute or item access yields None,
    and the object is falsy (Python 2 ``__nonzero__``). """

    def __init__(self):
        # mimic a record with no database id
        self.id = False

    def __getitem__(self, name):
        return None

    def __getattr__(self, name):
        return None  # XXX: return self ?

    def __repr__(self):
        return ''

    def __str__(self):
        return ''

    def __nonzero__(self):
        return False

    def __unicode__(self):
        return u''
291 # TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """ Collection of browse objects

        Such an instance will be returned when doing a ``browse([ids..])``
        and will be iterable, yielding browse() objects
    """

    def __init__(self, lst, context=None):
        """
        :param lst: iterable of browse_record objects
        :param dict context: optional context, defaults to an empty dict
        """
        if not context:
            context = {}
        super(browse_record_list, self).__init__(lst)
        self.context = context
class browse_record(object):
    """ An object that behaves like a row of an object's table.
        It has attributes after the columns of the corresponding object.

        Examples::

            uobj = pool.get('res.users')
            user_rec = uobj.browse(cr, uid, 104)

        NOTE(review): this chunk of the file appears truncated — several
        statements (initializations, ``else``/``try`` branches, some method
        ``def`` lines) are missing below; confirm against the upstream file
        before relying on the exact control flow.
    """

    def __init__(self, cr, uid, id, table, cache, context=None,
                 list_class=browse_record_list, fields_process=None):
        """
        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to browse
        :param table: the browsed object (inherited from orm)
        :param dict cache: a dictionary of model->field->data to be shared
                           across browse objects, thus reducing the SQL
                           read()s. It can speed up things a lot, but also be
                           disastrous if not discarded after write()/unlink()
                           call
        :param dict context: dictionary with an optional context
        """
        if fields_process is None:
        self._list_class = list_class
        self._table = table # deprecated, use _model!
        self._table_name = self._table._name
        self.__logger = logging.getLogger('openerp.osv.orm.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        # per-model cache shared by every browse_record built with the same
        # ``cache`` dict: maps record id -> {field_name: value}
        cache.setdefault(table._name, {})
        self._data = cache[table._name]

        # if not (id and isinstance(id, (int, long,))):
        #     raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
        # if not table.exists(cr, uid, id, context):
        #     raise BrowseRecordError(_('Object %s does not exists') % (self,))

        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        # cache-miss path: fetch the requested field (and, for prefetchable
        # fields, its whole prefetch group) through read()
        if name not in self._data[self._id]:
            # build the list of fields we will fetch

            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                attr = getattr(self._table, name)

                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    # proxy model methods so they are invoked with
                    # (cr, uid, [id], ...) bound to this record
                    def function_proxy(*args, **kwargs):
                        if 'context' not in kwargs and self._context:
                            kwargs.update(context=self._context)
                        return attr(self._cr, self._uid, [self._id], *args, **kwargs)
                    return function_proxy

                error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
                self.__logger.warning(error_msg)
                if self.__logger.isEnabledFor(logging.DEBUG):
                    self.__logger.debug(''.join(traceback.format_stack()))
                raise KeyError(error_msg)

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
                # gen the list of "local" (ie not inherited) fields which are classic or many2one
                fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
                # gen the list of inherited fields
                inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
                # complete the field list with the inherited fields which are classic or many2one
                fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
                # otherwise we fetch only that field
                fields_to_fetch = [(name, col)]
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            # read the results
            field_names = map(lambda x: x[0], fields_to_fetch)
            field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")

            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                    raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

                # Where did those ids come from? Perhaps old entries in ir_model_dat?
                _logger.warning("No field_values found for ids %s in %s", ids, self)
                raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                for field_name, field_column in fields_to_fetch:
                    if field_column._type == 'many2one':
                        if result_line[field_name]:
                            obj = self._table.pool.get(field_column._obj)
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                                value = result_line[field_name]
                                # FIXME: this happen when a _inherits object
                                #        overwrite a field of it parent. Need
                                #        testing to be sure we got the right
                                #        object and not the parent one.
                                if not isinstance(value, browse_record):
                                        # In some cases the target model is not available yet, so we must ignore it,
                                        # which is safe in most cases, this value will just be loaded later when needed.
                                        # This situation can be caused by custom fields that connect objects with m2o without
                                        # respecting module dependencies, causing relationships to be connected to soon when
                                        # the target is not loaded yet.
                                    new_data[field_name] = browse_record(self._cr,
                                        self._uid, value, obj, self._cache,
                                        context=self._context,
                                        list_class=self._list_class,
                                        fields_process=self._fields_process)
                                    new_data[field_name] = value
                                new_data[field_name] = browse_null()
                            new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type in ('reference'):
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                                # reference values are stored as 'model,id'
                                ref_obj, ref_id = result_line[field_name].split(',')
                                ref_id = long(ref_id)
                                    obj = self._table.pool.get(ref_obj)
                                    new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                                    new_data[field_name] = browse_null()
                            new_data[field_name] = browse_null()
                        new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

            if not name in self._data[self._id]:
                # How did this happen? Could be a missing model due to custom fields used too soon, see above.
                self.__logger.error("Fields to fetch: %s, Field values: %s", field_names, field_values)
                self.__logger.error("Cached: %s, Table: %s", self._data[self._id], self._table)
                raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
        # attribute access delegates to __getitem__; a missing field
        # surfaces as AttributeError instead of KeyError
            raise AttributeError(e)

    def __contains__(self, name):
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

        raise NotImplementedError("Iteration is not allowed on %s" % self)

    def __hasattr__(self, name):

        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        # records compare equal on (model name, id)
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        if not isinstance(other, browse_record):
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

        return hash((self._table_name, self._id))

        """Force refreshing this browse_record's data and all the data of the
        records that belong to the same cache, by emptying the cache completely,
        preserving only the record identifiers (for prefetching optimizations).
        """
        for model, model_cache in self._cache.iteritems():
            # only preserve the ids of the records that were in the cache
            cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
            self._cache[model].clear()
            self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """ Returns the VARCHAR declaration for the provided size:

    * If no size (or an empty or negative size is provided) return an
      'infinite' VARCHAR
    * Otherwise return a VARCHAR(n)

    :type int size: varchar size, optional
    :rtype: str
    """
    if size:
        if not isinstance(size, int):
            raise TypeError("VARCHAR parameter should be an int, got %s"
                            % type(size))
        if size > 0:
            return 'VARCHAR(%d)' % size
    return 'VARCHAR'
# Mapping from simple field classes to their PostgreSQL column type,
# used by get_pg_type() below.
# NOTE(review): the text/html/date entries and the closing brace were
# missing from this chunk and restored from upstream — verify against the
# exact version in use.
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.text: 'text',
    fields.html: 'text',
    fields.date: 'date',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    fields.serialized: 'text',
}
def get_pg_type(f, type_override=None):
    """
    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification)
    :rtype: (str, str)
    """
    field_type = type_override or type(f)

    if field_type in FIELDS_TO_PGTYPES:
        pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
    elif issubclass(field_type, fields.float):
        if f.digits:
            pg_type = ('numeric', 'NUMERIC')
        else:
            pg_type = ('float8', 'DOUBLE PRECISION')
    elif issubclass(field_type, (fields.char, fields.reference)):
        pg_type = ('varchar', pg_varchar(f.size))
    elif issubclass(field_type, fields.selection):
        # integer-keyed selections (or explicit size == -1) are stored as ints
        if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\
                or getattr(f, 'size', None) == -1:
            pg_type = ('int4', 'INTEGER')
        else:
            pg_type = ('varchar', pg_varchar(getattr(f, 'size', None)))
    elif issubclass(field_type, fields.function):
        if f._type == 'selection':
            pg_type = ('varchar', pg_varchar())
        else:
            # dispatch on the type the function field emulates
            pg_type = get_pg_type(f, getattr(fields, f._type))
    else:
        _logger.warning('%s type not supported!', field_type)
        pg_type = None

    return pg_type
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instantiating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.

    """

    # class-level registry: OpenERP module name -> list of model classes
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            # first non-registered class: re-enable registration for
            # subclasses and stop here (do not record this class itself)
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            return

        # The (OpenERP) module name can be in the `openerp.addons` namespace
        # or not. For instance module `sale` can be imported as
        # `openerp.addons.sale` (the good way) or `sale` (for backward
        # compatibility).
        module_parts = self.__module__.split('.')
        if len(module_parts) > 2 and module_parts[0] == 'openerp' and \
                module_parts[1] == 'addons':
            module_name = self.__module__.split('.')[2]
        else:
            module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instantiate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# (list() keeps this working on Python 3 dict views as well)
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys())
class BaseModel(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class' subclasses:

    * Model: for regular database-persisted models
    * TransientModel: for temporary data, stored in the database but automatically
      vacuumed every so often
    * AbstractModel: for abstract super classes meant to be shared by multiple
      _inheriting classes (usually Models or TransientModels)

    The system will later instantiate the class once per database (on
    which the class' module is installed).

    To create a class that should not be instantiated, the _register class attribute
    may be set to False.

    NOTE(review): this chunk appears truncated — several class attributes
    (e.g. _name, _columns, _defaults, _inherits) are missing below; confirm
    against the upstream file.
    """
    __metaclass__ = MetaModel
    _auto = True # create database backend
    _register = False # Set to false if the model shouldn't be automatically discovered.
    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    # dict of {field:method}, with method returning the (name_get of records, {id: fold})
    # to include in the _read_group, if grouped on this field
    _transient = False # True in a TransientModel
    _transient_max_count = None
    _transient_max_hours = None
    _transient_check_time = 20

    # { 'parent_model': 'm2o_field', ... }
    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model.
    # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #                  field_column_obj, original_parent_model), ... }

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.
    _sql_constraints = []
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']

    # pseudo-field used by clients for optimistic concurrency checks
    CONCURRENCY_CHECK_FIELD = '__last_update'
718 def log(self, cr, uid, id, message, secondary=False, context=None):
719 return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.")
    def view_init(self, cr, uid, fields_list, context=None):
        """Override this method to do specific things when a view on the object is opened.

        Default implementation is a no-op hook.

        :param cr: database cursor
        :param uid: current user id
        :param list fields_list: names of the fields present in the opened view
        :param dict context: optional context
        """
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).

        NOTE(review): this chunk appears truncated — several statements
        (context default, ``if not cr.rowcount``/``else`` branches, parts of
        the ``vals`` dict and of the SQL statements) are missing; confirm
        against the upstream file before editing the logic below.
        """
        # make sure the model itself is present in ir_model
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
            cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
            model_id = cr.fetchone()[0]
            cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
            model_id = cr.fetchone()[0]
        if 'module' in context:
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                    (name_id, context['module'], 'ir.model', model_id)

        # reflect each of the model's columns into ir_model_fields
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        ir_model_fields_obj = self.pool.get('ir.model.fields')

        # sparse field should be created at the end, as it depends on its serialized field already existing
        model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0)
        for (k, f) in model_fields:
                'model_id': model_id,
                'field_description': f.string,
                'relation': f._obj or '',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': f._fields_id if isinstance(f, fields.one2many) else '',
                'serialization_field_id': None,
            if getattr(f, 'serialization_field', None):
                # resolve link to serialization_field if specified by name
                serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', f.serialization_field)])
                if not serialization_field_id:
                    raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k))
                vals['serialization_field_id'] = serialization_field_id[0]

            # When it's a custom field, it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                # setting value to let the problem NOT occur next time
                vals['select_level'] = cols[k]['select_level']

                cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
                id = cr.fetchone()[0]
                cr.execute("""INSERT INTO ir_model_fields (
                    id, model_id, model, name, field_description, ttype,
                    relation,view_load,state,select_level,relation_field, translate, serialization_field_id
                    %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                    id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                    vals['relation'], bool(vals['view_load']), 'base',
                    vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id']
                if 'module' in context:
                    name1 = 'field_' + self._table + '_' + k
                    cr.execute("select name from ir_model_data where name=%s", (name1,))
                        name1 = name1 + "_" + str(id)
                    cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                        (name1, context['module'], 'ir.model.fields', id)
                for key, val in vals.items():
                    if cols[k][key] != vals[key]:
                        cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                        cr.execute("""UPDATE ir_model_fields SET
                            model_id=%s, field_description=%s, ttype=%s, relation=%s,
                            view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s, serialization_field_id=%s
                            model=%s AND name=%s""", (
                            vals['model_id'], vals['field_description'], vals['ttype'],
                            vals['relation'], bool(vals['view_load']),
                            vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name']
840 # Goal: try to apply inheritance at the instanciation level and
841 # put objects in the pool var
    def create_instance(cls, pool, cr):
        """ Instantiate a given model.

        This class method instantiates the class of some model (i.e. a class
        deriving from osv or osv_memory). The class might be the class passed
        in argument or, if it inherits from another class, a class constructed
        by combining the two classes.

        The ``attributes`` list below specifies which parent class attributes
        are combined.

        TODO: the creation of the combined class is repeated at each call of
        this method. This is probably unnecessary.

        NOTE(review): this chunk appears truncated — the ``@classmethod``
        decorator, several ``if``/``else`` lines and the final ``return`` are
        missing, and the tail of this span belongs to another method whose
        ``def`` line is absent; confirm against the upstream file.
        """
        attributes = ['_columns', '_defaults', '_inherits', '_constraints',

        parent_names = getattr(cls, '_inherit', None)
            if isinstance(parent_names, (str, unicode)):
                name = cls._name or parent_names
                parent_names = [parent_names]

            # for res.partner.address compatibility, should be removed in v7
            if 'res.partner.address' in parent_names:
                parent_names.pop(parent_names.index('res.partner.address'))
                parent_names.append('res.partner')
                raise TypeError('_name is mandatory in case of multiple inheritance')

            for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
                parent_model = pool.get(parent_name)
                    raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                        'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
                if not getattr(cls, '_original_module', None) and name == parent_model._name:
                    cls._original_module = parent_model._original_module
                parent_class = parent_model.__class__
                    new = copy.copy(getattr(parent_model, s, {}))
                        # Don't _inherit custom fields.

                        # Duplicate float fields because they have a .digits
                        # cache (which must be per-registry, not server-wide).
                            if new[c]._type == 'float':
                                new[c] = copy.copy(new[c])
                    if hasattr(new, 'update'):
                        new.update(cls.__dict__.get(s, {}))
                    elif s=='_constraints':
                        for c in cls.__dict__.get(s, []):
                            for c2 in range(len(new)):
                                #For _constraints, we should check field and methods as well
                                if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                        or getattr(new[c2][0],'__name__', True) == \
                                        getattr(c[0],'__name__', False)):
                                    # If new class defines a constraint with
                                    # same function name, we let it override
                                    # the old one.
                        new.extend(cls.__dict__.get(s, []))

            # combined class: subclass of both the declared class and the
            # already-registered parent class
            cls = type(name, (cls, parent_class), dict(nattr, _register=False))
        if not getattr(cls, '_original_module', None):
            cls._original_module = cls._module
        obj = object.__new__(cls)
        obj.__init__(pool, cr)

        """Register this model.

        This doesn't create an instance but simply register the model
        as being part of the module where it is defined.

        """

        # Set the module name (e.g. base, sale, accounting, ...) on the class.
        module = cls.__module__.split('.')[0]
        if not hasattr(cls, '_module'):

        # Record this class in the list of models to instantiate for this module,
        # managed by the metaclass.
        module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
        if cls not in module_model_list:
            module_model_list.append(cls)

        # Since we don't return an instance here, the __init__
        # method won't be called.
# Model constructor: wires a model instance into the registry (`pool`).
# NOTE(review): several original lines are elided from this listing
# (else-branches, try/except structure); code below is kept verbatim.
950 def __init__(self, pool, cr):
951 """ Initialize a model and make it part of the given registry.
953 - copy the stored fields' functions in the osv_pool,
954 - update the _columns with the fields found in ir_model_fields,
955 - ensure there is a many2one for each _inherits'd parent,
956 - update the children's _columns,
957 - give a chance to each field to initialize itself.
# Register this model instance in the registry under its _name.
960 pool.add(self._name, self)
# A model must declare _name unless it extends an existing one via _inherit.
963 if not self._name and not hasattr(self, '_inherit'):
964 name = type(self).__name__.split('.')[0]
965 msg = "The class %s has to have a _name attribute" % name
968 raise except_orm('ValueError', msg)
970 if not self._description:
971 self._description = self._name
# SQL table name: dots in the model name become underscores.
973 self._table = self._name.replace('.', '_')
975 if not hasattr(self, '_log_access'):
976 # If _log_access is not specified, it is the same value as _auto.
977 self._log_access = getattr(self, "_auto", True)
# Shallow-copy _columns so per-registry mutations below do not leak
# into the shared class attribute.
979 self._columns = self._columns.copy()
# Rebuild the registry's store-function triggers for this model's
# function fields (used to recompute stored function fields).
980 for store_field in self._columns:
981 f = self._columns[store_field]
982 if hasattr(f, 'digits_change'):
# Drop any stale trigger previously registered for this exact field.
984 def not_this_field(stored_func):
985 x, y, z, e, f, l = stored_func
986 return x != self._name or y != store_field
987 self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
988 if not isinstance(f, fields.function):
# store=True shorthand: recompute on any write to this model's own ids.
994 sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
995 for object, aa in sm.items():
# Accept both 4-tuples (with time length) and 3-tuples.
997 (fnct, fields2, order, length) = aa
999 (fnct, fields2, order) = aa
1002 raise except_orm('Error',
1003 ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
1004 self.pool._store_function.setdefault(object, [])
1005 self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
# Keep triggers ordered by priority (element index 4).
1006 self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
# Map SQL constraint names to human-readable error messages for the
# registry-wide integrity-error translation.
1008 for (key, _, msg) in self._sql_constraints:
1009 self.pool._sql_error[self._table+'_'+key] = msg
1011 # Load manual fields
# Guard query: only proceed if ir.model.fields has its 'state' column
# (i.e. the base module is installed far enough).
1013 cr.execute("SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", ('state', 'ir.model.fields'))
1015 cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
1016 for field in cr.dictfetchall():
# Skip manual fields already declared in Python.
1017 if field['name'] in self._columns:
# Build the keyword arguments for the fields.* constructor from the
# ir.model.fields row.
1020 'string': field['field_description'],
1021 'required': bool(field['required']),
1022 'readonly': bool(field['readonly']),
# NOTE(review): eval of a database-stored domain -- trusted admin input only.
1023 'domain': eval(field['domain']) if field['domain'] else None,
1024 'size': field['size'],
1025 'ondelete': field['on_delete'],
1026 'translate': (field['translate']),
1028 #'select': int(field['select_level'])
# Sparse (serialized) fields store their value inside another field.
1031 if field['serialization_field_id']:
1032 cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
1033 attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
1034 if field['ttype'] in ['many2one', 'one2many', 'many2many']:
1035 attrs.update({'relation': field['relation']})
1036 self._columns[field['name']] = fields.sparse(**attrs)
1037 elif field['ttype'] == 'selection':
1038 self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
1039 elif field['ttype'] == 'reference':
1040 self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
1041 elif field['ttype'] == 'many2one':
1042 self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
1043 elif field['ttype'] == 'one2many':
1044 self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
1045 elif field['ttype'] == 'many2many':
# Synthesize a relation table name for manual m2m fields.
1046 _rel1 = field['relation'].replace('.', '_')
1047 _rel2 = field['model'].replace('.', '_')
1048 _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
1049 self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
# Fallback: simple types map directly to a fields.<ttype> class.
1051 self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
# Validate and propagate _inherits (delegation inheritance) setup.
1052 self._inherits_check()
1053 self._inherits_reload()
1054 if not self._sequence:
1055 self._sequence = self._table + '_id_seq'
# Every default must correspond to a real (own or inherited) field.
1056 for k in self._defaults:
1057 assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
# Reset per-registry field state (e.g. cached selections).
1058 for f in self._columns:
1059 self._columns[f].restart()
# Transient (wizard) models: vacuum thresholds from server config.
1062 if self.is_transient():
1063 self._transient_check_count = 0
1064 self._transient_max_count = config.get('osv_memory_count_limit')
1065 self._transient_max_hours = config.get('osv_memory_age_limit')
1066 assert self._log_access, "TransientModels must have log_access turned on, "\
1067 "in order to implement their access rights policy"
# _rec_name must be an actual column (or 'id'); default falls back to 'name'.
1070 if self._rec_name is not None:
1071 assert self._rec_name in self._columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
1073 self._rec_name = 'name'
# Serialize one browse_record into one or more export rows (list of lists),
# recursing into one2many/many2many sub-records. Several original lines are
# elided from this listing; code is kept verbatim.
1076 def __export_row(self, cr, uid, row, fields, context=None):
# Map a field type to its empty export placeholder (body elided here).
1080 def check_type(field_type):
1081 if field_type == 'float':
1083 elif field_type == 'integer':
1085 elif field_type == 'boolean':
# Resolve a selection column through the _inherits chain.
# NOTE(review): relies on enclosing-scope f/i; recursive branch below
# drops its return value in the original source as well.
1089 def selection_field(in_field):
1090 col_obj = self.pool.get(in_field.keys()[0])
1091 if f[i] in col_obj._columns.keys():
1092 return col_obj._columns[f[i]]
1093 elif f[i] in col_obj._inherits.keys():
1094 selection_field(col_obj._inherits)
# Return the record's XML id, creating a '__export__' ir.model.data
# entry on the fly when none exists yet.
1098 def _get_xml_id(self, cr, uid, r):
1099 model_data = self.pool.get('ir.model.data')
1100 data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
1102 d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
1104 r = '%s.%s' % (d['module'], d['name'])
# Synthesize a unique name, disambiguating with a numeric postfix.
1110 n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
1111 if not model_data.search(cr, uid, [('name', '=', n)]):
1114 model_data.create(cr, uid, {
1116 'model': self._name,
1118 'module': '__export__',
# One output cell per requested field path, initialized empty.
1124 data = map(lambda x: '', range(len(fields)))
1126 for fpos in range(len(fields)):
# 'id' path: export the external (XML) id instead of the db id.
1136 r = _get_xml_id(self, cr, uid, r)
1139 # To display external name of selection field when its exported
1140 if f[i] in self._columns.keys():
1141 cols = self._columns[f[i]]
1142 elif f[i] in self._inherit_fields.keys():
1143 cols = selection_field(self._inherits)
# Translate the stored selection key into its human-readable label.
1144 if cols and cols._type == 'selection':
1145 sel_list = cols.selection
1146 if r and type(sel_list) == type([]):
1147 r = [x[1] for x in sel_list if r==x[0]]
1148 r = r and r[0] or False
# Falsy leaf value: emit the type-appropriate empty placeholder.
1150 if f[i] in self._columns:
1151 r = check_type(self._columns[f[i]]._type)
1152 elif f[i] in self._inherit_fields:
1153 r = check_type(self._inherit_fields[f[i]][2]._type)
1154 data[fpos] = r or False
# x2many value: recurse with the sub-field paths rooted at this field.
1156 if isinstance(r, (browse_record_list, list)):
1158 fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
1161 if [x for x in fields2 if x]:
1163 done.append(fields2)
# m2m exported as 'id': join the XML ids of all linked records.
1164 if cols and cols._type=='many2many' and len(fields[fpos])>(i+1) and (fields[fpos][i+1]=='id'):
1165 data[fpos] = ','.join([_get_xml_id(self, cr, uid, x) for x in r])
# Recurse into each sub-record; first sub-row merges into this row.
1169 lines2 = row2._model.__export_row(cr, uid, row2, fields2,
1172 for fpos2 in range(len(fields)):
1173 if lines2 and lines2[0][fpos2]:
1174 data[fpos2] = lines2[0][fpos2]
# x2many without sub-fields: comma-join the display names.
1178 name_relation = self.pool.get(rr._table_name)._rec_name
1179 if isinstance(rr[name_relation], browse_record):
1180 rr = rr[name_relation]
1181 rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
1182 rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
1183 dt += tools.ustr(rr_name or '') + ','
# Strip the trailing comma added by the loop above.
1184 data[fpos] = dt[:-1]
# many2one leaf: export its name_get display name.
1193 if isinstance(r, browse_record):
1194 r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
1195 r = r and r[0] and r[0][1] or ''
1196 data[fpos] = tools.ustr(r or '')
# Current row first, followed by the extra rows produced by x2many recursion.
1197 return [data] + lines
# Public export entry point: serializes the given records into a matrix of
# cells keyed 'datas', delegating per-record work to __export_row().
1199 def export_data(self, cr, uid, ids, fields_to_export, context=None):
1201 Export fields for selected objects
1203 :param cr: database cursor
1204 :param uid: current user id
1205 :param ids: list of ids
1206 :param fields_to_export: list of fields
1207 :param context: context arguments, like lang, time zone
1208 :rtype: dictionary with a *datas* matrix
1210 This method is used when exporting data via client menu
# Merge inherited columns so '/'-paths can traverse _inherits parents.
1215 cols = self._columns.copy()
1216 for f in self._inherit_fields:
1217 cols.update({f: self._inherit_fields[f][2]})
# Normalize 'id'/'.id' path components in each requested field path.
1218 fields_to_export = map(fix_import_export_id_paths, fields_to_export)
# NOTE(review): the `datas = []` initialization (original line ~1219)
# is elided from this listing.
1220 for row in self.browse(cr, uid, ids, context):
1221 datas += self.__export_row(cr, uid, row, fields_to_export, context)
1222 return {'datas': datas}
# Legacy import entry point (deprecated in favor of load()). The original
# try/except framing around the conversion loop is partly elided here.
1224 def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
1227 Use :meth:`~load` instead
1229 Import given data in given module
1231 This method is used when importing data via client menu.
1233 Example of fields to import for a sale.order::
1236 partner_id, (=name_search)
1237 order_line/.id, (=database_id)
1239 order_line/product_id/id, (=xml id)
1240 order_line/price_unit,
1241 order_line/product_uom_qty,
1242 order_line/product_uom/id (=xml_id)
1244 This method returns a 4-tuple with the following structure::
1246 (return_code, errored_resource, error_message, unused)
1248 * The first item is a return code, it is ``-1`` in case of
1249 import error, or the last imported row number in case of success
1250 * The second item contains the record data dict that failed to import
1251 in case of error, otherwise it's 0
1252 * The third item contains an error message string in case of error,
1254 * The last item is currently unused, with no specific semantics
1256 :param fields: list of fields to import
1257 :param datas: data to import
1258 :param mode: 'init' or 'update' for record creation
1259 :param current_module: module name
1260 :param noupdate: flag for record creation
1261 :param filename: optional file to store partial import state for recovery
1262 :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
1263 :rtype: (int, dict or 0, str or 0, str or 0)
# Copy the context so the marker key below does not leak to the caller.
1265 context = dict(context) if context is not None else {}
1266 context['_import_current_module'] = current_module
1268 fields = map(fix_import_export_id_paths, fields)
1269 ir_model_data_obj = self.pool.get('ir.model.data')
# log callback (definition partly elided): abort on the first error message.
1272 if m['type'] == 'error':
1273 raise Exception(m['message'])
# Resume support: read the last committed row index from the pickled
# partial-import state file.
# NOTE(review): pickle.load of a server-local state file -- path comes
# from server config, treat as trusted input only.
1275 if config.get('import_partial') and filename:
1276 with open(config.get('import_partial'), 'rb') as partial_import_file:
1277 data = pickle.load(partial_import_file)
1278 position = data.get(filename, 0)
# Extract raw record dicts from the matrix, convert them to writable
# values, then upsert each through ir.model.data._update().
1282 for res_id, xml_id, res, info in self._convert_records(cr, uid,
1283 self._extract_records(cr, uid, fields, datas,
1284 context=context, log=log),
1285 context=context, log=log):
1286 ir_model_data_obj._update(cr, uid, self._name,
1287 current_module, res, mode=mode, xml_id=xml_id,
1288 noupdate=noupdate, res_id=res_id, context=context)
1289 position = info.get('rows', {}).get('to', 0) + 1
# Checkpoint progress every 100 rows so a crash can resume mid-file.
1290 if config.get('import_partial') and filename and (not (position%100)):
1291 with open(config.get('import_partial'), 'rb') as partial_import:
1292 data = pickle.load(partial_import)
1293 data[filename] = position
1294 with open(config.get('import_partial'), 'wb') as partial_import:
1295 pickle.dump(data, partial_import)
1296 if context.get('defer_parent_store_computation'):
1297 self._parent_store_compute(cr)
# Any failure maps to the legacy (-1, {}, message, '') error tuple.
1299 except Exception, e:
1301 return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), ''
1303 if context.get('defer_parent_store_computation'):
1304 self._parent_store_compute(cr)
1305 return position, 0, 0, 0
# Modern import entry point: imports rows under nested savepoints so one
# failing record does not poison the whole transaction. Parts of the
# try/except structure are elided from this listing.
1307 def load(self, cr, uid, fields, data, context=None):
1309 Attempts to load the data matrix, and returns a list of ids (or
1310 ``False`` if there was an error and no id could be generated) and a
1313 The ids are those of the records created and saved (in database), in
1314 the same order they were extracted from the file. They can be passed
1315 directly to :meth:`~read`
1317 :param fields: list of fields to import, at the same index as the corresponding data
1318 :type fields: list(str)
1319 :param data: row-major matrix of data to import
1320 :type data: list(list(str))
1321 :param dict context:
1322 :returns: {ids: list(int)|False, messages: [Message]}
# Outer savepoint: rolled back wholesale if any error message was emitted.
1324 cr.execute('SAVEPOINT model_load')
1327 fields = map(fix_import_export_id_paths, fields)
1328 ModelData = self.pool['ir.model.data']
1329 fg = self.fields_get(cr, uid, context=context)
1336 for id, xid, record, info in self._convert_records(cr, uid,
1337 self._extract_records(cr, uid, fields, data,
1338 context=context, log=messages.append),
1339 context=context, log=messages.append):
# Per-record savepoint so a failed write can be rolled back alone.
1341 cr.execute('SAVEPOINT model_load_save')
1342 except psycopg2.InternalError, e:
1343 # broken transaction, exit and hope the source error was
# Only report the generic database error if nothing more specific
# was already collected.
1345 if not any(message['type'] == 'error' for message in messages):
1346 messages.append(dict(info, type='error',message=
1347 u"Unknown database error: '%s'" % e))
# Upsert the record and release the per-record savepoint on success.
1350 ids.append(ModelData._update(cr, uid, self._name,
1351 current_module, record, mode=mode, xml_id=xid,
1352 noupdate=noupdate, res_id=id, context=context))
1353 cr.execute('RELEASE SAVEPOINT model_load_save')
# DB warnings are downgraded to 'warning' messages; the row is undone.
1354 except psycopg2.Warning, e:
1355 _logger.exception('Failed to import record %s', record)
1356 cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
1357 messages.append(dict(info, type='warning', message=str(e)))
1358 except psycopg2.Error, e:
1359 _logger.exception('Failed to import record %s', record)
1360 # Failed to write, log to messages, rollback savepoint (to
1361 # avoid broken transaction) and keep going
1362 cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
# Translate the Postgres error code into a user-facing message.
1363 messages.append(dict(
1365 **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
# Any error message at all invalidates the whole import batch.
1366 if any(message['type'] == 'error' for message in messages):
1367 cr.execute('ROLLBACK TO SAVEPOINT model_load')
1369 return {'ids': ids, 'messages': messages}
# Turn the flat row-major import matrix into nested raw record dicts,
# grouping continuation rows (only x2many cells filled) under their parent.
1370 def _extract_records(self, cr, uid, fields_, data,
1371 context=None, log=lambda a: None):
1372 """ Generates record dicts from the data sequence.
1374 The result is a generator of dicts mapping field names to raw
1375 (unconverted, unvalidated) values.
1377 For relational fields, if sub-fields were provided the value will be
1378 a list of sub-records
1380 The following sub-fields may be set on the record (by key):
1381 * None is the name_get for the record (to use with name_create/name_search)
1382 * "id" is the External ID for the record
1383 * ".id" is the Database ID for the record
1385 columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
1386 # Fake columns to avoid special cases in extractor
1387 columns[None] = fields.char('rec_name')
1388 columns['id'] = fields.char('External ID')
1389 columns['.id'] = fields.integer('Database ID')
1391 # m2o fields can't be on multiple lines so exclude them from the
1392 # is_relational field rows filter, but special-case it later on to
1393 # be handled with relational fields (as it can have subfields)
1394 is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
# Index getters selecting the o2m vs non-o2m cells of a raw row.
1395 get_o2m_values = itemgetter_tuple(
1396 [index for index, field in enumerate(fields_)
1397 if columns[field[0]]._type == 'one2many'])
1398 get_nono2m_values = itemgetter_tuple(
1399 [index for index, field in enumerate(fields_)
1400 if columns[field[0]]._type != 'one2many'])
1401 # Checks if the provided row has any non-empty non-relational field
1402 def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
1403 return any(g(row)) and not any(f(row))
# Main scan loop (loop header elided from this listing): each iteration
# consumes one master row plus its continuation rows.
1407 if index >= len(data): return
1410 # copy non-relational fields to record dict
1411 record = dict((field[0], value)
1412 for field, value in itertools.izip(fields_, row)
1413 if not is_relational(field[0]))
1415 # Get all following rows which have relational values attached to
1416 # the current record (no non-relational values)
1417 record_span = itertools.takewhile(
1418 only_o2m_values, itertools.islice(data, index + 1, None))
1419 # stitch record row back on for relational fields
1420 record_span = list(itertools.chain([row], record_span))
1421 for relfield in set(
1422 field[0] for field in fields_
1423 if is_relational(field[0])):
1424 column = columns[relfield]
1425 # FIXME: how to not use _obj without relying on fields_get?
1426 Model = self.pool[column._obj]
1428 # get only cells for this sub-field, should be strictly
1429 # non-empty, field path [None] is for name_get column
1430 indices, subfields = zip(*((index, field[1:] or [None])
1431 for index, field in enumerate(fields_)
1432 if field[0] == relfield))
1434 # return all rows which have at least one value for the
1435 # subfields of relfield
1436 relfield_data = filter(any, map(itemgetter_tuple(indices), record_span))
# Recurse: each sub-row block becomes a list of raw sub-records.
1437 record[relfield] = [subrecord
1438 for subrecord, _subinfo in Model._extract_records(
1439 cr, uid, subfields, relfield_data,
1440 context=context, log=log)]
# Report the source row span so callers can map errors back to lines.
1442 yield record, {'rows': {
1444 'to': index + len(record_span) - 1
1446 index += len(record_span)
# Convert raw extracted records (strings) into values writable through
# create()/ir.model.data._update(). Some branches are elided in this listing.
1447 def _convert_records(self, cr, uid, records,
1448 context=None, log=lambda a: None):
1449 """ Converts records from the source iterable (recursive dicts of
1450 strings) into forms which can be written to the database (via
1451 self.create or (ir.model.data)._update)
1453 :returns: a list of triplets of (id, xid, record)
1454 :rtype: list((int|None, str|None, dict))
1456 if context is None: context = {}
1457 Converter = self.pool['ir.fields.converter']
1458 columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
# Pre-translate field labels so error messages use the user's language.
1459 Translation = self.pool['ir.translation']
1461 (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
1462 context.get('lang'))
1464 for f, column in columns.iteritems())
1466 convert = Converter.for_model(cr, uid, self, context=context)
1468 def _log(base, field, exception):
1469 type = 'warning' if isinstance(exception, Warning) else 'error'
1470 # logs the logical (not human-readable) field name for automated
1471 # processing of response, but injects human readable in message
1472 record = dict(base, type=type, field=field,
1473 message=unicode(exception.args[0]) % base)
1474 if len(exception.args) > 1 and exception.args[1]:
1475 record.update(exception.args[1])
# CountingStream tracks the index of the row being processed for errors.
1478 stream = CountingStream(records)
1479 for record, extras in stream:
1482 # name_get/name_create
1483 if None in record: pass
# '.id' column carries the database id; coerce to int when possible.
1490 dbid = int(record['.id'])
1492 # in case of overridden id column
1493 dbid = record['.id']
# A supplied database id must reference an existing record.
1494 if not self.search(cr, uid, [('id', '=', dbid)], context=context):
1497 record=stream.index,
1499 message=_(u"Unknown database identifier '%s'") % dbid))
# Run the per-field converters, funneling failures through _log().
1502 converted = convert(record, lambda field, err:\
1503 _log(dict(extras, record=stream.index, field=field_names[field]), field, err))
1505 yield dbid, xid, converted, dict(extras, record=stream.index)
# Return the field names that failed the last _validate() pass, as a new
# list so callers cannot mutate the internal _invalids set.
1507 def get_invalid_fields(self, cr, uid):
1508 return list(self._invalids)
# Run all Python-level _constraints against the given ids; collect every
# failure message, then raise a single ValidateError. Some lines (error_msgs
# init, else-branches) are elided from this listing.
1510 def _validate(self, cr, uid, ids, context=None):
1511 context = context or {}
1512 lng = context.get('lang')
1513 trans = self.pool.get('ir.translation')
1515 for constraint in self._constraints:
# Each constraint is (check_function, message_or_callable, field_names).
1516 fun, msg, fields = constraint
1517 if not fun(self, cr, uid, ids):
1518 # Check presence of __call__ directly instead of using
1519 # callable() because it will be deprecated as of Python 3.0
1520 if hasattr(msg, '__call__'):
# Callable messages may return (template, params) for interpolation.
1521 tmp_msg = msg(self, cr, uid, ids, context=context)
1522 if isinstance(tmp_msg, tuple):
1523 tmp_msg, params = tmp_msg
1524 translated_msg = tmp_msg % params
1526 translated_msg = tmp_msg
# Static messages go through the 'constraint' translation table.
1528 translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg)
1530 _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
# Remember the offending fields for get_invalid_fields().
1532 self._invalids.update(fields)
1534 raise except_orm('ValidateError', '\n'.join(error_msgs))
# All constraints passed: clear any stale invalid-field markers.
1536 self._invalids.clear()
# Compute default values for the requested fields, layering (in order):
# _inherits parents, model _defaults, ir.property values, user-defined
# ir.values defaults, then context 'default_*' keys. Elided lines omitted.
1538 def default_get(self, cr, uid, fields_list, context=None):
1540 Returns default values for the fields in fields_list.
1542 :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
1543 :type fields_list: list
1544 :param context: optional context dictionary - it may contains keys for specifying certain options
1545 like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
1546 It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
1547 or override a default value for a field.
1548 A special ``bin_size`` boolean flag may also be passed in the context to request the
1549 value of all fields.binary columns to be returned as the size of the binary instead of its
1550 contents. This can also be selectively overriden by passing a field-specific flag
1551 in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
1552 Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
1553 :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
1555 # trigger view init hook
1556 self.view_init(cr, uid, fields_list, context)
1562 # get the default values for the inherited fields
1563 for t in self._inherits.keys():
1564 defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
1567 # get the default values defined in the object
1568 for f in fields_list:
1569 if f in self._defaults:
# _defaults entries may be plain values or callables(self, cr, uid, ctx).
1570 if callable(self._defaults[f]):
1571 defaults[f] = self._defaults[f](self, cr, uid, context)
1573 defaults[f] = self._defaults[f]
# Resolve the column definition from own columns or _inherits parents.
1575 fld_def = ((f in self._columns) and self._columns[f]) \
1576 or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
# Property fields pull their default from ir.property.
1579 if isinstance(fld_def, fields.property):
1580 property_obj = self.pool.get('ir.property')
1581 prop_value = property_obj.get(cr, uid, f, self._name, context=context)
1583 if isinstance(prop_value, (browse_record, browse_null)):
1584 defaults[f] = prop_value.id
1586 defaults[f] = prop_value
1588 if f not in defaults:
1591 # get the default values set by the user and override the default
1592 # values defined in the object
1593 ir_values_obj = self.pool.get('ir.values')
1594 res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
1595 for id, field, field_value in res:
1596 if field in fields_list:
1597 fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
# m2o default: discard it when the target record no longer exists.
1598 if fld_def._type == 'many2one':
1599 obj = self.pool.get(fld_def._obj)
1600 if not obj.search(cr, uid, [('id', '=', field_value or False)]):
# m2m default: keep only ids still present in the target model.
1602 if fld_def._type == 'many2many':
1603 obj = self.pool.get(fld_def._obj)
1605 for i in range(len(field_value)):
1606 if not obj.search(cr, uid, [('id', '=',
1609 field_value2.append(field_value[i])
1610 field_value = field_value2
# o2m default: rebuild each sub-dict, dropping dangling m2one values.
1611 if fld_def._type == 'one2many':
1612 obj = self.pool.get(fld_def._obj)
1614 for i in range(len(field_value)):
1615 field_value2.append({})
1616 for field2 in field_value[i]:
1617 if field2 in obj._columns.keys() and obj._columns[field2]._type == 'many2one':
1618 obj2 = self.pool.get(obj._columns[field2]._obj)
1619 if not obj2.search(cr, uid,
1620 [('id', '=', field_value[i][field2])]):
1622 elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type == 'many2one':
1623 obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
1624 if not obj2.search(cr, uid,
1625 [('id', '=', field_value[i][field2])]):
1627 # TODO add test for many2many and one2many
1628 field_value2[i][field2] = field_value[i][field2]
1629 field_value = field_value2
1630 defaults[field] = field_value
1632 # get the default values from the context
# Context 'default_<field>' keys take precedence over everything above.
1633 for key in context or {}:
1634 if key.startswith('default_') and (key[8:] in fields_list):
1635 defaults[key[8:]] = context[key]
def fields_get_keys(self, cr, user, context=None):
    """Return the names of all fields of the model, including the fields
    inherited through ``_inherits`` parents.

    :param cr: database cursor
    :param user: current user id
    :param context: optional context dictionary, propagated to parents
    :return: list of field names
    """
    # list() guards against non-list key views so the in-place extend
    # below is always safe.
    res = list(self._columns.keys())
    # TODO I believe this loop can be replace by
    # res.extend(self._inherit_fields.key())
    for parent in self._inherits:
        res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
    # The listing dropped the trailing return; restore it so callers
    # actually receive the accumulated field names.
    return res
1646 def _rec_name_fallback(self, cr, uid, context=None):
1647 rec_name = self._rec_name
1648 if rec_name not in self._columns:
1649 rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"
# Overload this method if you need a window title which depends on the context
def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
    """Hook returning a context-dependent window title for a view.

    The base implementation defines no custom header; subclasses override
    this to compute one from the context.

    :return: False by default (meaning: no custom view header)
    """
    # Restored: the one-line body was elided from the listing.
    return False
def user_has_groups(self, cr, uid, groups, context=None):
    """Return true if the user is at least member of one of the groups
    in groups_str. Typically used to resolve ``groups`` attribute
    in view and model definitions.

    :param str groups: comma-separated list of fully-qualified group
                       external IDs, e.g.: ``base.group_user,base.group_system``
    :return: True if the current user is a member of one of the
             given groups, False otherwise
    """
    # Generator instead of a materialized list so any() can short-circuit
    # on the first matching group (each has_group call hits the database).
    return any(self.pool.get('res.users').has_group(cr, uid, group_ext_id)
               for group_ext_id in groups.split(','))
# Post-process a view architecture node: resolve field descriptions, apply
# group-based visibility, build embedded sub-views, and translate labels.
# Many original lines are elided from this listing; code kept verbatim.
1671 def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
1672 """Return the description of the fields in the node.
1674 In a normal call to this method, node is a complete view architecture
1675 but it is actually possible to give some sub-node (this is used so
1676 that the method can call itself recursively).
1678 Originally, the field descriptions are drawn from the node itself.
1679 But there is now some code calling fields_get() in order to merge some
1680 of those information in the architecture.
# utf-8 encoder helper (surrounding def elided from listing).
1692 if isinstance(s, unicode):
1693 return s.encode('utf8')
1696 def check_group(node):
1697 """Apply group restrictions, may be set at view level or model level::
1698 * at view level this means the element should be made invisible to
1699 people who are not members
1700 * at model level (exclusively for fields, obviously), this means
1701 the field should be completely removed from the view, as it is
1702 completely unavailable for non-members
1704 :return: True if field should be included in the result of fields_view_get
# Model-level groups: strip the field node entirely for non-members.
1706 if node.tag == 'field' and node.get('name') in self._all_columns:
1707 column = self._all_columns[node.get('name')].column
1708 if column.groups and not self.user_has_groups(cr, user,
1709 groups=column.groups,
1711 node.getparent().remove(node)
1712 fields.pop(node.get('name'), None)
1713 # no point processing view-level ``groups`` anymore, return
# View-level groups: keep the node but force it invisible.
1715 if node.get('groups'):
1716 can_see = self.user_has_groups(cr, user,
1717 groups=node.get('groups'),
1720 node.set('invisible', '1')
1721 modifiers['invisible'] = True
1722 if 'attrs' in node.attrib:
1723 del(node.attrib['attrs']) #avoid making field visible later
1724 del(node.attrib['groups'])
1727 if node.tag in ('field', 'node', 'arrow'):
# Diagram nodes/arrows: build the sub-model's form view from the
# field children declared inline.
1728 if node.get('object'):
1733 if f.tag in ('field'):
1734 xml += etree.tostring(f, encoding="utf-8")
1736 new_xml = etree.fromstring(encode(xml))
1737 ctx = context.copy()
1738 ctx['base_model_name'] = self._name
1739 xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
1744 attrs = {'views': views}
1746 if node.get('name'):
# Resolve the column from own or inherited field definitions.
1749 if node.get('name') in self._columns:
1750 column = self._columns[node.get('name')]
1752 column = self._inherit_fields[node.get('name')][2]
# Relational fields: recursively process inline sub-views.
1757 relation = self.pool.get(column._obj)
1762 if f.tag in ('form', 'tree', 'graph', 'kanban'):
1764 ctx = context.copy()
1765 ctx['base_model_name'] = self._name
1766 xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
1767 views[str(f.tag)] = {
1771 attrs = {'views': views}
1772 if node.get('widget') and node.get('widget') == 'selection':
1773 # Prepare the cached selection list for the client. This needs to be
1774 # done even when the field is invisible to the current user, because
1775 # other events could need to change its value to any of the selectable ones
1776 # (such as on_change events, refreshes, etc.)
1778 # If domain and context are strings, we keep them for client-side, otherwise
1779 # we evaluate them server-side to consider them when generating the list of
1781 # TODO: find a way to remove this hack, by allow dynamic domains
1783 if column._domain and not isinstance(column._domain, basestring):
1784 dom = column._domain
# NOTE(review): safe_eval (aliased as eval at file top) of the
# view-level domain with uid/time in scope.
1785 dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
1786 search_context = dict(context)
1787 if column._context and not isinstance(column._context, basestring):
1788 search_context.update(column._context)
1789 attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
# Non-required selection widgets get an explicit empty choice.
1790 if (node.get('required') and not int(node.get('required'))) or not column.required:
1791 attrs['selection'].append((False, ''))
1792 fields[node.get('name')] = attrs
1794 field = model_fields.get(node.get('name'))
1796 transfer_field_to_modifiers(field, modifiers)
# Root form/tree node: inject the context-dependent window title.
1799 elif node.tag in ('form', 'tree'):
1800 result = self.view_header_get(cr, user, False, node.tag, context)
1802 node.set('string', result)
1803 in_tree_view = node.tag == 'tree'
1805 elif node.tag == 'calendar':
# Calendar attributes reference extra fields the client must read.
1806 for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
1807 if node.get(additional_field):
1808 fields[node.get(additional_field)] = {}
1810 if not check_group(node):
1811 # node must be removed, no need to proceed further with its children
1814 # The view architeture overrides the python model.
1815 # Get the attrs before they are (possibly) deleted by check_group below
1816 transfer_node_to_modifiers(node, modifiers, context, in_tree_view)
1818 # TODO remove attrs couterpart in modifiers when invisible is true ?
# Translate node text/tail and label-like attributes for the user lang.
1821 if 'lang' in context:
1822 if node.text and node.text.strip():
1823 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.text.strip())
1825 node.text = node.text.replace(node.text.strip(), trans)
1826 if node.tail and node.tail.strip():
1827 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.tail.strip())
1829 node.tail = node.tail.replace(node.tail.strip(), trans)
1831 if node.get('string') and not result:
1832 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
1833 if trans == node.get('string') and ('base_model_name' in context):
1834 # If translation is same as source, perhaps we'd have more luck with the alternative model name
1835 # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
1836 trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
1838 node.set('string', trans)
1840 for attr_name in ('confirm', 'sum', 'help', 'placeholder'):
1841 attr_value = node.get(attr_name)
1843 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], attr_value)
1845 node.set(attr_name, trans)
# Recurse into children (and into filter/separator children of fields).
1848 if children or (node.tag == 'field' and f.tag in ('filter','separator')):
1849 fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))
1851 transfer_modifiers_to_node(modifiers, node)
# Mark workflow buttons readonly when the user lacks the groups required
# by the workflow transitions they trigger. Parts of the SQL and the early
# admin shortcut are elided from this listing.
1854 def _disable_workflow_buttons(self, cr, user, node):
1855 """ Set the buttons in node to readonly if the user can't activate them. """
1857 # admin user can always activate workflow buttons
1860 # TODO handle the case of more than one workflow for a model or multiple
1861 # transitions with different groups and same signal
1862 usersobj = self.pool.get('res.users')
# Only workflow buttons (type != 'object') are subject to this check.
1863 buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
1864 for button in buttons:
# NOTE(review): user groups are re-read for every button; could be
# hoisted out of the loop -- left as-is to preserve behavior.
1865 user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
# Find the groups required by transitions fired by this button's
# signal (the FROM clause of this query is elided in the listing).
1866 cr.execute("""SELECT DISTINCT t.group_id
1868 INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
1869 INNER JOIN wkf_transition t ON (t.act_to = a.id)
1872 AND t.group_id is NOT NULL
1873 """, (self._name, button.get('name')))
1874 group_ids = [x[0] for x in cr.fetchall() if x[0]]
# Clickable when no group restriction exists or the user is a member.
1875 can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
1876 button.set('readonly', str(int(not can_click)))
    def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
        """ Return an architecture and a description of all the fields.

        The field description combines the result of fields_get() and
        __view_look_dom().

        :param node: the architecture as as an etree
        :return: a tuple (arch, fields) where arch is the given node as a
            string and fields is the description of all the fields.
        """
        # NOTE(review): some structural lines of this method appear missing in
        # this copy (e.g. ``fields = {}``, the else: branches, the final
        # return) -- the comments below document the visible logic only
        if node.tag == 'diagram':
            # diagram views render two auxiliary models; collect the field
            # descriptions of the node model and of the arrow model
            if node.getchildren()[0].tag == 'node':
                node_model = self.pool.get(node.getchildren()[0].get('object'))
                node_fields = node_model.fields_get(cr, user, None, context)
                fields.update(node_fields)
                # hide 'create' when the user may not create node records
                if not node.get("create") and not node_model.check_access_rights(cr, user, 'create', raise_exception=False):
                    node.set("create", 'false')
            if node.getchildren()[1].tag == 'arrow':
                arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
                fields.update(arrow_fields)
            fields = self.fields_get(cr, user, None, context)
        fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
        # grey out workflow buttons the user is not allowed to trigger
        node = self._disable_workflow_buttons(cr, user, node)
        if node.tag in ('kanban', 'tree', 'form', 'gantt'):
            # reflect missing access rights as view-level create/delete/edit flags
            for action, operation in (('create', 'create'), ('delete', 'unlink'), ('edit', 'write')):
                if not node.get(action) and not self.check_access_rights(cr, user, operation, raise_exception=False):
                    node.set(action, 'false')
        arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
        # drop field descriptions the view does not actually reference
        for k in fields.keys():
            if k not in fields_def:
        for field in fields_def:
                # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
                fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
            elif field in fields:
                fields[field].update(fields_def[field])
                # unknown field: identify which (inherited) view brought it in
                cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
                res = cr.fetchall()[:]
                res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
                msg = "\n * ".join([r[0] for r in res])
                msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
                raise except_orm('View error', msg)
1930 def _get_default_form_view(self, cr, user, context=None):
1931 """ Generates a default single-line form view using all fields
1932 of the current model except the m2m and o2m ones.
1934 :param cr: database cursor
1935 :param int user: user id
1936 :param dict context: connection context
1937 :returns: a form view as an lxml document
1938 :rtype: etree._Element
1940 view = etree.Element('form', string=self._description)
1941 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
1942 for field, descriptor in self.fields_get(cr, user, context=context).iteritems():
1943 if descriptor['type'] in ('one2many', 'many2many'):
1945 etree.SubElement(view, 'field', name=field)
1946 if descriptor['type'] == 'text':
1947 etree.SubElement(view, 'newline')
1950 def _get_default_search_view(self, cr, user, context=None):
1951 """ Generates a single-field search view, based on _rec_name.
1953 :param cr: database cursor
1954 :param int user: user id
1955 :param dict context: connection context
1956 :returns: a tree view as an lxml document
1957 :rtype: etree._Element
1959 view = etree.Element('search', string=self._description)
1960 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
1963 def _get_default_tree_view(self, cr, user, context=None):
1964 """ Generates a single-field tree view, based on _rec_name.
1966 :param cr: database cursor
1967 :param int user: user id
1968 :param dict context: connection context
1969 :returns: a tree view as an lxml document
1970 :rtype: etree._Element
1972 view = etree.Element('tree', string=self._description)
1973 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
    def _get_default_calendar_view(self, cr, user, context=None):
        """ Generates a default calendar view by trying to infer
        calendar fields from a number of pre-set attribute names

        :param cr: database cursor
        :param int user: user id
        :param dict context: connection context
        :returns: a calendar view
        :rtype: etree._Element
        """
        def set_first_of(seq, in_, to):
            """Sets the first value of ``seq`` also found in ``in_`` to
            the ``to`` attribute of the view being closed over.

            Returns whether it's found a suitable value (and set it on
            the attribute) or not
            """
            # NOTE(review): helper body appears missing in this copy
        view = etree.Element('calendar', string=self._description)
        etree.SubElement(view, 'field', self._rec_name_fallback(cr, user, context))

        if (self._date_name not in self._columns):
            # _date_name is not a real column: probe common date field names
            for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
                if dt in self._columns:
                    self._date_name = dt
            # no usable start-date field: a calendar view cannot be built
            raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
        view.set('date_start', self._date_name)

        # optional color grouping: first matching column wins
        set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
            self._columns, 'color')

        # either an end date or a duration is mandatory for a calendar view
        if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
                            self._columns, 'date_stop'):
            if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
                                self._columns, 'date_delay'):
                    _('Invalid Object Architecture!'),
                    _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % (self._name)))
    # if view_id, view_type is not required
    def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """
        Get the detailed composition of the requested view like fields, model, view architecture

        :param cr: database cursor
        :param user: current user id
        :param view_id: id of the view or None
        :param view_type: type of the view to return if view_id is None ('form', tree', ...)
        :param context: context arguments, like lang, time zone
        :param toolbar: true to include contextual actions
        :param submenu: deprecated
        :return: dictionary describing the composition of the requested view (including inherited views and extensions)
        :raise AttributeError:
                            * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
                            * if some tag other than 'position' is found in parent view
        :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
        """
        # NOTE(review): several structural lines of this method appear missing
        # from this copy (helper ``def`` lines, loop headers, returns); the
        # comments below document the visible logic only -- verify in VCS.
            # (encode helper) serialize unicode to UTF-8 bytes for lxml parsing
            if isinstance(s, unicode):
                return s.encode('utf8')

        def raise_view_error(error_msg, child_view_id):
            # embed the xml ids of parent and child views in the error message
            view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
            error_msg = error_msg % {'parent_xml_id': view.xml_id}
            raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
                                 % (child_view.xml_id, self._name, error_msg))

        def locate(source, spec):
            """ Locate a node in a source (parent) architecture.

            Given a complete source (parent) architecture (i.e. the field
            `arch` in a view), and a 'spec' node (a node in an inheriting
            view that specifies the location in the source view of what
            should be changed), return (if it exists) the node in the
            source view matching the specification.

            :param source: a parent architecture to modify
            :param spec: a modifying node in an inheriting view
            :return: a node in the source matching the spec
            """
            if spec.tag == 'xpath':
                nodes = source.xpath(spec.get('expr'))
                return nodes[0] if nodes else None
            elif spec.tag == 'field':
                # Only compare the field name: a field can be only once in a given view
                # at a given level (and for multilevel expressions, we should use xpath
                # inheritance spec anyway).
                for node in source.getiterator('field'):
                    if node.get('name') == spec.get('name'):
                # generic tag: all attributes (except position/version) must match
                for node in source.getiterator(spec.tag):
                    if isinstance(node, SKIPPED_ELEMENT_TYPES):
                    if all(node.get(attr) == spec.get(attr) \
                            for attr in spec.attrib
                                if attr not in ('position','version')):
                        # Version spec should match parent's root element's version
                        if spec.get('version') and spec.get('version') != source.get('version'):

        def apply_inheritance_specs(source, specs_arch, inherit_id=None):
            """ Apply an inheriting view.

            Apply to a source architecture all the spec nodes (i.e. nodes
            describing where and what changes to apply to some parent
            architecture) given by an inheriting view.

            :param source: a parent architecture to modify
            :param specs_arch: a modifying architecture in an inheriting view
            :param inherit_id: the database id of the inheriting view
            :return: a modified source where the specs are applied
            """
            specs_tree = etree.fromstring(encode(specs_arch))
            # Queue of specification nodes (i.e. nodes describing where and
            # changes to apply to some parent architecture).
            specs = [specs_tree]
                if isinstance(spec, SKIPPED_ELEMENT_TYPES):
                if spec.tag == 'data':
                    # a <data> wrapper: enqueue its children as individual specs
                    specs += [ c for c in specs_tree ]
                node = locate(source, spec)
                if node is not None:
                    pos = spec.get('position', 'inside')
                    if pos == 'replace':
                        if node.getparent() is None:
                            # replacing the root: the spec's first child becomes the new tree
                            source = copy.deepcopy(spec[0])
                                node.addprevious(child)
                            node.getparent().remove(node)
                    elif pos == 'attributes':
                        # only mutate attributes of the located node
                        for child in spec.getiterator('attribute'):
                            attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                                node.set(attribute[0], attribute[1])
                                del(node.attrib[attribute[0]])
                        sib = node.getnext()
                            elif pos == 'after':
                                    sib.addprevious(child)
                            elif pos == 'before':
                                node.addprevious(child)
                                raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
                    # spec did not match: build a readable tag description for the error
                        ' %s="%s"' % (attr, spec.get(attr))
                        for attr in spec.attrib
                        if attr != 'position'
                    tag = "<%s%s>" % (spec.tag, attrs)
                    if spec.get('version') and spec.get('version') != source.get('version'):
                        raise_view_error("Mismatching view API version for element '%s': %r vs %r in parent view '%%(parent_xml_id)s'" % \
                                         (tag, spec.get('version'), source.get('version')), inherit_id)
                    raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)

        def apply_view_inheritance(cr, user, source, inherit_id):
            """ Apply all the (directly and indirectly) inheriting views.

            :param source: a parent architecture to modify (with parent
                modifications already applied)
            :param inherit_id: the database view_id of the parent view
            :return: a modified source where all the modifying architecture
            """
            sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
            for (view_arch, view_id) in sql_inherit:
                source = apply_inheritance_specs(source, view_arch, view_id)
                # recurse: children of this child view must be applied too
                source = apply_view_inheritance(cr, user, source, view_id)

        result = {'type': view_type, 'model': self._name}

        parent_view_model = None
        view_ref = context.get(view_type + '_view_ref')
        # Search for a root (i.e. without any parent) view.
            if view_ref and not view_id:
                    # resolve a "<module>.<xml_id>" view reference via ir_model_data
                    module, view_ref = view_ref.split('.', 1)
                    cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
                    view_ref_res = cr.fetchone()
                        view_id = view_ref_res[0]

                cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                              WHERE id=%s""", (view_id,))
                cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                              WHERE model=%s AND type=%s AND inherit_id IS NULL
                              ORDER BY priority""", (self._name, view_type))
            sql_res = cr.dictfetchone()

            # climb to the root view: follow inherit_id until a parentless view
            view_id = sql_res['inherit_id'] or sql_res['id']
            parent_view_model = sql_res['model']
            if not sql_res['inherit_id']:

        # if a view was found
            source = etree.fromstring(encode(sql_res['arch']))
            result.update(
                arch=apply_view_inheritance(cr, user, source, sql_res['id']),
                type=sql_res['type'],
                view_id=sql_res['id'],
                name=sql_res['name'],
                field_parent=sql_res['field_parent'] or False)
            # otherwise, build some kind of default view
                view = getattr(self, '_get_default_%s_view' % view_type)(
            except AttributeError:
                # what happens here, graph case?
                raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)

        # view inherited from a parent model: translations must be looked up
        # with the parent model's name, passed through the context
        if parent_view_model != self._name:
            ctx = context.copy()
            ctx['base_model_name'] = parent_view_model

        xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
        result['arch'] = xarch
        result['fields'] = xfields

                # (clean helper) strip heavy report payloads from toolbar actions
                for key in ('report_sxw_content', 'report_rml_content',
                        'report_sxw', 'report_rml',
                        'report_sxw_content_data', 'report_rml_content_data'):
            ir_values_obj = self.pool.get('ir.values')
            resprint = ir_values_obj.get(cr, user, 'action',
                    'client_print_multi', [(self._name, False)], False,
            resaction = ir_values_obj.get(cr, user, 'action',
                    'client_action_multi', [(self._name, False)], False,

            resrelate = ir_values_obj.get(cr, user, 'action',
                    'client_action_relate', [(self._name, False)], False,
            resaction = [clean(action) for action in resaction
                         if view_type == 'tree' or not action[2].get('multi')]
            resprint = [clean(print_) for print_ in resprint
                        if view_type == 'tree' or not print_[2].get('multi')]
            #When multi="True" set it will display only in More of the list view
            resrelate = [clean(action) for action in resrelate
                         if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')]

            for x in itertools.chain(resprint, resaction, resrelate):
                x['string'] = x['name']

            result['toolbar'] = {
                'action': resaction,
    # single-underscore alias of the (name-mangled) private implementation,
    # kept so subclasses and external callers can reach it
    _view_look_dom_arch = __view_look_dom_arch
2294 def search_count(self, cr, user, args, context=None):
2297 res = self.search(cr, user, args, context=context, count=True)
2298 if isinstance(res, list):
2302 def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
2304 Search for records based on a search domain.
2306 :param cr: database cursor
2307 :param user: current user id
2308 :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
2309 :param offset: optional number of results to skip in the returned values (default: 0)
2310 :param limit: optional max number of records to return (default: **None**)
2311 :param order: optional columns to sort by (default: self._order=id )
2312 :param context: optional context arguments, like lang, time zone
2313 :type context: dictionary
2314 :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
2315 :return: id or list of ids of records matching the criteria
2316 :rtype: integer or list of integers
2317 :raise AccessError: * if user tries to bypass access rules for read on the requested object.
2319 **Expressing a search domain (args)**
2321 Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:
2323 * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
2324 * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
2325 The semantics of most of these operators are obvious.
2326 The ``child_of`` operator will look for records who are children or grand-children of a given record,
2327 according to the semantics of this model (i.e following the relationship field named by
2328 ``self._parent_name``, by default ``parent_id``.
2329 * **value** must be a valid value to compare with the values of **field_name**, depending on its type.
2331 Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
2332 These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
2333 Be very careful about this when you combine them the first time.
2335 Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::
2337 [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))
2339 The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::
2341 (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
2344 return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
2346 def name_get(self, cr, user, ids, context=None):
2347 """Returns the preferred display value (text representation) for the records with the
2348 given ``ids``. By default this will be the value of the ``name`` column, unless
2349 the model implements a custom behavior.
2350 Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
2354 :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
2358 if isinstance(ids, (int, long)):
2361 if self._rec_name in self._all_columns:
2362 rec_name_column = self._all_columns[self._rec_name].column
2363 return [(r['id'], rec_name_column.as_display_name(cr, user, self, r[self._rec_name], context=context))
2364 for r in self.read(cr, user, ids, [self._rec_name],
2365 load='_classic_write', context=context)]
2366 return [(id, "%s,%s" % (self._name, id)) for id in ids]
2368 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
2369 """Search for records that have a display name matching the given ``name`` pattern if compared
2370 with the given ``operator``, while also matching the optional search domain (``args``).
2371 This is used for example to provide suggestions based on a partial value for a relational
2373 Sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
2376 This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
2377 and then :meth:`~.name_get` on the result of the search.
2379 :param list args: optional search domain (see :meth:`~.search` for syntax),
2380 specifying further restrictions
2381 :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
2383 :param int limit: optional max number of records to return
2385 :return: list of pairs ``(id,text_repr)`` for all matching records.
2387 return self._name_search(cr, user, name, args, operator, context, limit)
2389 def name_create(self, cr, uid, name, context=None):
2390 """Creates a new record by calling :meth:`~.create` with only one
2391 value provided: the name of the new record (``_rec_name`` field).
2392 The new record will also be initialized with any default values applicable
2393 to this model, or provided through the context. The usual behavior of
2394 :meth:`~.create` applies.
2395 Similarly, this method may raise an exception if the model has multiple
2396 required fields and some do not have default values.
2398 :param name: name of the record to create
2401 :return: the :meth:`~.name_get` pair value for the newly-created record.
2403 rec_id = self.create(cr, uid, {self._rec_name: name}, context);
2404 return self.name_get(cr, uid, [rec_id], context)[0]
2406 # private implementation of name_search, allows passing a dedicated user for the name_get part to
2407 # solve some access rights issues
2408 def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
2414 # optimize out the default criterion of ``ilike ''`` that matches everything
2415 if not (name == '' and operator == 'ilike'):
2416 args += [(self._rec_name, operator, name)]
2417 access_rights_uid = name_get_uid or user
2418 ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
2419 res = self.name_get(cr, access_rights_uid, ids, context)
    def read_string(self, cr, uid, id, langs, fields=None, context=None):
        """Read the translated labels of ``fields`` for every language in
        ``langs``, falling back on the column's ``string`` attribute when no
        translation exists.  Inherited (_inherits) fields are resolved by
        delegating to the parent models.

        :return: mapping lang -> {'code': lang, field_name: label, ...}
        """
        self.pool.get('ir.translation').check_access_rights(cr, uid, 'read')
            # default to every regular and inherited field
            fields = self._columns.keys() + self._inherit_fields.keys()
        #FIXME: collect all calls to _get_source into one SQL call.
            res[lang] = {'code': lang}
                if f in self._columns:
                    res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
                        res[lang][f] = res_trans
                        # no translation found: fall back on the raw label
                        res[lang][f] = self._columns[f].string
        for table in self._inherits:
            # delegate inherited fields to the parent model
            cols = intersect(self._inherit_fields.keys(), fields)
            res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
                res[lang]['code'] = lang
            for f in res2[lang]:
                res[lang][f] = res2[lang][f]
    def write_string(self, cr, uid, id, langs, vals, context=None):
        """Store translated labels for the fields in ``vals``, one entry per
        language in ``langs``; inherited fields are delegated to the parent
        models declared in ``_inherits``.
        """
        self.pool.get('ir.translation').check_access_rights(cr, uid, 'write')
        #FIXME: try to only call the translation in one SQL
                if field in self._columns:
                    # the untranslated column label serves as the source term
                    src = self._columns[field].string
                    self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
        for table in self._inherits:
            # forward the labels belonging to inherited fields
            cols = intersect(self._inherit_fields.keys(), vals)
                self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
    def _add_missing_default_values(self, cr, uid, values, context=None):
        """Complete ``values`` with default values for every field of the
        model (regular and inherited) that the caller did not provide.
        Provided values always win over defaults.
        """
        missing_defaults = []
        avoid_tables = [] # avoid overriding inherited values when parent is set
        for tables, parent_field in self._inherits.items():
            if parent_field in values:
                avoid_tables.append(tables)
        for field in self._columns.keys():
            if not field in values:
                missing_defaults.append(field)
        for field in self._inherit_fields.keys():
            # skip fields of a parent record that the caller passed explicitly
            if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
                missing_defaults.append(field)

        if len(missing_defaults):
            # override defaults with the provided values, never allow the other way around
            defaults = self.default_get(cr, uid, missing_defaults, context)
                # normalize m2m defaults given as plain id lists into (6,0,ids) commands
                if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
                     or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
                        and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
                    defaults[dv] = [(6, 0, defaults[dv])]
                # normalize o2m defaults given as dicts into (0,0,values) commands
                if (dv in self._columns and self._columns[dv]._type == 'one2many' \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
                        and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
                    defaults[dv] = [(0, 0, x) for x in defaults[dv]]
            defaults.update(values)
    def clear_caches(self):
        """ Clear the caches

        This clears the caches associated to methods decorated with
        ``tools.ormcache`` or ``tools.ormcache_multi``.
        """
            # probe for the cache attribute; AttributeError simply means no
            # ormcache-decorated method was ever called on this model
            getattr(self, '_ormcache')
        except AttributeError:
    def _read_group_fill_results(self, cr, uid, domain, groupby, groupby_list, aggregated_fields,
                                 read_group_result, read_group_order=None, context=None):
        """Helper method for filling in empty groups for all possible values of
           the field being grouped by"""

        # self._group_by_full should map groupable fields to a method that returns
        # a list of all aggregated values that we want to display for this field,
        # in the form of a m2o-like pair (key,label).
        # This is useful to implement kanban views for instance, where all columns
        # should be displayed even if they don't contain any record.

        # Grab the list of all groups that should be displayed, including all present groups
        present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
        all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
                                                  read_group_order=read_group_order,
                                                  access_rights_uid=openerp.SUPERUSER_ID,

        # template for groups that have no record yet: zero count, False aggregates
        result_template = dict.fromkeys(aggregated_fields, False)
        result_template[groupby + '_count'] = 0
        if groupby_list and len(groupby_list) > 1:
            result_template['__context'] = {'group_by': groupby_list[1:]}

        # Merge the left_side (current results as dicts) with the right_side (all
        # possible values as m2o pairs). Both lists are supposed to be using the
        # same ordering, and can be merged in one pass.
        def append_left(left_side):
            grouped_value = left_side[groupby] and left_side[groupby][0]
            if not grouped_value in known_values:
                result.append(left_side)
                known_values[grouped_value] = left_side
                # group already emitted: just refresh its record count
                count_attr = groupby + '_count'
                known_values[grouped_value].update({count_attr: left_side[count_attr]})
        def append_right(right_side):
            grouped_value = right_side[0]
            if not grouped_value in known_values:
                # empty group: synthesize a line from the template
                line = dict(result_template)
                line[groupby] = right_side
                line['__domain'] = [(groupby,'=',grouped_value)] + domain
                known_values[grouped_value] = line
        while read_group_result or all_groups:
            left_side = read_group_result[0] if read_group_result else None
            right_side = all_groups[0] if all_groups else None
            assert left_side is None or left_side[groupby] is False \
                 or isinstance(left_side[groupby], (tuple,list)), \
                'M2O-like pair expected, got %r' % left_side[groupby]
            assert right_side is None or isinstance(right_side, (tuple,list)), \
                'M2O-like pair expected, got %r' % right_side
            if left_side is None:
                append_right(all_groups.pop(0))
            elif right_side is None:
                append_left(read_group_result.pop(0))
            elif left_side[groupby] == right_side:
                append_left(read_group_result.pop(0))
                all_groups.pop(0) # discard right_side
            elif not left_side[groupby] or not left_side[groupby][0]:
                # left side == "Undefined" entry, not present on right_side
                append_left(read_group_result.pop(0))
                append_right(all_groups.pop(0))

                # propagate the fold state computed by the _group_by_full callback
                r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
    def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
        """
        Get the list of records in list view grouped by the given ``groupby`` fields

        :param cr: database cursor
        :param uid: current user id
        :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
        :param list fields: list of fields present in the list view specified on the object
        :param list groupby: fields by which the records will be grouped
        :param int offset: optional number of records to skip
        :param int limit: optional max number of records to return
        :param dict context: context arguments, like lang, time zone
        :param list orderby: optional ``order by`` specification, for
                             overriding the natural sort ordering of the
                             groups, see also :py:meth:`~osv.osv.osv.search`
                             (supported only for many2one fields currently)
        :return: list of dictionaries(one dictionary for each record) containing:

                    * the values of fields grouped by the fields in ``groupby`` argument
                    * __domain: list of tuples specifying the search criteria
                    * __context: dictionary with argument like ``groupby``
        :rtype: [{'field_name_1': value, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        """
        context = context or {}
        self.check_access_rights(cr, uid, 'read')
            fields = self._columns.keys()

        query = self._where_calc(cr, uid, domain, context=context)
        # apply record rules before building the GROUP BY query
        self._apply_ir_rules(cr, uid, query, 'read', context=context)

        # Take care of adding join(s) if groupby is an '_inherits'ed field
        groupby_list = groupby
        qualified_groupby_field = groupby
            if isinstance(groupby, list):
                groupby = groupby[0]
            qualified_groupby_field = self._inherits_join_calc(groupby, query)

            assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
            groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
            assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"

        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fget = self.fields_get(cr, uid, fields)
        group_count = group_by = groupby
            if fget.get(groupby):
                groupby_type = fget[groupby]['type']
                if groupby_type in ('date', 'datetime'):
                    # group date(time) values by month via to_char
                    qualified_groupby_field = "to_char(%s,'yyyy-mm')" % qualified_groupby_field
                    flist = "%s as %s " % (qualified_groupby_field, groupby)
                elif groupby_type == 'boolean':
                    # NULL booleans are grouped together with False
                    qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
                    flist = "%s as %s " % (qualified_groupby_field, groupby)
                    flist = qualified_groupby_field
                # Don't allow arbitrary values, as this would be a SQL injection vector!
                raise except_orm(_('Invalid group_by'),
                                 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))

        # aggregate only plain numeric database columns
        aggregated_fields = [
            if f not in ('id', 'sequence')
            if fget[f]['type'] in ('integer', 'float')
            if (f in self._columns and getattr(self._columns[f], '_classic_write'))]
        for f in aggregated_fields:
            group_operator = fget[f].get('group_operator', 'sum')
            qualified_field = '"%s"."%s"' % (self._table, f)
            flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)

        gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''

        from_clause, where_clause, where_clause_params = query.get_sql()
        where_clause = where_clause and ' WHERE ' + where_clause
        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
        cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)

        for r in cr.dictfetchall():
            for fld, val in r.items():
                # normalize SQL NULLs to the ORM's False convention
                if val == None: r[fld] = False
            alldata[r['id']] = r

        order = orderby or groupby
        data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=order, context=context)
        # the IDS of records that have groupby field value = False or '' should be sorted too
        data_ids += filter(lambda x:x not in data_ids, alldata.keys())
        data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
        # restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
        data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))

            d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
            if not isinstance(groupby_list, (str, unicode)):
                if groupby or not context.get('group_by_no_leaf', False):
                    d['__context'] = {'group_by': groupby_list[1:]}
            if groupby and groupby in fget:
                if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
                    # expand the 'yyyy-mm' group key to a localized month label
                    # and a [first day, last day] domain for drill-down
                    dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
                    days = calendar.monthrange(dt.year, dt.month)[1]
                    date_value = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d')
                    d[groupby] = babel.dates.format_date(
                        date_value, format='MMMM yyyy', locale=context.get('lang', 'en_US'))
                    d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
                                     (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
                del alldata[d['id']][groupby]
            d.update(alldata[d['id']])

        if groupby and groupby in self._group_by_full:
            # fill in the groups that contain no record (e.g. kanban columns)
            data = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
                                                 aggregated_fields, data, read_group_order=order,
2705 def _inherits_join_add(self, current_table, parent_model_name, query):
2707 Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
2708 :param current_table: current model object
2709 :param parent_model_name: name of the parent model for which the clauses should be added
2710 :param query: query object on which the JOIN should be added
2712 inherits_field = current_table._inherits[parent_model_name]
2713 parent_model = self.pool.get(parent_model_name)
2714 parent_table_name = parent_model._table
2715 quoted_parent_table_name = '"%s"' % parent_table_name
2716 if quoted_parent_table_name not in query.tables:
2717 query.tables.append(quoted_parent_table_name)
2718 query.where_clause.append('(%s.%s = %s.id)' % (current_table._table, inherits_field, parent_table_name))
2722 def _inherits_join_calc(self, field, query):
2724 Adds missing table select and join clause(s) to ``query`` for reaching
2725 the field coming from an '_inherits' parent table (no duplicates).
2727 :param field: name of inherited field to reach
2728 :param query: query object on which the JOIN should be added
2729 :return: qualified name of field, to be used in SELECT clause
2731 current_table = self
2732 while field in current_table._inherit_fields and not field in current_table._columns:
2733 parent_model_name = current_table._inherit_fields[field][0]
2734 parent_table = self.pool.get(parent_model_name)
2735 self._inherits_join_add(current_table, parent_model_name, query)
2736 current_table = parent_table
2737 return '"%s".%s' % (current_table._table, field)
    def _parent_store_compute(self, cr):
        """Recompute the parent_left/parent_right nested-set columns for the
        whole table, used for fast hierarchical (child_of) queries.

        NOTE(review): several lines of this method are elided in this
        revision (the guard's return, the if/else around the root test, and
        the pos2 bookkeeping) — confirm against the full source.
        """
        if not self._parent_store:
        _logger.info('Computing parent left and right for table %s...', self._table)
        def browse_rec(root, pos=0):
            # Select the children of `root` (or the parentless roots),
            # ordered by _parent_order when one is defined.
            where = self._parent_name+'='+str(root)
                where = self._parent_name+' IS NULL'
            if self._parent_order:
                where += ' order by '+self._parent_order
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            # Depth-first traversal: each subtree gets a [pos, pos2] interval.
            for id in cr.fetchall():
                pos2 = browse_rec(id[0], pos2)
            cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
        # Number every tree, starting from each parentless root record.
        query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
        if self._parent_order:
            query += ' order by ' + self._parent_order
        for (root,) in cr.fetchall():
            pos = browse_rec(root, pos)
    def _update_store(self, cr, f, k):
        """Backfill the stored column ``k`` of the fields.function ``f`` by
        recomputing its value for every existing record.

        NOTE(review): the batching loop header (slicing ``ids_lst`` into
        ``iids`` chunks) is elided in this revision.
        """
        _logger.info("storing computed values of fields.function '%s'", k)
        ss = self._columns[k]._symbol_set
        update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
        cr.execute('select id from '+self._table)
        ids_lst = map(lambda x: x[0], cr.fetchall())
            # Process ids in batches of 40 to bound memory usage.
            ids_lst = ids_lst[40:]
            res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
            for key, val in res.items():
                # if val is a many2one, just write the ID
                if type(val) == tuple:
                if val is not False:
                    cr.execute(update_query, (ss[1](val), key))
    def _check_selection_field_value(self, cr, uid, field, value, context=None):
        """Raise except_orm if value is not among the valid values for the selection field"""
        if self._columns[field]._type == 'reference':
            # Reference values look like 'model.name,id': validate the id part.
            val_model, val_id_str = value.split(',', 1)
            val_id = long(val_id_str)
                raise except_orm(_('ValidateError'),
                                 _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
        # Static selection lists are checked directly; callable selections are
        # evaluated first before checking membership.
        if isinstance(self._columns[field].selection, (tuple, list)):
            if val in dict(self._columns[field].selection):
        elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
        raise except_orm(_('ValidateError'),
                         _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
    def _check_removed_columns(self, cr, log=False):
        # iterate on the database columns to drop the NOT NULL constraints
        # of fields which were required but have been removed (or will be added by another module)
        columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
        columns += MAGIC_COLUMNS
        # Find live (not dropped) DB columns that no longer back a model field.
        cr.execute("SELECT a.attname, a.attnotnull"
                   " FROM pg_class c, pg_attribute a"
                   " WHERE c.relname=%s"
                   " AND c.oid=a.attrelid"
                   " AND a.attisdropped=%s"
                   " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
                   " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
        for column in cr.dictfetchall():
            _logger.debug("column %s is in the table %s but not in the corresponding object %s",
                          column['attname'], self._table, self._name)
            if column['attnotnull']:
                # Relax the constraint so writes from other modules still work.
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
                _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                              self._table, column['attname'])
    def _save_constraint(self, cr, constraint_name, type):
        """
        Record the creation of a constraint for this model, to make it possible
        to delete it later when the module is uninstalled. Type can be either
        'f' or 'u' depending on the constraing being a foreign key or not.

        NOTE(review): the two ``cr.execute`` call heads and the
        "already recorded" rowcount guard are elided in this revision.
        """
        assert type in ('f', 'u')
        # Skip the insert when the constraint is already recorded for this module.
            SELECT 1 FROM ir_model_constraint, ir_module_module
            WHERE ir_model_constraint.module=ir_module_module.id
                AND ir_model_constraint.name=%s
                AND ir_module_module.name=%s
            """, (constraint_name, self._module))
            INSERT INTO ir_model_constraint
                (name, date_init, date_update, module, model, type)
            VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
                (SELECT id FROM ir_module_module WHERE name=%s),
                (SELECT id FROM ir_model WHERE model=%s), %s)""",
            (constraint_name, self._module, self._name, type))
    def _save_relation_table(self, cr, relation_table):
        """
        Record the creation of a many2many for this model, to make it possible
        to delete it later when the module is uninstalled.

        NOTE(review): the SELECT's ``cr.execute`` head and the rowcount guard
        before the INSERT are elided in this revision.
        """
        # Skip the insert when the relation table is already recorded.
            SELECT 1 FROM ir_model_relation, ir_module_module
            WHERE ir_model_relation.module=ir_module_module.id
                AND ir_model_relation.name=%s
                AND ir_module_module.name=%s
            """, (relation_table, self._module))
            cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
                          VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
                            (SELECT id FROM ir_module_module WHERE name=%s),
                            (SELECT id FROM ir_model WHERE model=%s))""",
                       (relation_table, self._module, self._name))
2869 # checked version: for direct m2o starting from `self`
2870 def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
2871 assert self.is_transient() or not dest_model.is_transient(), \
2872 'Many2One relationships from non-transient Model to TransientModel are forbidden'
2873 if self.is_transient() and not dest_model.is_transient():
2874 # TransientModel relationships to regular Models are annoying
2875 # usually because they could block deletion due to the FKs.
2876 # So unless stated otherwise we default them to ondelete=cascade.
2877 ondelete = ondelete or 'cascade'
2878 self._foreign_keys.append((self._table, source_field, dest_model._table, ondelete or 'set null'))
2879 _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2880 self._table, source_field, dest_model._table, ondelete)
2882 # unchecked version: for custom cases, such as m2m relationships
2883 def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
2884 self._foreign_keys.append((source_table, source_field, dest_model._table, ondelete or 'set null'))
2885 _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s",
2886 source_table, source_field, dest_model._table, ondelete)
2888 def _drop_constraint(self, cr, source_table, constraint_name):
2889 cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
    def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
        """Verify the FK constraint(s) currently backing the many2one column
        ``source_field`` of ``source_table`` and re-create them when stale
        (wrong ON DELETE rule or wrong target table).

        NOTE(review): the ``cons = constraints[0]`` assignment and the
        ``else:`` introducing the multiple-constraints branch are elided in
        this revision.
        """
        # Find FK constraint(s) currently established for the m2o field,
        # and see whether they are stale or not
        cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
                             cl2.relname as foreign_table
                      FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                           pg_attribute as att1, pg_attribute as att2
                      WHERE con.conrelid = cl1.oid
                        AND cl1.relname = %s
                        AND con.confrelid = cl2.oid
                        AND array_lower(con.conkey, 1) = 1
                        AND con.conkey[1] = att1.attnum
                        AND att1.attrelid = cl1.oid
                        AND att1.attname = %s
                        AND array_lower(con.confkey, 1) = 1
                        AND con.confkey[1] = att2.attnum
                        AND att2.attrelid = cl2.oid
                        AND att2.attname = %s
                        AND con.contype = 'f'""", (source_table, source_field, 'id'))
        constraints = cr.dictfetchall()
        if len(constraints) == 1:
            # Is it the right constraint?
            if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
                    or cons['foreign_table'] != dest_model._table:
                # Wrong ON DELETE rule or wrong target table: rebuild the FK.
                _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
                              source_table, cons['constraint_name'])
                self._drop_constraint(cr, source_table, cons['constraint_name'])
                self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
            # else it's all good, nothing to do!
            # Multiple FKs found for the same field, drop them all, and re-create
            for cons in constraints:
                _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
                              source_table, cons['constraint_name'])
                self._drop_constraint(cr, source_table, cons['constraint_name'])
            self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
    def _auto_init(self, cr, context=None):
        """
        Call _field_create and, unless _auto is False:

        - create the corresponding table in database for the model,
        - possibly add the parent columns in database,
        - possibly add the columns 'create_uid', 'create_date', 'write_uid',
          'write_date' in database if _log_access is True (the default),
        - report on database columns no more existing in _columns,
        - remove no more existing not null constraints,
        - alter existing database columns to match _columns,
        - create database tables to match _columns,
        - add database indices to match _columns,
        - save in self._foreign_keys a list of foreign keys to create (see
          _auto_end).

        NOTE(review): many guard lines of this method (``continue``
        statements, ``if create:``/``if res:`` tests, try/except around the
        NOT NULL DDL, intermediate ``cr.commit()`` calls, the ``casts`` list
        header, and the ``todo_end`` initialization/return) are elided in
        this revision — the indentation below is reconstructed.
        """
        self._foreign_keys = []
        raise_on_invalid_object_name(self._name)
        store_compute = False
        update_custom_fields = context.get('update_custom_fields', False)
        self._field_create(cr, context=context)
        create = not self._table_exist(cr)
        if getattr(self, '_auto', True):
            self._create_table(cr)

            if self._parent_store:
                if not self._parent_columns_exist(cr):
                    self._create_parent_columns(cr)
                    store_compute = True

            # Create the create_uid, create_date, write_uid, write_date, columns if desired.
            if self._log_access:
                self._add_log_columns(cr)

            self._check_removed_columns(cr, log=False)

            # iterate on the "object columns"
            column_data = self._select_column_data(cr)

            for k, f in self._columns.iteritems():
                if k in MAGIC_COLUMNS:
                # Don't update custom (also called manual) fields
                if f.manual and not update_custom_fields:
                if isinstance(f, fields.one2many):
                    self._o2m_raise_on_missing_reference(cr, f)
                elif isinstance(f, fields.many2many):
                    self._m2m_raise_or_create_relation(cr, f)
                res = column_data.get(k)

                # The field is not found as-is in database, try if it
                # exists with an old name.
                if not res and hasattr(f, 'oldname'):
                    res = column_data.get(f.oldname)
                    cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
                    column_data[k] = res
                    _schema.debug("Table '%s': renamed column '%s' to '%s'",
                                  self._table, f.oldname, k)

                # The field already exists in database. Possibly
                # change its type, rename it, drop it or change its
                # constraints.
                f_pg_type = res['typname']
                f_pg_size = res['size']
                f_pg_notnull = res['attnotnull']
                # A stored function field turned into a pure function field is
                # dropped from the table (unless flagged nodrop).
                if isinstance(f, fields.function) and not f.store and\
                        not getattr(f, 'nodrop', False):
                    _logger.info('column %s (%s) in table %s removed: converted to a function !\n',
                                 k, f.string, self._table)
                    cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
                    _schema.debug("Table '%s': dropped column '%s' with cascade",
                f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
                # Whitelisted in-database conversions: (pg type, field type,
                # new column type, SQL cast suffix).
                    ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
                    ('varchar', 'text', 'TEXT', ''),
                    ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                    ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
                    ('timestamp', 'date', 'date', '::date'),
                    ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                    ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                # Growing (or unbounding) a varchar is done via a temp column.
                if f_pg_type == 'varchar' and f._type == 'char' and ((f.size is None and f_pg_size) or f_pg_size < f.size):
                    cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                    cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
                    cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
                    cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                    _schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
                                  self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited')
                # Apply a whitelisted conversion when one entry matches.
                if (f_pg_type==c[0]) and (f._type==c[1]):
                    if f_pg_type != f_obj_type:
                        cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
                        cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
                        cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                        _schema.debug("Table '%s': column '%s' changed type from %s to %s",
                                      self._table, k, c[0], c[1])

                if f_pg_type != f_obj_type:
                    # Incompatible type change: move the old data aside into a
                    # fresh "<name>_moved<i>" column that is not taken yet.
                    newname = k + '_moved' + str(i)
                    cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
                        "WHERE c.relname=%s " \
                        "AND a.attname=%s " \
                        "AND c.oid=a.attrelid ", (self._table, newname))
                    if not cr.fetchone()[0]:
                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                    cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
                    cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                    cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                    _schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
                                  self._table, k, f_pg_type, f._type, newname)

                # if the field is required and hasn't got a NOT NULL constraint
                if f.required and f_pg_notnull == 0:
                    # set the field to the default value if any
                    if k in self._defaults:
                        if callable(self._defaults[k]):
                            default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                            default = self._defaults[k]
                        if (default is not None):
                            ss = self._columns[k]._symbol_set
                            query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
                            cr.execute(query, (ss[1](default),))
                    # add the NOT NULL constraint
                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                    _schema.debug("Table '%s': column '%s': added NOT NULL constraint",
                    msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
                        "If you want to have it, you should update the records and execute manually:\n"\
                        "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                    _schema.warning(msg, self._table, k, self._table, k)
                elif not f.required and f_pg_notnull == 1:
                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                    _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",

                # Keep the single-column index in sync with f.select.
                indexname = '%s_%s_index' % (self._table, k)
                cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
                res2 = cr.dictfetchall()
                if not res2 and f.select:
                    cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                    if f._type == 'text':
                        # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
                        msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\
                            "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
                            " because there is a length limit for indexable btree values!\n"\
                            "Use a search view instead if you simply want to make the field searchable."
                        _schema.warning(msg, self._table, f._type, k)
                if res2 and not f.select:
                    cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
                    msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
                    _schema.debug(msg, self._table, k, f._type)

                if isinstance(f, fields.many2one):
                    dest_model = self.pool.get(f._obj)
                    if dest_model._table != 'ir_actions':
                        self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete)

                # The field doesn't exist in database. Create it if necessary.
                if not isinstance(f, fields.function) or f.store:
                    # add the missing field
                    cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                    cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                    _schema.debug("Table '%s': added column '%s' with definition=%s",
                                  self._table, k, get_pg_type(f)[1])

                    # Initialize the new column for pre-existing rows.
                    if not create and k in self._defaults:
                        if callable(self._defaults[k]):
                            default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                            default = self._defaults[k]
                        ss = self._columns[k]._symbol_set
                        query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
                        cr.execute(query, (ss[1](default),))
                        _logger.debug("Table '%s': setting default value of new column %s", self._table, k)

                    # remember the functions to call for the stored fields
                    if isinstance(f, fields.function):
                        if f.store is not True: # i.e. if f.store is a dict
                            order = f.store[f.store.keys()[0]][2]
                        todo_end.append((order, self._update_store, (f, k)))

                    # and add constraints if needed
                    if isinstance(f, fields.many2one):
                        if not self.pool.get(f._obj):
                            raise except_orm('Programming Error', ('There is no reference available for %s') % (f._obj,))
                        dest_model = self.pool.get(f._obj)
                        ref = dest_model._table
                        # ir_actions is inherited so foreign key doesn't work on it
                        if ref != 'ir_actions':
                            self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
                    cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                    _schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
                    msg = "WARNING: unable to set column %s of table %s not null !\n"\
                        "Try to re-run: openerp-server --update=module\n"\
                        "If it doesn't work, update records and execute manually:\n"\
                        "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                    _logger.warning(msg, k, self._table, self._table, k)

        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
        create = not bool(cr.fetchone())

        cr.commit() # start a new transaction

        self._add_sql_constraints(cr)

        self._execute_sql(cr)

        self._parent_store_compute(cr)
    def _auto_end(self, cr, context=None):
        """ Create the foreign keys recorded by _auto_init. """
        for t, k, r, d in self._foreign_keys:
            cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
            # Record the FK so module uninstallation can drop it again.
            self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
        del self._foreign_keys
    def _table_exist(self, cr):
        """Query pg_class to determine whether the model's table (or a view
        of the same name) already exists; the return statement is elided in
        this revision."""
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
3218 def _create_table(self, cr):
3219 cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id)) WITHOUT OIDS' % (self._table,))
3220 cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,))
3221 _schema.debug("Table '%s': created", self._table)
    def _parent_columns_exist(self, cr):
        """Probe pg_attribute for the 'parent_left' column on the model's
        table; the return statement is elided in this revision."""
        cr.execute("""SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, 'parent_left'))
    def _create_parent_columns(self, cr):
        """Add the parent_left/parent_right columns used by _parent_store,
        logging errors when the model definition does not declare them as
        indexed integer fields with an ondelete='cascade' parent link.

        NOTE(review): the trailing argument (``self._name``) of each
        ``_logger.error`` call is elided in this revision.
        """
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
        if 'parent_left' not in self._columns:
            _logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, 'parent_left', 'INTEGER')
        elif not self._columns['parent_left'].select:
            _logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
        if 'parent_right' not in self._columns:
            _logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, 'parent_right', 'INTEGER')
        elif not self._columns['parent_right'].select:
            _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
        if self._columns[self._parent_name].ondelete != 'cascade':
            _logger.error("The column %s on object %s must be set as ondelete='cascade'",
                          self._parent_name, self._name)
    def _add_log_columns(self, cr):
        """Add the create_uid/create_date/write_uid/write_date audit columns
        to the table when they are missing.

        NOTE(review): the probing ``cr.execute`` head and the
        "column missing" guard are elided in this revision.
        """
        for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
            # Probe pg_attribute before adding the column.
                FROM pg_class c, pg_attribute a
                WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                """, (self._table, field))
                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
                _schema.debug("Table '%s': added column '%s' with definition=%s",
                              self._table, field, field_def)
3272 def _select_column_data(self, cr):
3273 # attlen is the number of bytes necessary to represent the type when
3274 # the type has a fixed size. If the type has a varying size attlen is
3275 # -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
3276 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \
3277 "FROM pg_class c,pg_attribute a,pg_type t " \
3278 "WHERE c.relname=%s " \
3279 "AND c.oid=a.attrelid " \
3280 "AND a.atttypid=t.oid", (self._table,))
3281 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
    def _o2m_raise_on_missing_reference(self, cr, f):
        """Raise except_orm when the inverse many2one column of the one2many
        field ``f`` does not exist on the destination model.

        NOTE(review): the guard checking that the destination model exists
        (``if other:``) is elided in this revision.
        """
        # TODO this check should be a method on fields.one2many.
        other = self.pool.get(f._obj)
            # TODO the condition could use fields_get_keys().
            if f._fields_id not in other._columns.keys():
                if f._fields_id not in other._inherit_fields.keys():
                    raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id, f._obj,))
    def _m2m_raise_or_create_relation(self, cr, f):
        """Create the many2many relation table for field ``f`` when it does
        not exist yet (with FKs, indexes and a table comment), or raise if
        the destination model is unknown."""
        m2m_tbl, col1, col2 = f._sql_names(self)
        self._save_relation_table(cr, m2m_tbl)
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
        if not cr.dictfetchall():
            if not self.pool.get(f._obj):
                raise except_orm('Programming Error', ('Many2Many destination model does not exist: `%s`') % (f._obj,))
            dest_model = self.pool.get(f._obj)
            ref = dest_model._table
            cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s")) WITH OIDS' % (m2m_tbl, col1, col2, col1, col2))
            # create foreign key references with ondelete=cascade, unless the targets are SQL views
            cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
            if not cr.fetchall():
                self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
            cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
            if not cr.fetchall():
                self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')
            # Index both link columns for fast joins in either direction.
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
            cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
            _schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
    def _add_sql_constraints(self, cr):
        """
        Modify this model's database table constraints so they match the one in
        _sql_constraints.

        NOTE(review): the literal header of the ``sql_actions`` dict (the
        'drop'/'add' entries with their 'order'/'execute' keys) and the
        try/except around query execution are elided in this revision.
        """
        def unify_cons_text(txt):
            # Normalize constraint definitions so textual comparison ignores
            # insignificant whitespace differences.
            return txt.lower().replace(', ',',').replace(' (','(')

        for (key, con, _) in self._sql_constraints:
            conname = '%s_%s' % (self._table, key)

            self._save_constraint(cr, conname, 'u')
            cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
            existing_constraints = cr.dictfetchall()
                    'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
                    'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
                        self._table, conname, con),
                    'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
                    'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
                    'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
                    'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
            if not existing_constraints:
                # constraint does not exists:
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
                # constraint exists but its definition has changed:
                sql_actions['drop']['execute'] = True
                sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            # we need to add the constraint:
            sql_actions = [item for item in sql_actions.values()]
            sql_actions.sort(key=lambda x: x['order'])
            for sql_action in [action for action in sql_actions if action['execute']]:
                cr.execute(sql_action['query'])
                _schema.debug(sql_action['msg_ok'])
                _schema.warning(sql_action['msg_err'])
    def _execute_sql(self, cr):
        """ Execute the SQL code from the _sql attribute (if any)."""
        if hasattr(self, "_sql"):
            # Statements are separated by semicolons; the execution of each
            # cleaned statement is elided in this revision.
            for line in self._sql.split(';'):
                line2 = line.replace('\n', '').strip()
3388 # Update objects that uses this one to update their _inherits fields
3391 def _inherits_reload_src(self):
3392 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3393 for obj in self.pool.models.values():
3394 if self._name in obj._inherits:
3395 obj._inherits_reload()
    def _inherits_reload(self):
        """ Recompute the _inherit_fields mapping.

        This will also call itself on each inherits'd child model.

        NOTE(review): the initialization of the ``res`` accumulator is
        elided in this revision.
        """
        for table in self._inherits:
            other = self.pool.get(table)
            # Each entry maps a field name to (parent model, link field,
            # column object, model that originally defines the column).
            for col in other._columns.keys():
                res[col] = (table, self._inherits[table], other._columns[col], table)
            for col in other._inherit_fields.keys():
                res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
        self._inherit_fields = res
        self._all_columns = self._get_column_infos()
        # Propagate the recomputation to models inheriting from this one.
        self._inherits_reload_src()
    def _get_column_infos(self):
        """Returns a dict mapping all fields names (direct fields and
        inherited field via _inherits) to a ``column_info`` struct
        giving detailed columns """
        # Inherited fields first, so directly-defined columns override them
        # below.  (The ``result`` initialization and return are elided in
        # this revision.)
        for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
            result[k] = fields.column_info(k, col, parent, m2o, original_parent)
        for k, col in self._columns.iteritems():
            result[k] = fields.column_info(k, col)
3428 def _inherits_check(self):
3429 for table, field_name in self._inherits.items():
3430 if field_name not in self._columns:
3431 _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, self._name)
3432 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
3433 required=True, ondelete="cascade")
3434 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() != "cascade":
3435 _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade", forcing it.', field_name, self._name)
3436 self._columns[field_name].required = True
3437 self._columns[field_name].ondelete = "cascade"
3439 #def __getattr__(self, name):
3441 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3442 # (though inherits doesn't use Python inheritance).
3443 # Handles translating between local ids and remote ids.
3444 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3445 # when you have inherits.
3447 # for model, field in self._inherits.iteritems():
3448 # proxy = self.pool.get(model)
3449 # if hasattr(proxy, name):
3450 # attribute = getattr(proxy, name)
3451 # if not hasattr(attribute, '__call__'):
3455 # return super(orm, self).__getattr__(name)
3457 # def _proxy(cr, uid, ids, *args, **kwargs):
3458 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3459 # lst = [obj[field].id for obj in objects if obj[field]]
3460 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
    def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
        """ Return the definition of each field.

        The returned value is a dictionary (indiced by field name) of
        dictionaries. The _inherits'd fields are included. The string, help,
        and selection (if present) attributes are translated.

        :param cr: database cursor
        :param user: current user id
        :param fields: list of fields
        :param context: context arguments, like lang, time zone
        :return: dictionary of field dictionaries, each one describing a field of the business object
        :raise AccessError: * if user has no create/write rights on the requested object

        NOTE(review): several guard lines (the ``res``/``sel2``
        initializations, ``continue`` statements, and the truthiness checks
        on the translated strings) are elided in this revision.
        """
        write_access = self.check_access_rights(cr, user, 'write', raise_exception=False) \
            or self.check_access_rights(cr, user, 'create', raise_exception=False)
        translation_obj = self.pool.get('ir.translation')
        # Parents' fields go in first so local definitions override them.
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

        for f, field in self._columns.iteritems():
            # Skip fields not requested, and fields restricted to groups the
            # user does not belong to.
            if (allfields and f not in allfields) or \
                    (field.groups and not self.user_has_groups(cr, user, groups=field.groups, context=context)):
            res[f] = fields.field_to_dict(self, cr, user, field, context=context)
            if not write_access:
                res[f]['readonly'] = True
                res[f]['states'] = {}
            # Translate label/help/selection values when a language is set.
            if 'lang' in context:
                if 'string' in res[f]:
                    res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context['lang'])
                    res[f]['string'] = res_trans
                if 'help' in res[f]:
                    help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context['lang'])
                    res[f]['help'] = help_trans
                if 'selection' in res[f]:
                    if isinstance(field.selection, (tuple, list)):
                        sel = field.selection
                        for key, val in sel:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context['lang'], val)
                            sel2.append((key, val2 or val))
                        res[f]['selection'] = sel2
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
    """ Read records with given ids with the given fields

    :param cr: database cursor
    :param user: current user id
    :param ids: id or list of the ids of the records to read
    :param fields: optional list of field names to return (default: all fields would be returned)
    :type fields: list (example ['field_name_1', ...])
    :param context: optional context dictionary - it may contain keys for specifying certain options
                    like ``context_lang``, ``context_tz`` to alter the results of the call.
                    A special ``bin_size`` boolean flag may also be passed in the context to request the
                    value of all fields.binary columns to be returned as the size of the binary instead of its
                    contents. This can also be selectively overridden by passing a field-specific flag
                    in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                    Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
    :return: list of dictionaries((dictionary per record asked)) with requested field values
    :rtype: [{'name_of_the_field': value, ...}, ...]
    :raise AccessError: * if user has no read rights on the requested object
                        * if user tries to bypass access rules for read on the requested object
    """
    if not context:
        context = {}
    self.check_access_rights(cr, user, 'read')
    if not fields:
        # default to every local and inherited field
        fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
    if isinstance(ids, (int, long)):
        select = [ids]
    else:
        select = ids
    # entries may be dicts carrying an 'id' key; reduce them to plain ids
    select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
    result = self._read_flat(cr, user, select, fields, context, load)

    # normalize SQL NULLs to False, the ORM convention for "no value"
    for r in result:
        for key, v in r.items():
            if v is None:
                r[key] = False

    if isinstance(ids, (int, long, dict)):
        # single id requested: return a single record dict (or False)
        return result and result[0] or False
    return result
def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
    # Low-level companion of read(): fetch the requested columns for `ids`,
    # applying ir.rule record rules, resolving _inherits parents, translated
    # fields, and function/post-processed fields.
    # NOTE(review): several lines of this body are missing from this copy
    # (loop headers, accumulator initialisations, the final return); the
    # comments below describe only the code that is present — restore the
    # missing lines from upstream before relying on this method.
    if fields_to_read == None:
        fields_to_read = self._columns.keys()

    # Construct a clause for the security rules.
    # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
    # or will at least contain self._table.
    rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

    # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
    fields_pre = [f for f in fields_to_read if
                  f == self.CONCURRENCY_CHECK_FIELD
                  or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                 ] + self._inherits.values()

    def convert_field(f):
        # Map a field name to the SQL expression that selects it.
        f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
        if f in ('create_date', 'write_date'):
            # strip sub-second precision from the log-access timestamps
            return "date_trunc('second', %s) as %s" % (f_qual, f)
        if f == self.CONCURRENCY_CHECK_FIELD:
            if self._log_access:
                return "COALESCE(%s.write_date, %s.create_date, (now() at time zone 'UTC'))::timestamp AS %s" % (self._table, self._table, f,)
            return "(now() at time zone 'UTC')::timestamp AS %s" % (f,)
        if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
            # bin_size: return the size of the binary instead of its content
            return 'length(%s) as "%s"' % (f_qual, f)

    fields_pre2 = map(convert_field, fields_pre)
    order_by = self._parent_order or self._order
    select_fields = ','.join(fields_pre2 + [self._table + '.id'])
    query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
    query += " AND " + (' OR '.join(rule_clause))
    query += " ORDER BY " + order_by
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute(query, [tuple(sub_ids)] + rule_params)
        # raises if record rules filtered out some of the requested ids
        self._check_record_rules_result_count(cr, user, sub_ids, 'read', context=context)
        cr.execute(query, (tuple(sub_ids),))
        res.extend(cr.dictfetchall())
    res = map(lambda x: {'id': x}, ids)

    # overlay translated values for translatable classic fields
    for f in fields_pre:
        if f == self.CONCURRENCY_CHECK_FIELD:
        if self._columns[f].translate:
            ids = [x['id'] for x in res]
            #TODO: optimize out of this loop
            res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context.get('lang', False) or 'en_US', ids)
            r[f] = res_trans.get(r['id'], False) or r[f]

    # read the _inherits parents' records and merge their values in
    for table in self._inherits:
        col = self._inherits[table]
        cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
        res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
        if not record[col]: # if the record is deleted from _inherits table?
        record.update(res3[record[col]])
        if col not in fields_to_read:

    # all fields which need to be post-processed by a simple function (symbol_get)
    fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
    for f in fields_post:
        r[f] = self._columns[f]._symbol_get(r[f])
    ids = [x['id'] for x in res]

    # all non inherited fields for which the attribute whose name is in load is False
    fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

    # Compute POST fields: group multi-fields by their _multi key so each
    # function field is computed once for all its sub-fields
    for f in fields_post:
        todo.setdefault(self._columns[f]._multi, [])
        todo[self._columns[f]._multi].append(f)
    for key, val in todo.items():
        res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
        assert res2 is not None, \
            'The function field "%s" on the "%s" model returned None\n' \
            '(a dictionary was expected).' % (val[0], self._name)
        if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instead of dict in python2.6
        multi_fields = res2.get(record['id'],{})
        record[pos] = multi_fields.get(pos,[])
        res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
        record[f] = res2[record['id']]

    # Warn about deprecated fields now that fields_pre and fields_post are computed
    # Explicitly use list() because we may receive tuples
    for f in list(fields_pre) + list(fields_post):
        field_column = self._all_columns.get(f) and self._all_columns.get(f).column
        if field_column and field_column.deprecated:
            _logger.warning('Field %s.%s is deprecated: %s', self._name, f, field_column.deprecated)
# NOTE(review): this fragment is heavily truncated in this copy and its
# enclosing definition is not visible; it appears to replace the values of
# fields whose `groups` attribute excludes the current user with a
# "no permission" placeholder (cf. the similar group checks in write() and
# create()) — TODO confirm against upstream before relying on it.
for field in vals.copy():
    if field in self._columns:
        fobj = self._columns[field]
    for group in groups:
        module = group.split(".")[0]
        grp = group.split(".")[1]
        # count memberships of `user` in `group` (group resolved through
        # its ir_model_data xml id: module + name)
        cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                   (grp, module, 'res.groups', user))
        readonly = cr.fetchall()
        if readonly[0][0] >= 1:
        elif readonly[0][0] == 0:
    if type(vals[field]) == type([]):
    elif type(vals[field]) == type(0.0):
    elif type(vals[field]) == type(''):
        vals[field] = '=No Permission='
# TODO check READ access
def perm_read(self, cr, user, ids, context=None, details=True):
    """
    Returns some metadata about the given records.

    :param details: if True, \*_uid fields are replaced with the name of the user
    :return: list of ownership dictionaries for each requested record
    :rtype: list of dictionaries with the following keys:

        * create_uid: user who created the record
        * create_date: date when the record was created
        * write_uid: last user who changed the record
        * write_date: date of the last change to the record
        * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
    """
    # NOTE(review): parts of this body are missing from this copy (e.g. the
    # initialisation of `fields`, the per-record loop headers and the final
    # return); restore them from upstream before relying on this method.
    uniq = isinstance(ids, (int, long))
    if self._log_access:
        # log-access models also expose creation/modification metadata
        fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
    quoted_table = '"%s"' % self._table
    fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
    # LEFT JOIN on ir_model_data so the record's XML ID (if any) is reported too
    query = '''SELECT %s, __imd.module, __imd.name
               FROM %s LEFT JOIN ir_model_data __imd
               ON (__imd.model = %%s and __imd.res_id = %s.id)
               WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
    cr.execute(query, (self._name, tuple(ids)))
    res = cr.dictfetchall()
    # normalize NULLs to False
    r[key] = r[key] or False
    if details and key in ('write_uid', 'create_uid') and r[key]:
        # replace the numeric uid with the (id, name) pair from name_get
        r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
        pass # Leave the numeric uid there
    r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
    del r['name'], r['module']
3777 def _check_concurrency(self, cr, ids, context):
3780 if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
3782 check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)"
3783 for sub_ids in cr.split_for_in_conditions(ids):
3786 id_ref = "%s,%s" % (self._name, id)
3787 update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
3789 ids_to_check.extend([id, update_date])
3790 if not ids_to_check:
3792 cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
3795 # mention the first one only to keep the error message readable
3796 raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
3798 def _check_record_rules_result_count(self, cr, uid, ids, operation, context=None):
3799 """Verify that number of returned rows after applying record rules matches
3800 the length of `ids`, and raise an appropriate exception if it does not.
3802 if cr.rowcount != len(ids):
3803 # Attempt to distinguish record rule restriction vs deleted records,
3804 # to provide a more specific error message
3805 cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(ids),))
3806 if cr.rowcount != len(ids):
3807 if operation == 'unlink':
3808 # no need to warn about deleting an already deleted record!
3810 _logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name)
3811 raise except_orm(_('Missing document(s)'),
3812 _('One of the documents you are trying to access has been deleted, please try again after refreshing.'))
3813 _logger.warning('Access Denied by record rules for operation: %s, uid: %s, model: %s', operation, uid, self._name)
3814 raise except_orm(_('Access Denied'),
3815 _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
3816 (self._description, operation))
def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
    """Check whether the current user may perform ``operation`` on this
    model, delegating the actual verification to ``ir.model.access``.

    With ``raise_exception`` set to False a boolean is returned instead of
    raising on denial."""
    access_obj = self.pool.get('ir.model.access')
    return access_obj.check(cr, uid, self._name, operation, raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
    """Verifies that the operation given by ``operation`` is allowed for the user
    according to ir.rules.

    :param operation: one of ``write``, ``unlink``
    :raise except_orm: * if current ir.rules do not permit this operation.
    :return: None if the operation is allowed
    """
    # The superuser bypasses record rules entirely.
    if uid == SUPERUSER_ID:
        return

    if self.is_transient():
        # Only one single implicit access rule for transient models: owner only!
        # This is ok to hardcode because we assert that TransientModels always
        # have log_access enabled so that the create_uid column is always there.
        # And even with _inherits, these fields are always present in the local
        # table too, so no need for JOINs.
        cr.execute("""SELECT distinct create_uid
                      FROM %s
                      WHERE id IN %%s""" % self._table, (tuple(ids),))
        uids = [x[0] for x in cr.fetchall()]
        if len(uids) != 1 or uids[0] != uid:
            raise except_orm(_('Access Denied'),
                             _('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,))
    else:
        where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
        if where_clause:
            where_clause = ' and ' + ' and '.join(where_clause)
            for sub_ids in cr.split_for_in_conditions(ids):
                cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                           ' WHERE ' + self._table + '.id IN %s' + where_clause,
                           [sub_ids] + where_params)
                # raises if some ids were filtered out by the rules
                self._check_record_rules_result_count(cr, uid, sub_ids, operation, context=context)
def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
    """Call given workflow trigger as a result of a CRUD operation.

    :param trigger: name of the workflow service method to call
                    (e.g. 'trg_delete', 'trg_write'), invoked once per id
    """
    wf_service = netsvc.LocalService("workflow")
    # restore the per-record iteration: `res_id` was referenced without
    # ever being bound in the truncated original
    for res_id in ids:
        getattr(wf_service, trigger)(uid, self._name, res_id, cr)
def _workflow_signal(self, cr, uid, ids, signal, context=None):
    """Send given workflow signal to each record in ``ids``."""
    wf_service = netsvc.LocalService("workflow")
    # restore the per-record iteration: `res_id` was referenced without
    # ever being bound in the truncated original
    for res_id in ids:
        wf_service.trg_validate(uid, self._name, res_id, signal, cr)
def unlink(self, cr, uid, ids, context=None):
    """
    Delete records with given ids

    :param cr: database cursor
    :param uid: current user id
    :param ids: id or list of ids
    :param context: (optional) context arguments, like lang, time zone
    :raise AccessError: * if user has no unlink rights on the requested object
                        * if user tries to bypass access rules for unlink on the requested object
    :raise UserError: if the record is default property for other records
    """
    # NOTE(review): a few lines of this body are missing from this copy
    # (e.g. the single-id normalisation body and the final return); restore
    # them from upstream before relying on this method.
    if isinstance(ids, (int, long)):
    # collect stored/function fields to recompute after the deletion
    result_store = self._store_get_values(cr, uid, ids, None, context)
    self._check_concurrency(cr, ids, context)
    self.check_access_rights(cr, uid, 'unlink')
    ir_property = self.pool.get('ir.property')
    # Check if the records are used as default properties.
    domain = [('res_id', '=', False),
              ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
    if ir_property.search(cr, uid, domain, context=context):
        raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))
    # Delete the records' properties.
    property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context)
    ir_property.unlink(cr, uid, property_ids, context=context)
    self._workflow_trigger(cr, uid, ids, 'trg_delete', context=context)
    self.check_access_rule(cr, uid, ids, 'unlink', context=context)
    pool_model_data = self.pool.get('ir.model.data')
    ir_values_obj = self.pool.get('ir.values')
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('delete from ' + self._table + ' ' \
                   'where id IN %s', (sub_ids,))
        # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
        # as these are not connected with real database foreign keys, and would be dangling references.
        # Note: following steps performed as admin to avoid access rights restrictions, and with no context
        # to avoid possible side-effects during admin calls.
        # Step 1. Calling unlink of ir_model_data only for the affected IDS
        reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
        # Step 2. Perform the actual deletion of the referenced ir_model_data records
        pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)
        # For the same reason, removing the record relevant to ir_values
        ir_value_ids = ir_values_obj.search(cr, uid,
            ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
        ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)
    # recompute stored fields on other models that referenced the deleted records
    for order, object, store_ids, fields in result_store:
        if object != self._name:
            obj = self.pool.get(object)
            # only recompute records that still exist
            cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
            rids = map(lambda x: x[0], cr.fetchall())
            obj._store_set_values(cr, uid, rids, fields, context)
def write(self, cr, user, ids, vals, context=None):
    """
    Update records with given ids with the given field values

    :param cr: database cursor
    :param user: current user id
    :param ids: object id or list of object ids to update according to **vals**
    :param vals: field values to update, e.g {'field_name': new_field_value, ...}
    :type vals: dictionary
    :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
    :type context: dictionary
    :raise AccessError: * if user has no write rights on the requested object
                        * if user tries to bypass access rules for write on the requested object
    :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
    :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)

    **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:

        + For a many2many field, a list of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
             (3, ID)                cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
             (4, ID)                link to existing record with id = ID (adds a relationship)
             (5)                    unlink all (like using (3,ID) for all linked records)
             (6, 0, [IDs])          replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)

             Example:
                [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]

        + For a one2many field, a list of tuples is expected.
          Here is the list of tuple that are accepted, with the corresponding semantics ::

             (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
             (1, ID, { values })    update the linked record with id = ID (write *values* on it)
             (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)

             Example:
                [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]

        + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
        + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
    """
    # NOTE(review): numerous lines of this body are missing from this copy
    # (accumulator initialisations such as upd0/upd1/upd_todo/updend/nids,
    # several loop headers and else-branches, and the final return); the
    # comments below describe only the code that is present — restore the
    # missing lines from upstream before relying on this method.
    # Drop fields the user may not write, based on the field's `groups`.
    for field in vals.copy():
        if field in self._columns:
            fobj = self._columns[field]
        elif field in self._inherit_fields:
            fobj = self._inherit_fields[field][2]
        for group in groups:
            module = group.split(".")[0]
            grp = group.split(".")[1]
            # count memberships of `user` in `group` (resolved via ir_model_data)
            cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                       (grp, module, 'res.groups', user))
            readonly = cr.fetchall()
            if readonly[0][0] >= 1:
    if isinstance(ids, (int, long)):
    self._check_concurrency(cr, ids, context)
    self.check_access_rights(cr, user, 'write')
    # stored/function fields to recompute for the old values
    result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
    # No direct update of parent_left/right
    vals.pop('parent_left', None)
    vals.pop('parent_right', None)
    parents_changed = []
    parent_order = self._parent_order or self._order
    if self._parent_store and (self._parent_name in vals):
        # The parent_left/right computation may take up to
        # 5 seconds. No need to recompute the values if the
        # parent is the same.
        # Note: to respect parent_order, nodes must be processed in
        # order, so ``parents_changed`` must be ordered properly.
        parent_val = vals[self._parent_name]
        query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
                (self._table, self._parent_name, self._parent_name, parent_order)
        cr.execute(query, (tuple(ids), parent_val))
        query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
                (self._table, self._parent_name, parent_order)
        cr.execute(query, (tuple(ids),))
        parents_changed = map(operator.itemgetter(0), cr.fetchall())
    # split `vals` into direct SQL updates (classic_write) and fields that
    # need their column's set() method (upd_todo) or belong to parents (updend)
    totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
    field_column = self._all_columns.get(field) and self._all_columns.get(field).column
    if field_column and field_column.deprecated:
        _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
    if field in self._columns:
        if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
            if (not totranslate) or not self._columns[field].translate:
                upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                upd1.append(self._columns[field]._symbol_set[1](vals[field]))
            direct.append(field)
        upd_todo.append(field)
        updend.append(field)
    if field in self._columns \
        and hasattr(self._columns[field], 'selection') \
        self._check_selection_field_value(cr, user, field, vals[field], context=context)

    if self._log_access:
        upd0.append('write_uid=%s')
        upd0.append("write_date=(now() at time zone 'UTC')")

    self.check_access_rule(cr, user, ids, 'write', context=context)
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
                   'where id IN %s', upd1 + [sub_ids])
        if cr.rowcount != len(sub_ids):
            raise except_orm(_('AccessError'),
                             _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)

    # translated fields: store the source term and register the translation
    if self._columns[f].translate:
        src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
        # Inserting value to DB
        self.write(cr, user, ids, {f: vals[f]})
        self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)

    # call the 'set' method of fields which are not classic_write
    upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

    # default element in context must be removed when call a one2many or many2many
    rel_context = context.copy()
    for c in context.items():
        if c[0].startswith('default_'):
            del rel_context[c[0]]

    for field in upd_todo:
        result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []

    # propagate parent-model values through _inherits
    unknown_fields = updend[:]
    for table in self._inherits:
        col = self._inherits[table]
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                       'where id IN %s', (sub_ids,))
            nids.extend([x[0] for x in cr.fetchall()])
        if self._inherit_fields[val][0] == table:
            unknown_fields.remove(val)
        self.pool.get(table).write(cr, user, nids, v, context)
        'No such field(s) in model %s: %s.',
        self._name, ', '.join(unknown_fields))
    self._validate(cr, user, ids, context)

    # TODO: use _order to set dest at the right position and not first node of parent
    # We can't defer parent_store computation because the stored function
    # fields that are computed may refer (directly or indirectly) to
    # parent_left/right (via a child_of domain)
    self.pool._init_parent[self._name] = True
    order = self._parent_order or self._order
    parent_val = vals[self._parent_name]
    clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
    clause, params = '%s IS NULL' % (self._parent_name,), ()
    for id in parents_changed:
        cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
        pleft, pright = cr.fetchone()
        # size of the subtree being moved
        distance = pright - pleft + 1

        # Positions of current siblings, to locate proper insertion point;
        # this can _not_ be fetched outside the loop, as it needs to be refreshed
        # after each update, in case several nodes are sequentially inserted one
        # next to the other (i.e computed incrementally)
        cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
        parents = cr.fetchall()

        # Find Position of the element
        for (parent_pright, parent_id) in parents:
            position = parent_pright + 1
        # It's the first node of the parent
        cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
        position = cr.fetchone()[0] + 1
        # a node may not be moved inside its own subtree
        if pleft < position <= pright:
            raise except_orm(_('UserError'), _('Recursivity Detected.'))
        if pleft < position:
            # shift the destination gap open, then move the subtree right
            cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
        cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
        cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
        cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))

    # recompute stored fields with the new values
    result += self._store_get_values(cr, user, ids, vals.keys(), context)
    for order, object, ids_to_update, fields_to_recompute in result:
        key = (object, tuple(fields_to_recompute))
        done.setdefault(key, {})
        # avoid to do several times the same computation
        for id in ids_to_update:
            if id not in done[key]:
                done[key][id] = True
        self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)

    self._workflow_trigger(cr, user, ids, 'trg_write', context=context)
4215 # TODO: Should set perm to user.xxx
4217 def create(self, cr, user, vals, context=None):
4219 Create a new record for the model.
4221 The values for the new record are initialized using the ``vals``
4222 argument, and if necessary the result of ``default_get()``.
4224 :param cr: database cursor
4225 :param user: current user id
4227 :param vals: field values for new record, e.g {'field_name': field_value, ...}
4228 :type vals: dictionary
4229 :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
4230 :type context: dictionary
4231 :return: id of new record created
4232 :raise AccessError: * if user has no create rights on the requested object
4233 * if user tries to bypass access rules for create on the requested object
4234 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
4235 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
4237 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
4238 Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
4245 if self.is_transient():
4246 self._transient_vacuum(cr, user)
4248 self.check_access_rights(cr, user, 'create')
4250 if self._log_access:
4251 for f in LOG_ACCESS_COLUMNS:
4252 if vals.pop(f, None) is not None:
4254 'Field `%s` is not allowed when creating the model `%s`.',
4256 vals = self._add_missing_default_values(cr, user, vals, context)
4259 for v in self._inherits:
4260 if self._inherits[v] not in vals:
4263 tocreate[v] = {'id': vals[self._inherits[v]]}
4264 (upd0, upd1, upd2) = ('', '', [])
4267 for v in vals.keys():
4268 if v in self._inherit_fields and v not in self._columns:
4269 (table, col, col_detail, original_parent) = self._inherit_fields[v]
4270 tocreate[table][v] = vals[v]
4273 if (v not in self._inherit_fields) and (v not in self._columns):
4275 unknown_fields.append(v)
4278 'No such field(s) in model %s: %s.',
4279 self._name, ', '.join(unknown_fields))
4281 # Try-except added to filter the creation of those records whose filds are readonly.
4282 # Example : any dashboard which has all the fields readonly.(due to Views(database views))
4284 cr.execute("SELECT nextval('"+self._sequence+"')")
4286 raise except_orm(_('UserError'),
4287 _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
4289 id_new = cr.fetchone()[0]
4290 for table in tocreate:
4291 if self._inherits[table] in vals:
4292 del vals[self._inherits[table]]
4294 record_id = tocreate[table].pop('id', None)
4296 if record_id is None or not record_id:
4297 record_id = self.pool.get(table).create(cr, user, tocreate[table], context=context)
4299 self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=context)
4301 upd0 += ',' + self._inherits[table]
4303 upd2.append(record_id)
4305 #Start : Set bool fields to be False if they are not touched(to make search more powerful)
4306 bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
4308 for bool_field in bool_fields:
4309 if bool_field not in vals:
4310 vals[bool_field] = False
4312 for field in vals.copy():
4314 if field in self._columns:
4315 fobj = self._columns[field]
4317 fobj = self._inherit_fields[field][2]
4323 for group in groups:
4324 module = group.split(".")[0]
4325 grp = group.split(".")[1]
4326 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
4327 (grp, module, 'res.groups', user))
4328 readonly = cr.fetchall()
4329 if readonly[0][0] >= 1:
4332 elif readonly[0][0] == 0:
4340 if self._columns[field]._classic_write:
4341 upd0 = upd0 + ',"' + field + '"'
4342 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
4343 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
4344 #for the function fields that receive a value, we set them directly in the database
4345 #(they may be required), but we also need to trigger the _fct_inv()
4346 if (hasattr(self._columns[field], '_fnct_inv')) and not isinstance(self._columns[field], fields.related):
4347 #TODO: this way to special case the related fields is really creepy but it shouldn't be changed at
4348 #one week of the release candidate. It seems the only good way to handle correctly this is to add an
4349 #attribute to make a field `really readonly´ and thus totally ignored by the create()... otherwise
4350 #if, for example, the related has a default value (for usability) then the fct_inv is called and it
4351 #may raise some access rights error. Changing this is a too big change for now, and is thus postponed
4352 #after the release but, definitively, the behavior shouldn't be different for related and function
4354 upd_todo.append(field)
4356 #TODO: this `if´ statement should be removed because there is no good reason to special case the fields
4357 #related. See the above TODO comment for further explanations.
4358 if not isinstance(self._columns[field], fields.related):
4359 upd_todo.append(field)
4360 if field in self._columns \
4361 and hasattr(self._columns[field], 'selection') \
4363 self._check_selection_field_value(cr, user, field, vals[field], context=context)
4364 if self._log_access:
4365 upd0 += ',create_uid,create_date,write_uid,write_date'
4366 upd1 += ",%s,(now() at time zone 'UTC'),%s,(now() at time zone 'UTC')"
4367 upd2.extend((user, user))
4368 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
4369 self.check_access_rule(cr, user, [id_new], 'create', context=context)
4370 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
4372 if self._parent_store and not context.get('defer_parent_store_computation'):
4374 self.pool._init_parent[self._name] = True
4376 parent = vals.get(self._parent_name, False)
4378 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
4380 result_p = cr.fetchall()
4381 for (pleft,) in result_p:
4386 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
4387 pleft_old = cr.fetchone()[0]
4390 cr.execute('select max(parent_right) from '+self._table)
4391 pleft = cr.fetchone()[0] or 0
4392 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
4393 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
4394 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
4396 # default element in context must be remove when call a one2many or many2many
4397 rel_context = context.copy()
4398 for c in context.items():
4399 if c[0].startswith('default_'):
4400 del rel_context[c[0]]
4403 for field in upd_todo:
4404 result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
4405 self._validate(cr, user, [id_new], context)
4407 if not context.get('no_store_function', False):
4408 result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
4411 for order, object, ids, fields2 in result:
4412 if not (object, ids, fields2) in done:
4413 self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
4414 done.append((object, ids, fields2))
4416 if self._log_create and not (context and context.get('no_store_function', False)):
4417 message = self._description + \
4419 self.name_get(cr, user, [id_new], context=context)[0][1] + \
4420 "' " + _("created.")
4421 self.log(cr, user, id_new, message, True, context=context)
4422 self._workflow_trigger(cr, user, [id_new], 'trg_create', context=context)
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
        """Fetch records as objects allowing to use dot notation to browse fields and relations

        :param cr: database cursor
        :param uid: current user id
        :param select: id or list of ids.
        :param context: context arguments, like lang, time zone
        :rtype: object or list of objects requested
        """
        self._list_class = list_class or browse_record_list
        # need to accepts ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        if isinstance(select, (int, long)):
            # NOTE(review): `cache` is initialized earlier in this method (not
            # visible in this excerpt) and is shared by all records built here
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            # one browse_record per requested id, all sharing the same cache
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
        # any other value (e.g. False/None) yields a null record
        return browse_null()
    def _store_get_values(self, cr, uid, ids, fields, context):
        """Returns an ordered list of fields.functions to call due to
        an update operation on ``fields`` of records with ``ids``,
        obtained by calling the 'store' functions of these fields,
        as setup by their 'store' attribute.

        :return: [(priority, model_name, [record_ids,], [function_fields,])]
        """
        if fields is None: fields = []
        stored_functions = self.pool._store_function.get(self._name, [])

        # use indexed names for the details of the stored_functions:
        model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)

        # only keep functions that should be triggered for the ``fields``
        # (an empty trigger_fields set means "always trigger")
        to_compute = [f for f in stored_functions \
                if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]

        for function in to_compute:
            # use admin user for accessing objects having rules defined on store fields
            target_ids = [id for id in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id]

            # the compound key must consider the priority and model name
            key = (function[priority_], function[model_name_])
            for target_id in target_ids:
                # NOTE(review): `mapping` is a dict initialized earlier in this
                # method (not visible in this excerpt)
                mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))

        # Here mapping looks like:
        # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
        #   (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
        #   (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... }

        # Now we need to generate the batch function calls list
        # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
        for ((priority,model), id_map) in mapping.iteritems():
            functions_ids_maps = {}
            # function_ids_maps =
            # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
            for id, functions in id_map.iteritems():
                functions_ids_maps.setdefault(tuple(functions), []).append(id)
            for functions, ids in functions_ids_maps.iteritems():
                call_map.setdefault((priority,model),[]).append((priority, model, ids,
                                                                [f[func_field_to_compute_] for f in functions]))
        ordered_keys = call_map.keys()
        # flatten the per-(priority, model) call lists into a single list
        result = reduce(operator.add, (call_map[k] for k in ordered_keys))
    def _store_set_values(self, cr, uid, ids, fields, context):
        """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
           respecting ``multi`` attributes), and stores the resulting values in the database directly."""
        # NOTE(review): several initialization/control-flow lines of this method
        # (e.g. the setup of field_dict/todo/keys/upd0/upd1) are not visible in
        # this excerpt; comments below only describe what the visible code shows.
        if self._log_access:
            # read the last write date of each record, used below to skip
            # fields whose stored value is still considered fresh
            cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
                    field_dict.setdefault(r[0], [])
                    res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
                    write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
                    for i in self.pool._store_function.get(self._name, []):
                            # i[5] is a freshness window in hours: skip recomputing
                            # the field while the stored value is recent enough
                            up_write_date = write_date + datetime.timedelta(hours=i[5])
                            if datetime.datetime.now() < up_write_date:
                                    field_dict[r[0]].append(i[1])
            # group fields by their `multi` attribute so multi-fields are
            # computed in a single call
            if self._columns[f]._multi not in keys:
                keys.append(self._columns[f]._multi)
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
                # use admin user for accessing objects having rules defined on store fields
                result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
                for id, value in result.items():
                        for f in value.keys():
                            if f in field_dict[id]:
                        if self._columns[v]._type == 'many2one':
                                # store only the database id of the m2o target
                                value[v] = value[v][0]
                        upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
                        upd1.append(self._columns[v]._symbol_set[1](value[v]))
                        cr.execute('update "' + self._table + '" set ' + \
                            ','.join(upd0) + ' where id = %s', upd1)
                    # use admin user for accessing objects having rules defined on store fields
                    result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
                    for r in result.keys():
                            if r in field_dict.keys():
                                if f in field_dict[r]:
                    for id, value in result.items():
                        if self._columns[f]._type == 'many2one':
                        cr.execute('update "' + self._table + '" set ' + \
                            '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
4580 def perm_write(self, cr, user, ids, fields, context=None):
4581 raise NotImplementedError(_('This method does not exist anymore'))
    # TODO: improve NULL handling here
    def _where_calc(self, cr, user, domain, active_test=True, context=None):
        """Computes the WHERE clause needed to implement an OpenERP domain.

        :param domain: the domain to compute
        :param active_test: whether the default filtering of records with ``active``
                            field set to ``False`` should be applied.
        :return: the query expressing the given domain as provided in domain
        :rtype: osv.query.Query
        """
        # if the object has a field named 'active', filter out all inactive
        # records unless they were explicitely asked for
        if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
                # the item[0] trick below works for domain items and '&'/'|'/'!'
                # operators alike
                if not any(item[0] == 'active' for item in domain):
                    domain.insert(0, ('active', '=', 1))
                # no explicit domain: restrict to active records only
                domain = [('active', '=', 1)]

            # translate the domain into SQL fragments through the expression parser
            e = expression.expression(cr, user, domain, self, context)
            tables = e.get_tables()
            where_clause, where_params = e.to_sql()
            where_clause = where_clause and [where_clause] or []
            # empty domain: no WHERE clause, only the model's own table
            where_clause, where_params, tables = [], [], ['"%s"' % self._table]

        return Query(tables, where_clause, where_params)
    def _check_qorder(self, word):
        """Validate an ORDER BY specification against ``regex_order``.

        :param word: the "order" specification to validate
        :raise except_orm: if ``word`` is not a comma-separated list of field
                           names, each optionally followed by asc/desc
        """
        if not regex_order.match(word):
            raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
    def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
        """Add what's missing in ``query`` to implement all appropriate ir.rules
        (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)

        :param query: the current query object
        """
        def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
            # merge one rule's (clause, params, tables) triple into ``query``
            if parent_model and child_object:
                # as inherited rules are being applied, we need to add the missing JOIN
                # to reach the parent table (if it was not JOINed yet in the query)
                child_object._inherits_join_add(child_object, parent_model, query)
            query.where_clause += added_clause
            query.where_clause_params += added_params
            for table in added_tables:
                if table not in query.tables:
                    query.tables.append(table)

        # apply main rules on the object
        rule_obj = self.pool.get('ir.rule')
        apply_rule(*rule_obj.domain_get(cr, uid, self._name, mode, context=context))

        # apply ir.rules from the parents (through _inherits)
        for inherited_model in self._inherits:
            kwargs = dict(parent_model=inherited_model, child_object=self) #workaround for python2.5
            apply_rule(*rule_obj.domain_get(cr, uid, inherited_model, mode, context=context), **kwargs)
    def _generate_m2o_order_by(self, order_field, query):
        """
        Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
        either native m2o fields or function/related fields that are stored, including
        intermediate JOINs for inheritance if required.

        :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
        """
        if order_field not in self._columns and order_field in self._inherit_fields:
            # also add missing joins for reaching the table containing the m2o field
            qualified_field = self._inherits_join_calc(order_field, query)
            order_field_column = self._inherit_fields[order_field][2]
            # field lives on this model's own table
            qualified_field = '"%s"."%s"' % (self._table, order_field)
            order_field_column = self._columns[order_field]

        assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
        if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
            # a non-stored function/related m2o has no SQL column to sort on
            _logger.debug("Many2one function/related fields must be stored " \
                "to be used as ordering fields! Ignoring sorting for %s.%s",
                self._name, order_field)

        # figure out the applicable order_by for the m2o
        dest_model = self.pool.get(order_field_column._obj)
        m2o_order = dest_model._order
        if not regex_order.match(m2o_order):
            # _order is complex, can't use it here, so we default to _rec_name
            m2o_order = dest_model._rec_name
            # extract the field names, to be able to qualify them and add desc/asc
            for order_part in m2o_order.split(","):
                m2o_order_list.append(order_part.strip().split(" ",1)[0].strip())
            m2o_order = m2o_order_list

        # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
        # as we don't want to exclude results that have NULL values for the m2o
        src_table, src_field = qualified_field.replace('"','').split('.', 1)
        query.join((src_table, dest_model._table, src_field, 'id'), outer=True)
        qualify = lambda field: '"%s"."%s"' % (dest_model._table, field)
        return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
    def _generate_order_by(self, order_spec, query):
        """
        Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
        a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.

        :raise except_orm: in case order_spec is malformed
        """
        # fall back to the model's default order when nothing usable is built
        order_by_clause = self._order
            order_by_elements = []
            self._check_qorder(order_spec)
            for order_part in order_spec.split(','):
                order_split = order_part.strip().split(' ')
                order_field = order_split[0].strip()
                order_direction = order_split[1].strip() if len(order_split) == 2 else ''
                if order_field == 'id':
                    # always sortable directly on the model's own table
                    order_by_clause = '"%s"."%s"' % (self._table, order_field)
                elif order_field in self._columns:
                    order_column = self._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = '"%s"."%s"' % (self._table, order_field)
                    elif order_column._type == 'many2one':
                        # delegate to the m2o helper (may add JOINs to query)
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                        continue # ignore non-readable or "non-joinable" fields
                elif order_field in self._inherit_fields:
                    parent_obj = self.pool.get(self._inherit_fields[order_field][3])
                    order_column = parent_obj._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = self._inherits_join_calc(order_field, query)
                    elif order_column._type == 'many2one':
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                        continue # ignore non-readable or "non-joinable" fields
                if isinstance(inner_clause, list):
                    # an m2o may expand to several qualified columns
                    for clause in inner_clause:
                        order_by_elements.append("%s %s" % (clause, order_direction))
                    order_by_elements.append("%s %s" % (inner_clause, order_direction))
            if order_by_elements:
                order_by_clause = ",".join(order_by_elements)

        return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
        This is ok at the security level because this method is private and not callable through XML-RPC.

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        self.check_access_rights(cr, access_rights_uid or user, 'read')

        # For transient models, restrict access to the current user, except for the super-user
        if self.is_transient() and self._log_access and user != SUPERUSER_ID:
            args = expression.AND(([('create_uid', '=', user)], args or []))

        # build the SQL pieces: domain WHERE, ir.rules, ORDER BY
        query = self._where_calc(cr, user, args, context=context)
        self._apply_ir_rules(cr, user, query, 'read', context=context)
        order_by = self._generate_order_by(order, query)
        from_clause, where_clause, where_clause_params = query.get_sql()

        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        where_str = where_clause and (" WHERE %s" % where_clause) or ''

            # count mode: only the number of matching records is requested
            cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
            # normal mode: fetch the matching ids in the requested order
            cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
        # NOTE(review): `res` is populated from cr.fetchall() on a line not
        # visible in this excerpt
        return [x[0] for x in res]
4776 # returns the different values ever entered for one field
# this is used, for example, in the client when the user hits enter on
# a field, to look up the distinct values already stored for it
    def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
        """Return the distinct values already stored for ``field``.

        Inherited fields are delegated to the model that actually owns them.
        """
        if field in self._inherit_fields:
            # inherited field: forward the call to the owning parent model
            return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
        return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
    def copy_data(self, cr, uid, id, default=None, context=None):
        """
        Copy given record's data with all its fields values

        :param cr: database cursor
        :param user: current user id
        :param id: id of the record to copy
        :param default: field values to override in the original values of the copied record
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: dictionary containing all the field values
        """
        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_data_seen',{})
        if id in seen_map.setdefault(self._name,[]):
        seen_map[self._name].append(id)

        # a 'state' default is derived from the model's own defaults, so the
        # copy starts in the default workflow state rather than the original's
        if 'state' not in default:
            if 'state' in self._defaults:
                if callable(self._defaults['state']):
                    default['state'] = self._defaults['state'](self, cr, uid, context)
                    default['state'] = self._defaults['state']

        # read source values without lang so translations are not copied as-is
        context_wo_lang = context.copy()
        if 'lang' in context:
            del context_wo_lang['lang']
        data = self.read(cr, uid, [id,], context=context_wo_lang)
            raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))

        # build a black list of fields that should not be copied
        blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
        def blacklist_given_fields(obj):
            # blacklist the fields that are given by inheritance
            for other, field_to_other in obj._inherits.items():
                blacklist.add(field_to_other)
                if field_to_other in default:
                    # all the fields of 'other' are given by the record: default[field_to_other],
                    # except the ones redefined in self
                    blacklist.update(set(self.pool.get(other)._all_columns) - set(self._columns))
                    blacklist_given_fields(self.pool.get(other))
        blacklist_given_fields(self)

        # build the result values field by field, skipping blacklisted and
        # function fields, and converting relational values to write() commands
        for f, colinfo in self._all_columns.items():
            field = colinfo.column
            elif f in blacklist:
            elif isinstance(field, fields.function):
            elif field._type == 'many2one':
                res[f] = data[f] and data[f][0]
            elif field._type == 'one2many':
                other = self.pool.get(field._obj)
                # duplicate following the order of the ids because we'll rely on
                # it later for copying translations in copy_translation()!
                lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
                # the lines are duplicated using the wrong (old) parent, but then
                # are reassigned to the correct one thanks to the (0, 0, ...)
                res[f] = [(0, 0, line) for line in lines if line]
            elif field._type == 'many2many':
                # (6, 0, ids) command: replace the whole relation
                res[f] = [(6, 0, data[f])]
    def copy_translations(self, cr, uid, old_id, new_id, context=None):
        """Duplicate the ir.translation records of ``old_id`` onto ``new_id``,
        recursing into one2many children (relying on the child ordering
        established by copy_data())."""
        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_translations_seen',{})
        if old_id in seen_map.setdefault(self._name,[]):
        seen_map[self._name].append(old_id)

        trans_obj = self.pool.get('ir.translation')
        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fields = self.fields_get(cr, uid, context=context)

        translation_records = []
        for field_name, field_def in fields.items():
            # we must recursively copy the translations for o2o and o2m
            if field_def['type'] == 'one2many':
                target_obj = self.pool.get(field_def['relation'])
                old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
                # here we rely on the order of the ids to match the translations
                # as foreseen in copy_data()
                old_children = sorted(old_record[field_name])
                new_children = sorted(new_record[field_name])
                for (old_child, new_child) in zip(old_children, new_children):
                    target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
            # and for translatable fields we keep them for copy
            elif field_def.get('translate'):
                if field_name in self._columns:
                    trans_name = self._name + "," + field_name
                elif field_name in self._inherit_fields:
                    # inherited field: the translation belongs to the parent model
                    trans_name = self._inherit_fields[field_name][0] + "," + field_name
                trans_ids = trans_obj.search(cr, uid, [
                        ('name', '=', trans_name),
                        ('res_id', '=', old_id)
                translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))

        # recreate the collected translations against the new record id
        for record in translation_records:
            record['res_id'] = new_id
            trans_obj.create(cr, uid, record, context=context)
    def copy(self, cr, uid, id, default=None, context=None):
        """
        Duplicate record with given id updating it with default values

        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to copy
        :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: id of the newly created record
        """
        # work on a copy of the context: copy_data/copy_translations store
        # recursion-guard maps in it (see '__copy_data_seen')
        context = context.copy()
        data = self.copy_data(cr, uid, id, default, context)
        new_id = self.create(cr, uid, data, context)
        self.copy_translations(cr, uid, id, new_id, context)
    def exists(self, cr, uid, ids, context=None):
        """Checks whether the given id or ids exist in this model,
        and return the list of ids that do. This is simple to use for
        a truth test on a browse_record::

        :param ids: id or list of ids to check for existence
        :type ids: int or [int]
        :return: the list of ids that currently exist, out of
        """
        # a single id is normalized to a one-element list (the wrapping
        # statement is not visible in this excerpt)
        if type(ids) in (int, long):
        query = 'SELECT id FROM "%s"' % (self._table)
        # NOTE(review): no space before WHERE -- PostgreSQL still lexes
        # `"table"WHERE` correctly since the closing quote ends the identifier,
        # but adding a space would be clearer
        cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
        return [x[0] for x in cr.fetchall()]
    def check_recursion(self, cr, uid, ids, context=None, parent=None):
        """Deprecated public wrapper around :meth:`_check_recursion`; logs a
        warning and delegates to the private implementation."""
        _logger.warning("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
        assert parent is None or parent in self._columns or parent in self._inherit_fields,\
            "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
        return self._check_recursion(cr, uid, ids, context, parent)
    def _check_recursion(self, cr, uid, ids, context=None, parent=None):
        """
        Verifies that there is no loop in a hierarchical structure of records,
        by following the parent relationship using the **parent** field until a loop
        is detected or until a top-level record is found.

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids of records to check
        :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
        :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
        """
            # no explicit field given: follow the model's default parent field
            parent = self._parent_name
        query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
            # climb one level of the hierarchy, batching ids to respect the
            # cursor's IN-clause size limit
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
                cr.execute(query, (tuple(sub_ids_parent),))
                ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
            ids_parent = ids_parent2
            for i in ids_parent:
    def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
        """Retrieve the External ID(s) of any database record.

        **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``

        :return: map of ids to the list of their fully qualified External IDs
                 in the form ``module.key``, or an empty list when there's no External
                 ID for a record, e.g.::

                     { 'id': ['module.ext_id', 'module.ext_id_bis'],
                     }
        """
        ir_model_data = self.pool.get('ir.model.data')
        data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
        data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
        # can't use dict.fromkeys() as the list would be shared!
        # NOTE(review): `result` is initialized with one fresh list per id on a
        # line not visible in this excerpt
        for record in data_results:
            result[record['res_id']].append('%(module)s.%(name)s' % record)
    def get_external_id(self, cr, uid, ids, *args, **kwargs):
        """Retrieve the External ID of any database record, if there
        is one. This method works as a possible implementation
        for a function field, to be able to add it to any
        model object easily, referencing it as ``Model.get_external_id``.

        When multiple External IDs exist for a record, only one
        of them is returned (randomly).

        :return: map of ids to their fully qualified XML ID,
                 defaulting to an empty string when there's none
                 (to be usable as a function field),
                 e.g.::

                     { 'id': 'module.ext_id',
                     }
        """
        results = self._get_xml_ids(cr, uid, ids)
        # collapse each id's list of external ids into a single value
        for k, v in results.iteritems():
    # backwards compatibility: legacy names for the external-id accessors
    get_xml_id = get_external_id
    _get_xml_ids = _get_external_ids
    def is_transient(self):
        """ Return whether the model is transient.

        See :class:`TransientModel`.

        :return: the value of ``self._transient``
        """
        return self._transient
5052 def _transient_clean_rows_older_than(self, cr, seconds):
5053 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5054 cr.execute("SELECT id FROM " + self._table + " WHERE"
5055 " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp <"
5056 " ((now() at time zone 'UTC') - interval %s)", ("%s seconds" % seconds,))
5057 ids = [x[0] for x in cr.fetchall()]
5058 self.unlink(cr, SUPERUSER_ID, ids)
    def _transient_clean_old_rows(self, cr, count):
        """Unlink (as superuser) the ``count`` oldest records of this transient
        model, ordering by last write date (falling back to creation date)."""
        assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
            "SELECT id, COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
            " AS t FROM " + self._table +
            " ORDER BY t LIMIT %s", (count,))
        ids = [x[0] for x in cr.fetchall()]
        self.unlink(cr, SUPERUSER_ID, ids)
    def _transient_vacuum(self, cr, uid, force=False):
        """Clean the transient records.

        This unlinks old records from the transient model tables whenever the
        "_transient_max_count" or "_max_age" conditions (if any) are reached.
        Actual cleaning will happen only once every "_transient_check_time" calls.
        This means this method can be called frequently called (e.g. whenever
        a new record is created).

        :param force: when True, clean immediately regardless of the call counter
        """
        assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
        self._transient_check_count += 1
        # throttle: only run the actual cleanup once every
        # _transient_check_time calls, unless forced
        if (not force) and (self._transient_check_count % self._transient_check_time):
            self._transient_check_count = 0

        # Age-based expiration
        if self._transient_max_hours:
            self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)

        # Count-based expiration
        if self._transient_max_count:
            self._transient_clean_old_rows(cr, self._transient_max_count)
    def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None):
        """ Serializes one2many and many2many commands into record dictionaries
        (as if all the records came from the database via a read()). This
        method is aimed at onchange methods on one2many and many2many fields.

        Because commands might be creation commands, not all record dicts
        will contain an ``id`` field. Commands matching an existing record
        will have an ``id``.

        :param field_name: name of the one2many or many2many field matching the commands
        :type field_name: str
        :param commands: one2many or many2many commands to execute on ``field_name``
        :type commands: list((int|False, int|False, dict|False))
        :param fields: list of fields to read from the database, when applicable
        :type fields: list(str)
        :returns: records in a shape similar to that returned by ``read()``
            (except records may be missing the ``id`` field if they don't exist in db)
        """
        result = [] # result (list of dict)
        record_ids = [] # ids of records to read
        updates = {} # {id: dict} of updates on particular records

        # command codes: 0=create, 1=update, 2=delete, 3=unlink,
        # 4=link, 5=unlink all, 6=replace all
        for command in commands:
            if not isinstance(command, (list, tuple)):
                # a bare id is treated as a plain link
                record_ids.append(command)
            elif command[0] == 0:
                result.append(command[2])
            elif command[0] == 1:
                record_ids.append(command[1])
                updates.setdefault(command[1], {}).update(command[2])
            elif command[0] in (2, 3):
                record_ids = [id for id in record_ids if id != command[1]]
            elif command[0] == 4:
                record_ids.append(command[1])
            elif command[0] == 5:
                result, record_ids = [], []
            elif command[0] == 6:
                result, record_ids = [], list(command[2])

        # read the records and apply the updates
        other_model = self.pool.get(self._all_columns[field_name].column._obj)
        for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
            record.update(updates.get(record['id'], {}))
            result.append(record)
    # for backward compatibility: old name of resolve_2many_commands
    resolve_o2m_commands_to_record_dicts = resolve_2many_commands
# keep this import here; importing it at the top of the file would cause dependency cycle errors
class Model(BaseModel):
    """Main super-class for regular database-persisted OpenERP models.

    OpenERP models are created by inheriting from this class.

    The system will later instantiate the class once per database (on
    which the class' module is installed).
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = False # True in a TransientModel
class TransientModel(BaseModel):
    """Model super-class for transient records, meant to be temporarily
    persisted, and regularly vaccuum-cleaned.

    A TransientModel has a simplified access rights management,
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
    to all TransientModel records.
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
class AbstractModel(BaseModel):
    """Abstract Model super-class for creating an abstract class meant to be
    inherited by regular models (Models or TransientModels) but not meant to
    be usable on its own, or persisted.

    Technical note: we don't want to make AbstractModel the super-class of
    Model or BaseModel because it would not make sense to put the main
    definition of persistence methods such as create() in it, and still we
    should be able to override them within an AbstractModel.
    """
    _auto = False # don't create any database backend for AbstractModels
    _register = False # not visible in ORM registry, meant to be python-inherited only
def itemgetter_tuple(items):
    """ Fixes itemgetter inconsistency (useful in some cases) of not returning
    a tuple if len(items) == 1: always returns an n-tuple where n = len(items)
    """
    # NOTE(review): the guards selecting between the two return statements
    # (empty/single-item cases) are not visible in this excerpt
    return lambda gettable: (gettable[items[0]],)
    return operator.itemgetter(*items)
# NOTE(review): this class shadows the builtin ImportWarning in this module
class ImportWarning(Warning):
    """ Used to send warnings upwards the stack during the import process
    """
def convert_pgerror_23502(model, fields, info, e):
    """Convert a PostgreSQL not-null-violation error (SQLSTATE 23502) into a
    user-readable message dict naming the offending field, falling back to the
    raw error text when the field cannot be identified."""
    m = re.match(r'^null value in column "(?P<field>\w+)" violates '
                 r'not-null constraint\n',
    if not m or m.group('field') not in fields:
        # unknown column or unparseable message: return the raw error
        return {'message': unicode(e)}
    field = fields[m.group('field')]
        'message': _(u"Missing required value for the field '%(field)s'") % {
            'field': field['string']
        'field': m.group('field'),
# Mapping of PostgreSQL SQLSTATE codes to converter callables producing
# user-readable message dicts; unknown codes fall back to the raw error text.
PGERROR_TO_OE = collections.defaultdict(
    # shape of mapped converters
    lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), {
    # not_null_violation
    '23502': convert_pgerror_23502,
5224 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: