1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
22 #.apidoc title: Object Relational Mapping
23 #.apidoc module-mods: member-order: bysource
26 Object relational mapping to database (postgresql) module
27 * Hierarchical structure
28 * Constraints consistency, validations
29 * Object meta Data depends on its status
30 * Optimised processing by complex query (multiple actions at once)
31 * Default fields value
32 * Permissions optimisation
* Persistent object: DB postgresql
35 * Multi-level caching system
* 2 different inheritance mechanisms
38 - classicals (varchar, integer, boolean, ...)
39 - relations (one2many, many2one, many2many)
60 from lxml import etree
64 import openerp.netsvc as netsvc
65 import openerp.tools as tools
66 from openerp.tools.config import config
67 from openerp.tools.misc import CountingStream
68 from openerp.tools.safe_eval import safe_eval as eval
69 from openerp.tools.translate import _
70 from openerp import SUPERUSER_ID
71 from query import Query
73 _logger = logging.getLogger(__name__)
74 _schema = logging.getLogger(__name__ + '.schema')
76 # List of etree._Element subclasses that we choose to ignore when parsing XML.
77 from openerp.tools import SKIPPED_ELEMENT_TYPES
# Accepts a comma-separated ORDER BY spec of lowercase (optionally quoted)
# identifiers, each optionally followed by "asc"/"desc" (case-insensitive).
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Valid model (_name) identifier: lowercase alphanumerics, '_' and '.' only.
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
def transfer_field_to_modifiers(field, modifiers):
    """Derive the ``invisible``/``readonly``/``required`` modifiers from a
    field descriptor (as returned by ``fields_get()``) and store them into
    *modifiers*, which is mutated in place.

    Plain attribute values become booleans; entries of the field's
    ``states`` map that differ from the default become
    ``[('state', 'in'/'not in', [...])]`` domains.
    """
    defaults = {}
    exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        exceptions[attr] = []
        defaults[attr] = bool(field.get(attr))

    # Record every state in which an attribute is overridden away from
    # its default value.
    for state, overrides in field.get("states", {}).items():
        for override in overrides:
            if defaults[override[0]] != override[1]:
                exceptions[override[0]].append(state)

    for attr, default in defaults.items():
        states = exceptions[attr]
        if states:
            # The default flips for the listed states, so express the
            # modifier as a domain on the 'state' field.
            operator = "not in" if default else "in"
            modifiers[attr] = [("state", operator, states)]
        else:
            modifiers[attr] = default
100 # Don't deal with groups, it is done by check_group().
101 # Need the context to evaluate the invisible attribute on tree views.
102 # For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """Merge view-node attributes (``attrs``, ``states`` and the literal
    ``invisible``/``readonly``/``required`` attributes) into *modifiers*,
    mutated in place.

    :param node: view architecture node (anything exposing ``get(name)``)
    :param dict modifiers: accumulator, typically pre-filled by
                           ``transfer_field_to_modifiers()``
    :param dict context: evaluation context for the literal attributes
    :param bool in_tree_view: route ``invisible`` to ``tree_invisible``
                              (column-level invisibility)
    """
    attrs = node.get('attrs')
    if attrs:
        modifiers.update(eval(attrs))

    states = node.get('states')
    if states:
        state_domain = ('state', 'not in', states.split(','))
        if isinstance(modifiers.get('invisible'), list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(state_domain)
        else:
            modifiers['invisible'] = [state_domain]

    for attr in ('invisible', 'readonly', 'required'):
        expr = node.get(attr)
        if expr:
            value = bool(eval(expr, {'context': context or {}}))
            if in_tree_view and attr == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = value
            elif value or (attr not in modifiers or not isinstance(modifiers[attr], list)):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[attr] = value
def simplify_modifiers(modifiers):
    """Drop falsy 'invisible'/'readonly'/'required' entries from *modifiers*
    in place: a missing key already means "off", so keeping False is noise.
    """
    for key in ('invisible', 'readonly', 'required'):
        # Default of True means "absent keys are left alone".
        if not modifiers.get(key, True):
            del modifiers[key]
def transfer_modifiers_to_node(modifiers, node):
    """Serialize a non-empty *modifiers* dict into the node's ``modifiers``
    attribute as JSON, simplifying it first; empty modifiers write nothing.
    """
    if not modifiers:
        return
    simplify_modifiers(modifiers)
    node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """Compute the ``modifiers`` attribute of *node* from its own attributes
    and (optionally) the corresponding field descriptor, then set it on the
    node. Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    """
    modifiers = {}
    # Field-level defaults first, then node-level attributes override them.
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """Assert that the JSON-serialized modifiers computed from *what*
    (an XML architecture string, or a field-descriptor dict as returned
    by ``fields_get()``) equal the *expected* JSON string.
    """
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
    else:
        # Unsupported input type: nothing to check.
        return
    simplify_modifiers(modifiers)
    json = simplejson.dumps(modifiers)
    assert json == expected, "%s != %s" % (json, expected)
182 # openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    # Manual self-test of the modifiers pipeline above (see the comment
    # preceding this function for how to invoke it from a python prompt).
    # Each call asserts the JSON serialization of the computed modifiers.
    test_modifiers('<field name="a"/>', '{}')
    test_modifiers('<field name="a" invisible="1"/>', '{"invisible": true}')
    test_modifiers('<field name="a" readonly="1"/>', '{"readonly": true}')
    test_modifiers('<field name="a" required="1"/>', '{"required": true}')
    test_modifiers('<field name="a" invisible="0"/>', '{}')
    test_modifiers('<field name="a" readonly="0"/>', '{}')
    test_modifiers('<field name="a" required="0"/>', '{}')
    test_modifiers('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}') # TODO order is not guaranteed
    test_modifiers('<field name="a" invisible="1" required="0"/>', '{"invisible": true}')
    test_modifiers('<field name="a" invisible="0" required="1"/>', '{"required": true}')
    test_modifiers("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}')

    # The dictionary is supposed to be the result of fields_get().
    test_modifiers({}, '{}')
    test_modifiers({"invisible": True}, '{"invisible": true}')
    test_modifiers({"invisible": False}, '{}')
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

    The _name attribute in osv and osv_memory object is subject to
    some restrictions. This function returns True or False whether
    the given name is allowed or not.

    TODO: this is an approximation. The goal in this approximation
    is to disallow uppercase characters (in some places, we quote
    table/column names and in other not, which leads to this kind
    of errors:

        psycopg2.ProgrammingError: relation "xxx" does not exist).

    The same restriction should apply to both osv and osv_memory
    objects for consistency.
    """
    return regex_object_name.match(name) is not None
def raise_on_invalid_object_name(name):
    """Log an error and raise ``except_orm`` when *name* is not a valid
    model name (see :func:`check_object_name`)."""
    if check_object_name(name):
        return
    msg = "The _name attribute %s is not valid." % name
    _logger.error(msg)
    raise except_orm('ValueError', msg)
# NOTE(review): the dict entries are elided in this excerpt; presumably maps
# ON DELETE action keywords to pg_constraint.confdeltype codes -- confirm
# against the upstream source.
POSTGRES_CONFDELTYPES = {
def intersect(la, lb):
    """Return the elements of *la* that also appear in *lb*, preserving
    *la*'s order (same observable result as the previous filter-based
    version for the hashable elements this helper is used with).

    Uses a set for membership testing, turning the accidental O(n*m)
    scan into O(n + m).
    """
    lb_set = set(lb)
    return [x for x in la if x in lb_set]
def fix_import_export_id_paths(fieldname):
    """Split an import/export field path into its components, normalizing
    the database-id (``.id``) and external-id (``:id``) suffix notations
    into ``/``-separated path steps first.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    # 'foo.id' -> 'foo/.id' and 'foo:id' -> 'foo/id', but only when the
    # suffix is not already preceded by a path separator.
    with_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    with_external_id = re.sub(r'([^/]):id', r'\1/id', with_db_id)
    return with_external_id.split('/')
class except_orm(Exception):
    """Generic ORM exception carrying a ``(name, value)`` pair, e.g.
    ``except_orm('ValueError', 'explanation')``.
    """
    def __init__(self, name, value):
        self.name = name
        self.value = value
        self.args = (name, value)
# Raised for invalid browse_record usage (the id/existence checks that raised
# it appear commented out in browse_record.__init__ below).
class BrowseRecordError(Exception):
class browse_null(object):
    """ Readonly python database object browser: stands in for a missing or
    unset relational value. Attribute access returns None (see __getattr__);
    the remaining method bodies are elided in this excerpt.
    """

    def __getitem__(self, name):
        # NOTE(review): original body elided in this excerpt.

    def __getattr__(self, name):
        # Any attribute silently resolves to None.
        return None # XXX: return self ?

    def __nonzero__(self):
        # Python 2 truthiness hook.
        # NOTE(review): original body elided in this excerpt; presumably
        # returns False (browse_null is used as a falsy placeholder).

    def __unicode__(self):
        # NOTE(review): original body elided in this excerpt.
290 # TODO: execute an object method on browse_record_list
class browse_record_list(list):
    """ Collection of browse objects

    Such an instance will be returned when doing a ``browse([ids..])``
    and will be iterable, yielding browse() objects
    """

    def __init__(self, lst, context=None):
        """Wrap *lst* and remember the (possibly empty) *context* dict."""
        self.context = context or {}
        super(browse_record_list, self).__init__(lst)
class browse_record(object):
    """ An object that behaves like a row of an object's table.
        It has attributes after the columns of the corresponding object.

        Example::

            uobj = pool.get('res.users')
            user_rec = uobj.browse(cr, uid, 104)
    """
    # NOTE(review): this excerpt elides several lines of the original class
    # (else branches, guards and some assignments); elided spots are marked
    # with "(elided)" comments below.

    def __init__(self, cr, uid, id, table, cache, context=None,
                 list_class=browse_record_list, fields_process=None):
        """
        :param table: the browsed object (inherited from orm)
        :param dict cache: a dictionary of model->field->data to be shared
                           across browse objects, thus reducing the SQL
                           read()s. It can speed up things a lot, but also be
                           disastrous if not discarded after write()/unlink()
        :param dict context: dictionary with an optional context
        """
        if fields_process is None:
            # (elided: presumably fields_process = {})
        # (elided: assignments of _cr/_uid/_id/_cache/_model, context default)
        self._list_class = list_class
        self._table = table # deprecated, use _model!
        self._table_name = self._table._name
        self.__logger = logging.getLogger('openerp.osv.orm.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        # All records of the same model share one per-model cache bucket.
        cache.setdefault(table._name, {})
        self._data = cache[table._name]

#        if not (id and isinstance(id, (int, long,))):
#            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))

        # Seed the cache entry so 'id' is always available without a read().
        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        # Lazily fetch field `name`, reading batches of classic/many2one
        # fields at once for all the ids sharing this cache, and wrapping
        # relational values in browse_record/browse_record_list/browse_null.
        if name not in self._data[self._id]:
            # build the list of fields we will fetch

            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                attr = getattr(self._table, name)
                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    def function_proxy(*args, **kwargs):
                        # Bind the model method to this record's cr/uid/id,
                        # propagating the record's context by default.
                        if 'context' not in kwargs and self._context:
                            kwargs.update(context=self._context)
                        return attr(self._cr, self._uid, [self._id], *args, **kwargs)
                    return function_proxy
                # (elided: non-callable attribute branch)
            # (elided: else branch for unknown field)
                error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
                self.__logger.warning(error_msg)
                if self.__logger.isEnabledFor(logging.DEBUG):
                    self.__logger.debug(''.join(traceback.format_stack()))
                raise KeyError(error_msg)

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            # (elided: branch condition on col._prefetch / _classic_write)
                # gen the list of "local" (ie not inherited) fields which are classic or many2one
                fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
                # gen the list of inherited fields
                inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
                # complete the field list with the inherited fields which are classic or many2one
                fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
            # otherwise we fetch only that field
                fields_to_fetch = [(name, col)]

            # Only read ids whose cache entry is still missing this field.
            ids = filter(lambda id: name not in self._data[id], self._data.keys())

            field_names = map(lambda x: x[0], fields_to_fetch)
            field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")

            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                # (elided: guard for empty lang_obj_ids)
                    raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

            # (elided: guard for read() returning nothing for non-empty ids)
                # Where did those ids come from? Perhaps old entries in ir_model_dat?
                _logger.warning("No field_values found for ids %s in %s", ids, self)
                raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                # (elided: new_data = {})
                for field_name, field_column in fields_to_fetch:
                    if field_column._type == 'many2one':
                        if result_line[field_name]:
                            obj = self._table.pool.get(field_column._obj)
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                            # (elided: else branch)
                                value = result_line[field_name]
                            # FIXME: this happen when a _inherits object
                            #        overwrite a field of it parent. Need
                            #        testing to be sure we got the right
                            #        object and not the parent one.
                            if not isinstance(value, browse_record):
                                # (elided: guard on obj being available)
                                # In some cases the target model is not available yet, so we must ignore it,
                                # which is safe in most cases, this value will just be loaded later when needed.
                                # This situation can be caused by custom fields that connect objects with m2o without
                                # respecting module dependencies, causing relationships to be connected to soon when
                                # the target is not loaded yet.
                                    new_data[field_name] = browse_record(self._cr,
                                        self._uid, value, obj, self._cache,
                                        context=self._context,
                                        list_class=self._list_class,
                                        fields_process=self._fields_process)
                            # (elided: else branch, value already a browse_record)
                                new_data[field_name] = value
                            # (elided: else branch, target model missing)
                                new_data[field_name] = browse_null()
                        # (elided: else branch, empty m2o value)
                            new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type == 'reference':
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                            # (elided: else branch parsing 'model,id' strings)
                                ref_obj, ref_id = result_line[field_name].split(',')
                                ref_id = long(ref_id)
                                # (elided: guard on ref_id truthiness)
                                    obj = self._table.pool.get(ref_obj)
                                    new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                                # (elided: else branch)
                                    new_data[field_name] = browse_null()
                        # (elided: else branch, empty reference)
                            new_data[field_name] = browse_null()
                    # (elided: else branch: classic value stored as-is)
                        new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

            if not name in self._data[self._id]:
                # How did this happen? Could be a missing model due to custom fields used too soon, see above.
                self.__logger.error("Fields to fetch: %s, Field values: %s", field_names, field_values)
                self.__logger.error("Cached: %s, Table: %s", self._data[self._id], self._table)
                raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
        # Delegates attribute access to __getitem__; the try/except turning
        # KeyError into AttributeError is partially elided in this excerpt.
            raise AttributeError(e)

    def __contains__(self, name):
        # A name is "in" the record if it is a column, an inherited field,
        # or any attribute/method of the model.
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

    # (elided: def __iter__(self):)
        raise NotImplementedError("Iteration is not allowed on %s" % self)

    def __hasattr__(self, name):
        # NOTE(review): not a real Python protocol method; body elided here.

    # (elided: def __repr__ / __str__:)
        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        # Two records are equal when they reference the same model row.
        if not isinstance(other, browse_record):
            # (elided: return for the non-browse_record case)
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        if not isinstance(other, browse_record):
            # (elided: return for the non-browse_record case)
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

    # (elided: def __hash__(self):) hashes like __eq__ compares
        return hash((self._table_name, self._id))

    # (elided: def refresh(self):)
        """Force refreshing this browse_record's data and all the data of the
        records that belong to the same cache, by emptying the cache completely,
        preserving only the record identifiers (for prefetching optimizations).
        """
        for model, model_cache in self._cache.iteritems():
            # only preserve the ids of the records that were in the cache
            cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
            self._cache[model].clear()
            self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """Return the SQL VARCHAR declaration for *size*.

    A falsy or negative size yields an unbounded ``VARCHAR``; a positive
    integer yields ``VARCHAR(n)``; a truthy non-int raises TypeError.

    :type int size: varchar size, optional
    :rtype: str
    """
    if not size:
        return 'VARCHAR'
    if not isinstance(size, int):
        raise TypeError("VARCHAR parameter should be an int, got %s"
                        % type(size))
    if size > 0:
        return 'VARCHAR(%d)' % size
    return 'VARCHAR'
# 1:1 mapping of simple field classes to their PostgreSQL storage type,
# consumed by get_pg_type() below.
# NOTE(review): some entries (and the closing brace) are elided in this
# excerpt -- confirm the full mapping against the upstream source.
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    fields.serialized: 'text',
def get_pg_type(f, type_override=None):
    """
    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification)
    """
    # NOTE(review): several else branches are elided in this excerpt; they
    # are marked with "(elided)" comments below.
    field_type = type_override or type(f)

    if field_type in FIELDS_TO_PGTYPES:
        # Simple field types map 1:1 via the table above.
        pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
    elif issubclass(field_type, fields.float):
        # (elided: condition selecting NUMERIC vs DOUBLE PRECISION --
        # presumably based on f.digits; confirm upstream)
            pg_type = ('numeric', 'NUMERIC')
            pg_type = ('float8', 'DOUBLE PRECISION')
    elif issubclass(field_type, (fields.char, fields.reference)):
        pg_type = ('varchar', pg_varchar(f.size))
    elif issubclass(field_type, fields.selection):
        # Integer-keyed selections (or size == -1) are stored as INTEGER,
        # everything else as VARCHAR.
        if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\
                or getattr(f, 'size', None) == -1:
            pg_type = ('int4', 'INTEGER')
        # (elided: else branch)
            pg_type = ('varchar', pg_varchar(getattr(f, 'size', None)))
    elif issubclass(field_type, fields.function):
        if f._type == 'selection':
            pg_type = ('varchar', pg_varchar())
        # (elided: else branch) dispatch on the function field's underlying
        # _type by recursing with a type_override
            pg_type = get_pg_type(f, getattr(fields, f._type))
    # (elided: else branch for unsupported types)
        _logger.warning('%s type not supported!', field_type)
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instantiating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.
    """

    # Shared registry: OpenERP module name -> list of model classes it defines.
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            # NOTE(review): an early return after the super() call is elided
            # in this excerpt -- non-registered classes should not fall
            # through to the registration code below.

        # The (OpenERP) module name can be in the `openerp.addons` namespace
        # or not. For instance module `sale` can be imported as
        # `openerp.addons.sale` (the good way) or `sale` (for backward
        # compatibility).
        module_parts = self.__module__.split('.')
        if len(module_parts) > 2 and module_parts[0] == 'openerp' and \
                module_parts[1] == 'addons':
            module_name = self.__module__.split('.')[2]
        # (elided: else branch)
            module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# (list() keeps this correct whether keys() yields a list (py2) or a view (py3))
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys())
class BaseModel(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class' subclasses:

    * Model: for regular database-persisted models
    * TransientModel: for temporary data, stored in the database but automatically
      vacuumed every so often
    * AbstractModel: for abstract super classes meant to be shared by multiple
      _inheriting classes (usually Models or TransientModels)

    The system will later instantiate the class once per database (on
    which the class' module is installed).

    To create a class that should not be instantiated, the _register class attribute
    may be set to False.
    """
    # NOTE(review): several class-attribute declarations (_name, _columns,
    # _inherits, _constraints, _defaults, ...) are elided in this excerpt.
    __metaclass__ = MetaModel
    _auto = True # create database backend
    _register = False # Set to false if the model shouldn't be automatically discovered.
    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    # dict of {field:method}, with method returning the (name_get of records, {id: fold})
    # to include in the _read_group, if grouped on this field

    _transient = False # True in a TransientModel

    # { 'parent_model': 'm2o_field', ... }

    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model, e.g.:
    # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #                  field_column_obj, origina_parent_model), ... }

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.

    _sql_constraints = []
    # Method names that must never be exposed as columns/fields.
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']

    # Pseudo-field used by clients for optimistic concurrency checks.
    CONCURRENCY_CHECK_FIELD = '__last_update'
    def log(self, cr, uid, id, message, secondary=False, context=None):
        # Deprecated shim: the res.log mechanism is gone; this only emits a
        # deprecation warning (and returns that call's result).
        return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.")
    def view_init(self, cr, uid, fields_list, context=None):
        """Override this method to do specific things when a view on the object is opened."""
        # (body elided in this excerpt; the default implementation is a no-op)
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).
        """
        # NOTE(review): several guards and else branches are elided in this
        # excerpt; elided spots are marked with "(elided)" comments.
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
        # (elided: branch on the SELECT result -- create the ir_model row
        # when missing, otherwise reuse the existing id)
            cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
            model_id = cr.fetchone()[0]
            cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
            model_id = cr.fetchone()[0]
        if 'module' in context:
            # Give the model an external id under the loading module.
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
            # (elided: only insert when no ir_model_data row exists yet)
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                    (name_id, context['module'], 'ir.model', model_id)
        # Snapshot the existing ir_model_fields rows for this model.
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        ir_model_fields_obj = self.pool.get('ir.model.fields')

        # sparse field should be created at the end, as it depends on its serialized field already existing
        model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0)
        for (k, f) in model_fields:
            # (elided: opening of the vals dict and some of its entries)
                'model_id': model_id,
                'field_description': f.string,
                'relation': f._obj or '',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': f._fields_id if isinstance(f, fields.one2many) else '',
                'serialization_field_id': None,
            if getattr(f, 'serialization_field', None):
                # resolve link to serialization_field if specified by name
                serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', f.serialization_field)])
                if not serialization_field_id:
                    raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k))
                vals['serialization_field_id'] = serialization_field_id[0]

            # When its a custom field,it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                #setting value to let the problem NOT occur next time
                # (elided: elif guarding on k being in cols)
                    vals['select_level'] = cols[k]['select_level']

            # (elided: branch on whether the field row already exists)
                cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
                id = cr.fetchone()[0]
                cr.execute("""INSERT INTO ir_model_fields (
                    id, model_id, model, name, field_description, ttype,
                    relation,view_load,state,select_level,relation_field, translate, serialization_field_id
                    %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                    id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                    vals['relation'], bool(vals['view_load']), 'base',
                    vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id']
                if 'module' in context:
                    # External id for the newly created field.
                    name1 = 'field_' + self._table + '_' + k
                    cr.execute("select name from ir_model_data where name=%s", (name1,))
                    # (elided: disambiguate the name if it already exists)
                        name1 = name1 + "_" + str(id)
                    cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                        (name1, context['module'], 'ir.model.fields', id)
            # (elided: else branch -- field row exists, detect changes)
                for key, val in vals.items():
                    if cols[k][key] != vals[key]:
                        cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                        cr.execute("""UPDATE ir_model_fields SET
                            model_id=%s, field_description=%s, ttype=%s, relation=%s,
                            view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s, serialization_field_id=%s
                            model=%s AND name=%s""", (
                                vals['model_id'], vals['field_description'], vals['ttype'],
                                vals['relation'], bool(vals['view_load']),
                                vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name']
838 # Goal: try to apply inheritance at the instanciation level and
839 # put objects in the pool var
    def create_instance(cls, pool, cr):
        """ Instanciate a given model.

        This class method instanciates the class of some model (i.e. a class
        deriving from osv or osv_memory). The class might be the class passed
        in argument or, if it inherits from another class, a class constructed
        by combining the two classes.

        The ``attributes`` argument specifies which parent class attributes
        are combined with the child's.

        TODO: the creation of the combined class is repeated at each call of
        this method. This is probably unnecessary.
        """
        # NOTE(review): the @classmethod decorator and several guards/else
        # branches are elided in this excerpt; marked "(elided)" below.
        attributes = ['_columns', '_defaults', '_inherits', '_constraints',
        parent_names = getattr(cls, '_inherit', None)
        # (elided: only enter the combination logic when _inherit is set)
            if isinstance(parent_names, (str, unicode)):
                name = cls._name or parent_names
                parent_names = [parent_names]
            # (elided: else branch -- multiple inheritance requires _name)
                raise TypeError('_name is mandatory in case of multiple inheritance')

            for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
                parent_model = pool.get(parent_name)
                # (elided: guard on the parent model being registered)
                    raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                        'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
                if not getattr(cls, '_original_module', None) and name == parent_model._name:
                    cls._original_module = parent_model._original_module
                parent_class = parent_model.__class__
                # (elided: loop over the combinable attribute names)
                    new = copy.copy(getattr(parent_model, s, {}))
                    # Don't _inherit custom fields.
                    # (elided: filtering of manual/custom columns)
                    # Duplicate float fields because they have a .digits
                    # cache (which must be per-registry, not server-wide).
                        if new[c]._type == 'float':
                            new[c] = copy.copy(new[c])
                    if hasattr(new, 'update'):
                        new.update(cls.__dict__.get(s, {}))
                    elif s=='_constraints':
                        for c in cls.__dict__.get(s, []):
                            for c2 in range(len(new)):
                                #For _constraints, we should check field and methods as well
                                if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                        or getattr(new[c2][0],'__name__', True) == \
                                            getattr(c[0],'__name__', False)):
                                    # If new class defines a constraint with
                                    # same function name, we let it override
                                    # the old one.
                    # (elided: else branch -- list-valued attributes)
                        new.extend(cls.__dict__.get(s, []))

                # Keep links to non-inherited constraints, e.g. useful when exporting translations
                nattr['_local_constraints'] = cls.__dict__.get('_constraints', [])
                nattr['_local_sql_constraints'] = cls.__dict__.get('_sql_constraints', [])
                # Build the combined class; _register=False keeps the
                # metaclass from re-registering it.
                cls = type(name, (cls, parent_class), dict(nattr, _register=False))
        # (elided: else branch -- no _inherit)
            cls._local_constraints = getattr(cls, '_constraints', [])
            cls._local_sql_constraints = getattr(cls, '_sql_constraints', [])

        if not getattr(cls, '_original_module', None):
            cls._original_module = cls._module
        obj = object.__new__(cls)
        obj.__init__(pool, cr)
        # NOTE(review): this is the tail of a constructor-like method whose
        # `def` line is elided in this excerpt (presumably `def __new__(cls):`).
        """Register this model.

        This doesn't create an instance but simply register the model
        as being part of the module where it is defined.
        """

        # Set the module name (e.g. base, sale, accounting, ...) on the class.
        module = cls.__module__.split('.')[0]
        if not hasattr(cls, '_module'):
            # (elided: presumably cls._module = module)

        # Record this class in the list of models to instantiate for this module,
        # managed by the metaclass.
        module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
        if cls not in module_model_list:
            # (elided: an extra guard line)
                module_model_list.append(cls)

        # Since we don't return an instance here, the __init__
        # method won't be called.
    def __init__(self, pool, cr):
        """ Initialize a model and make it part of the given registry.

        - copy the stored fields' functions in the osv_pool,
        - update the _columns with the fields found in ir_model_fields,
        - ensure there is a many2one for each _inherits'd parent,
        - update the children's _columns,
        - give a chance to each field to initialize itself.

        """
        pool.add(self._name, self)

        if not self._name and not hasattr(self, '_inherit'):
            name = type(self).__name__.split('.')[0]
            msg = "The class %s has to have a _name attribute" % name
            raise except_orm('ValueError', msg)

        if not self._description:
            self._description = self._name
            self._table = self._name.replace('.', '_')
        if not hasattr(self, '_log_access'):
            # If _log_access is not specified, it is the same value as _auto.
            self._log_access = getattr(self, "_auto", True)

        # Work on a copy so per-registry column tweaks don't leak to the class.
        self._columns = self._columns.copy()
        for store_field in self._columns:
            f = self._columns[store_field]
            if hasattr(f, 'digits_change'):
            # Drop any trigger previously registered for this exact
            # (model, field) pair before re-registering it below.
            def not_this_field(stored_func):
                x, y, z, e, f, l = stored_func
                return x != self._name or y != store_field
            self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
            if not isinstance(f, fields.function):
                sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
            for object, aa in sm.items():
                    (fnct, fields2, order, length) = aa
                    (fnct, fields2, order) = aa
                    raise except_orm('Error',
                        ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
                self.pool._store_function.setdefault(object, [])
                self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
                # triggers are kept sorted by priority (element [4])
                self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))

        # Map constraint names to their message for friendly SQL-error reports.
        for (key, _, msg) in self._sql_constraints:
            self.pool._sql_error[self._table+'_'+key] = msg

        # Load manual fields

        # Check the query is already done for all modules of if we need to
        if self.pool.fields_by_model is not None:
            manual_fields = self.pool.fields_by_model.get(self._name, [])
            cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
            manual_fields = cr.dictfetchall()
        for field in manual_fields:
            if field['name'] in self._columns:
                # NOTE: 'eval' here is openerp's safe_eval (aliased at import time).
                'string': field['field_description'],
                'required': bool(field['required']),
                'readonly': bool(field['readonly']),
                'domain': eval(field['domain']) if field['domain'] else None,
                'size': field['size'],
                'ondelete': field['on_delete'],
                'translate': (field['translate']),
                #'select': int(field['select_level'])
            if field['serialization_field_id']:
                cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
                attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
                if field['ttype'] in ['many2one', 'one2many', 'many2many']:
                    attrs.update({'relation': field['relation']})
                self._columns[field['name']] = fields.sparse(**attrs)
            elif field['ttype'] == 'selection':
                self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
            elif field['ttype'] == 'reference':
                self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
            elif field['ttype'] == 'many2one':
                self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
            elif field['ttype'] == 'one2many':
                self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
            elif field['ttype'] == 'many2many':
                _rel1 = field['relation'].replace('.', '_')
                _rel2 = field['model'].replace('.', '_')
                _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
                self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
                self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)

        self._inherits_check()
        self._inherits_reload()
        if not self._sequence:
            self._sequence = self._table + '_id_seq'
        for k in self._defaults:
            assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
        for f in self._columns:
            self._columns[f].restart()

        # Transience: cap count/age of records for osv_memory-style models.
        if self.is_transient():
            self._transient_check_count = 0
            self._transient_max_count = config.get('osv_memory_count_limit')
            self._transient_max_hours = config.get('osv_memory_age_limit')
            assert self._log_access, "TransientModels must have log_access turned on, "\
                "in order to implement their access rights policy"

        if self._rec_name is not None:
            assert self._rec_name in self._all_columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
            self._rec_name = 'name'
    def __export_row(self, cr, uid, row, fields, context=None):
        """Serialise one ``browse_record`` into export cells, one cell per
        entry of ``fields`` (each entry being a path of field names);
        returns the main data row plus any extra rows contributed by
        sub-records (see the final ``return [data] + lines``)."""

        def check_type(field_type):
            # Dispatch on the column type (branch bodies presumably return a
            # type-appropriate empty value - confirm against upstream).
            if field_type == 'float':
            elif field_type == 'integer':
            elif field_type == 'boolean':

        def selection_field(in_field):
            # Resolve the column object for f[i] through the _inherits chain.
            col_obj = self.pool.get(in_field.keys()[0])
            if f[i] in col_obj._columns.keys():
                return col_obj._columns[f[i]]
            elif f[i] in col_obj._inherits.keys():
                selection_field(col_obj._inherits)

        def _get_xml_id(self, cr, uid, r):
            # Reuse an existing external ID for record ``r`` when one exists,
            # otherwise create a fresh one under the '__export__' module.
            model_data = self.pool.get('ir.model.data')
            data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
                d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
                    r = '%s.%s' % (d['module'], d['name'])
                    # candidate name; the postfix disambiguates collisions
                    n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
                    if not model_data.search(cr, uid, [('name', '=', n)]):
                model_data.create(cr, SUPERUSER_ID, {
                    'model': self._name,
                    'module': '__export__',

        # one output cell per requested field path
        data = map(lambda x: '', range(len(fields)))
        for fpos in range(len(fields)):
                        r = _get_xml_id(self, cr, uid, r)
                        # To display external name of selection field when its exported
                        if f[i] in self._columns.keys():
                            cols = self._columns[f[i]]
                        elif f[i] in self._inherit_fields.keys():
                            cols = selection_field(self._inherits)
                        if cols and cols._type == 'selection':
                            sel_list = cols.selection
                            if r and type(sel_list) == type([]):
                                r = [x[1] for x in sel_list if r==x[0]]
                                r = r and r[0] or False
                        if f[i] in self._columns:
                            r = check_type(self._columns[f[i]]._type)
                        elif f[i] in self._inherit_fields:
                            r = check_type(self._inherit_fields[f[i]][2]._type)
                        data[fpos] = r or False
                    if isinstance(r, (browse_record_list, list)):
                        # sub-field paths that continue under the current prefix
                        fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                            if [x for x in fields2 if x]:
                        done.append(fields2)
                        if cols and cols._type=='many2many' and len(fields[fpos])>(i+1) and (fields[fpos][i+1]=='id'):
                            data[fpos] = ','.join([_get_xml_id(self, cr, uid, x) for x in r])
                            lines2 = row2._model.__export_row(cr, uid, row2, fields2,
                                for fpos2 in range(len(fields)):
                                    if lines2 and lines2[0][fpos2]:
                                        data[fpos2] = lines2[0][fpos2]
                                        name_relation = self.pool.get(rr._table_name)._rec_name
                                        if isinstance(rr[name_relation], browse_record):
                                            rr = rr[name_relation]
                                        rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
                                        rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
                                        dt += tools.ustr(rr_name or '') + ','
                                    # drop the trailing comma
                                    data[fpos] = dt[:-1]
                    if isinstance(r, browse_record):
                        r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
                        r = r and r[0] and r[0][1] or ''
                    data[fpos] = tools.ustr(r or '')
        return [data] + lines
    def export_data(self, cr, uid, ids, fields_to_export, context=None):
        """
        Export fields for selected objects

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids
        :param fields_to_export: list of fields
        :param context: context arguments, like lang, time zone
        :rtype: dictionary with a *datas* matrix

        This method is used when exporting data via client menu

        """
        # Full column map: the model's own columns plus the ones reachable
        # through its _inherits parents.
        cols = self._columns.copy()
        for f in self._inherit_fields:
            cols.update({f: self._inherit_fields[f][2]})
        fields_to_export = map(fix_import_export_id_paths, fields_to_export)
        # NOTE(review): ``datas`` must be bound to a list before this loop;
        # its initialisation is not visible here - confirm.
        for row in self.browse(cr, uid, ids, context):
            datas += self.__export_row(cr, uid, row, fields_to_export, context)
        return {'datas': datas}
    def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
        """
        .. deprecated:: 7.0
            Use :meth:`~load` instead

        Import given data in given module

        This method is used when importing data via client menu.

        Example of fields to import for a sale.order::

            partner_id,                 (=name_search)
            order_line/.id,             (=database_id)
            order_line/product_id/id,   (=xml id)
            order_line/price_unit,
            order_line/product_uom_qty,
            order_line/product_uom/id   (=xml_id)

        This method returns a 4-tuple with the following structure::

            (return_code, errored_resource, error_message, unused)

        * The first item is a return code, it is ``-1`` in case of
          import error, or the last imported row number in case of success
        * The second item contains the record data dict that failed to import
          in case of error, otherwise it's 0
        * The third item contains an error message string in case of error,
          otherwise it's 0
        * The last item is currently unused, with no specific semantics

        :param fields: list of fields to import
        :param datas: data to import
        :param mode: 'init' or 'update' for record creation
        :param current_module: module name
        :param noupdate: flag for record creation
        :param filename: optional file to store partial import state for recovery
        :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
        :rtype: (int, dict or 0, str or 0, str or 0)
        """
        context = dict(context) if context is not None else {}
        context['_import_current_module'] = current_module

        fields = map(fix_import_export_id_paths, fields)
        ir_model_data_obj = self.pool.get('ir.model.data')

        # (body of the local log helper) escalate any 'error'-typed message
        # into an exception so the surrounding try/except aborts the import
            if m['type'] == 'error':
                raise Exception(m['message'])

        # Resume a previously interrupted import when a partial-import state
        # file is configured (pickle of {filename: last position}).
        if config.get('import_partial') and filename:
            with open(config.get('import_partial'), 'rb') as partial_import_file:
                data = pickle.load(partial_import_file)
                position = data.get(filename, 0)

            for res_id, xml_id, res, info in self._convert_records(cr, uid,
                            self._extract_records(cr, uid, fields, datas,
                                                  context=context, log=log),
                            context=context, log=log):
                ir_model_data_obj._update(cr, uid, self._name,
                     current_module, res, mode=mode, xml_id=xml_id,
                     noupdate=noupdate, res_id=res_id, context=context)
                position = info.get('rows', {}).get('to', 0) + 1
                # checkpoint the partial-import state every 100 rows
                if config.get('import_partial') and filename and (not (position%100)):
                    with open(config.get('import_partial'), 'rb') as partial_import:
                        data = pickle.load(partial_import)
                    data[filename] = position
                    with open(config.get('import_partial'), 'wb') as partial_import:
                        pickle.dump(data, partial_import)
                    if context.get('defer_parent_store_computation'):
                        self._parent_store_compute(cr)
        except Exception, e:
            return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), ''

        if context.get('defer_parent_store_computation'):
            self._parent_store_compute(cr)
        return position, 0, 0, 0
    def load(self, cr, uid, fields, data, context=None):
        """
        Attempts to load the data matrix, and returns a list of ids (or
        ``False`` if there was an error and no id could be generated) and a
        list of messages.

        The ids are those of the records created and saved (in database), in
        the same order they were extracted from the file. They can be passed
        directly to :meth:`~read`

        :param fields: list of fields to import, at the same index as the corresponding data
        :type fields: list(str)
        :param data: row-major matrix of data to import
        :type data: list(list(str))
        :param dict context:
        :returns: {ids: list(int)|False, messages: [Message]}
        """
        cr.execute('SAVEPOINT model_load')

        fields = map(fix_import_export_id_paths, fields)
        # NOTE(review): clear_caches() appears to return None, yet the result
        # is later used as ``ModelData._update(...)`` - verify this line.
        ModelData = self.pool['ir.model.data'].clear_caches()

        fg = self.fields_get(cr, uid, context=context)

        for id, xid, record, info in self._convert_records(cr, uid,
                self._extract_records(cr, uid, fields, data,
                        context=context, log=messages.append),
                context=context, log=messages.append):
                cr.execute('SAVEPOINT model_load_save')
            except psycopg2.InternalError, e:
                # broken transaction, exit and hope the source error was
                if not any(message['type'] == 'error' for message in messages):
                    messages.append(dict(info, type='error',message=
                        u"Unknown database error: '%s'" % e))
                ids.append(ModelData._update(cr, uid, self._name,
                     current_module, record, mode=mode, xml_id=xid,
                     noupdate=noupdate, res_id=id, context=context))
                cr.execute('RELEASE SAVEPOINT model_load_save')
            except psycopg2.Warning, e:
                messages.append(dict(info, type='warning', message=str(e)))
                cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
            except psycopg2.Error, e:
                messages.append(dict(
                    **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
                # Failed to write, log to messages, rollback savepoint (to
                # avoid broken transaction) and keep going
                cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
        # Any error at all voids the whole load: roll everything back.
        if any(message['type'] == 'error' for message in messages):
            cr.execute('ROLLBACK TO SAVEPOINT model_load')

        return {'ids': ids, 'messages': messages}
    def _extract_records(self, cr, uid, fields_, data,
                         context=None, log=lambda a: None):
        """ Generates record dicts from the data sequence.

        The result is a generator of dicts mapping field names to raw
        (unconverted, unvalidated) values.

        For relational fields, if sub-fields were provided the value will be
        a list of sub-records

        The following sub-fields may be set on the record (by key):
        * None is the name_get for the record (to use with name_create/name_search)
        * "id" is the External ID for the record
        * ".id" is the Database ID for the record

        """
        columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
        # Fake columns to avoid special cases in extractor
        columns[None] = fields.char('rec_name')
        columns['id'] = fields.char('External ID')
        columns['.id'] = fields.integer('Database ID')

        # m2o fields can't be on multiple lines so exclude them from the
        # is_relational field rows filter, but special-case it later on to
        # be handled with relational fields (as it can have subfields)
        is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
        get_o2m_values = itemgetter_tuple(
            [index for index, field in enumerate(fields_)
                  if columns[field[0]]._type == 'one2many'])
        get_nono2m_values = itemgetter_tuple(
            [index for index, field in enumerate(fields_)
                  if columns[field[0]]._type != 'one2many'])
        # Checks if the provided row has any non-empty non-relational field
        def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
            return any(g(row)) and not any(f(row))

        # (generator loop body - walks ``data`` one record span at a time)
            if index >= len(data): return

            # copy non-relational fields to record dict
            record = dict((field[0], value)
                          for field, value in itertools.izip(fields_, row)
                          if not is_relational(field[0]))

            # Get all following rows which have relational values attached to
            # the current record (no non-relational values)
            record_span = itertools.takewhile(
                only_o2m_values, itertools.islice(data, index + 1, None))
            # stitch record row back on for relational fields
            record_span = list(itertools.chain([row], record_span))
            for relfield in set(
                    field[0] for field in fields_
                    if is_relational(field[0])):
                column = columns[relfield]
                # FIXME: how to not use _obj without relying on fields_get?
                Model = self.pool[column._obj]

                # get only cells for this sub-field, should be strictly
                # non-empty, field path [None] is for name_get column
                indices, subfields = zip(*((index, field[1:] or [None])
                                           for index, field in enumerate(fields_)
                                           if field[0] == relfield))

                # return all rows which have at least one value for the
                # subfields of relfield
                relfield_data = filter(any, map(itemgetter_tuple(indices), record_span))
                record[relfield] = [subrecord
                                    for subrecord, _subinfo in Model._extract_records(
                                        cr, uid, subfields, relfield_data,
                                        context=context, log=log)]

            yield record, {'rows': {
                'to': index + len(record_span) - 1
            index += len(record_span)
    def _convert_records(self, cr, uid, records,
                         context=None, log=lambda a: None):
        """ Converts records from the source iterable (recursive dicts of
        strings) into forms which can be written to the database (via
        self.create or (ir.model.data)._update)

        :returns: a list of triplets of (id, xid, record)
        :rtype: list((int|None, str|None, dict))

        """
        if context is None: context = {}
        Converter = self.pool['ir.fields.converter']
        columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
        Translation = self.pool['ir.translation']
        # Translated (user-visible) labels per column, used in messages.
            (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
                                         context.get('lang'))
            for f, column in columns.iteritems())

        convert = Converter.for_model(cr, uid, self, context=context)

        def _log(base, field, exception):
            type = 'warning' if isinstance(exception, Warning) else 'error'
            # logs the logical (not human-readable) field name for automated
            # processing of response, but injects human readable in message
            record = dict(base, type=type, field=field,
                          message=unicode(exception.args[0]) % base)
            if len(exception.args) > 1 and exception.args[1]:
                record.update(exception.args[1])

        # CountingStream exposes a running ``index`` used in messages below.
        stream = CountingStream(records)
        for record, extras in stream:
            # name_get/name_create
            if None in record: pass
                    dbid = int(record['.id'])
                    # in case of overridden id column
                    dbid = record['.id']
                if not self.search(cr, uid, [('id', '=', dbid)], context=context):
                        record=stream.index,
                        message=_(u"Unknown database identifier '%s'") % dbid))

            converted = convert(record, lambda field, err:\
                _log(dict(extras, record=stream.index, field=field_names[field]), field, err))

            yield dbid, xid, converted, dict(extras, record=stream.index)
1515 def get_invalid_fields(self, cr, uid):
1516 return list(self._invalids)
    def _validate(self, cr, uid, ids, context=None):
        """Run every method registered in ``self._constraints`` on ``ids``
        and raise a 'ValidateError' ``except_orm`` listing each failing
        constraint; ``self._invalids`` accumulates the offending field
        names for later retrieval."""
        context = context or {}
        lng = context.get('lang')
        trans = self.pool.get('ir.translation')
        for constraint in self._constraints:
            fun, msg, fields = constraint
            # We don't pass around the context here: validation code
            # must always yield the same results.
            if not fun(self, cr, uid, ids):
                # Check presence of __call__ directly instead of using
                # callable() because it will be deprecated as of Python 3.0
                if hasattr(msg, '__call__'):
                    tmp_msg = msg(self, cr, uid, ids, context=context)
                    if isinstance(tmp_msg, tuple):
                        tmp_msg, params = tmp_msg
                        translated_msg = tmp_msg % params
                        translated_msg = tmp_msg
                else:
                    # plain string message: translate via ir.translation
                    translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg)
                        _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
                self._invalids.update(fields)
            raise except_orm('ValidateError', '\n'.join(error_msgs))
            self._invalids.clear()
    def default_get(self, cr, uid, fields_list, context=None):
        """
        Returns default values for the fields in fields_list.

        :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
        :type fields_list: list
        :param context: optional context dictionary - it may contains keys for specifying certain options
                        like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
                        It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
                        or override a default value for a field.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overriden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
        """
        # trigger view init hook
        self.view_init(cr, uid, fields_list, context)

        # get the default values for the inherited fields
        for t in self._inherits.keys():
            defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,

        # get the default values defined in the object
        for f in fields_list:
            if f in self._defaults:
                if callable(self._defaults[f]):
                    defaults[f] = self._defaults[f](self, cr, uid, context)
                    defaults[f] = self._defaults[f]

            # column object for f, whether owned or inherited
            fld_def = ((f in self._columns) and self._columns[f]) \
                    or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \

            if isinstance(fld_def, fields.property):
                property_obj = self.pool.get('ir.property')
                prop_value = property_obj.get(cr, uid, f, self._name, context=context)
                    if isinstance(prop_value, (browse_record, browse_null)):
                        defaults[f] = prop_value.id
                        defaults[f] = prop_value

            if f not in defaults:

        # get the default values set by the user and override the default
        # values defined in the object
        ir_values_obj = self.pool.get('ir.values')
        res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
        for id, field, field_value in res:
            if field in fields_list:
                fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                # For relational defaults, drop any target ids that no longer
                # exist before handing the value back.
                if fld_def._type == 'many2one':
                    obj = self.pool.get(fld_def._obj)
                    if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                if fld_def._type == 'many2many':
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value or [])):
                        if not obj.search(cr, uid, [('id', '=',
                            field_value2.append(field_value[i])
                    field_value = field_value2
                if fld_def._type == 'one2many':
                    obj = self.pool.get(fld_def._obj)
                    for i in range(len(field_value or [])):
                        field_value2.append({})
                        for field2 in field_value[i]:
                            if field2 in obj._columns.keys() and obj._columns[field2]._type == 'many2one':
                                obj2 = self.pool.get(obj._columns[field2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type == 'many2one':
                                obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                                if not obj2.search(cr, uid,
                                        [('id', '=', field_value[i][field2])]):
                            # TODO add test for many2many and one2many
                            field_value2[i][field2] = field_value[i][field2]
                    field_value = field_value2
                defaults[field] = field_value

        # get the default values from the context
        for key in context or {}:
            if key.startswith('default_') and (key[8:] in fields_list):
                defaults[key[8:]] = context[key]
        # NOTE(review): expected to end with ``return defaults`` - confirm.
    def fields_get_keys(self, cr, user, context=None):
        """Collect the names of all fields of this model, including the
        ones reachable (recursively) through ``_inherits`` parents.
        """
        res = self._columns.keys()
        # TODO I believe this loop can be replace by
        # res.extend(self._inherit_fields.key())
        for parent in self._inherits:
            res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
        # NOTE(review): expected to end with ``return res`` - confirm.
    def _rec_name_fallback(self, cr, uid, context=None):
        """Pick a usable display-name field: ``_rec_name`` when it is an
        actual column, otherwise the first declared column, falling back
        to "id" for models with no columns."""
        rec_name = self._rec_name
        if rec_name not in self._columns:
            rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"
        # NOTE(review): expected to end with ``return rec_name`` - confirm.
    # Overload this method if you need a window title which depends on the context
    def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
        """Overloadable hook computing a window title for the given view
        type/id; the base implementation supplies no custom title."""
1668 def user_has_groups(self, cr, uid, groups, context=None):
1669 """Return true if the user is at least member of one of the groups
1670 in groups_str. Typically used to resolve ``groups`` attribute
1671 in view and model definitions.
1673 :param str groups: comma-separated list of fully-qualified group
1674 external IDs, e.g.: ``base.group_user,base.group_system``
1675 :return: True if the current user is a member of one of the
1678 return any([self.pool.get('res.users').has_group(cr, uid, group_ext_id)
1679 for group_ext_id in groups.split(',')])
    def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
        """Return the description of the fields in the node.

        In a normal call to this method, node is a complete view architecture
        but it is actually possible to give some sub-node (this is used so
        that the method can call itself recursively).

        Originally, the field descriptions are drawn from the node itself.
        But there is now some code calling fields_get() in order to merge some
        of those information in the architecture.

        """

        # (body of the local encode() helper) coerce unicode to utf-8 bytes
            if isinstance(s, unicode):
                return s.encode('utf8')

        def check_group(node):
            """Apply group restrictions, may be set at view level or model level::
            * at view level this means the element should be made invisible to
              people who are not members
            * at model level (exclusively for fields, obviously), this means
              the field should be completely removed from the view, as it is
              completely unavailable for non-members

            :return: True if field should be included in the result of fields_view_get
            """
            if node.tag == 'field' and node.get('name') in self._all_columns:
                column = self._all_columns[node.get('name')].column
                if column.groups and not self.user_has_groups(cr, user,
                                                              groups=column.groups,
                    node.getparent().remove(node)
                    fields.pop(node.get('name'), None)
                    # no point processing view-level ``groups`` anymore, return
            if node.get('groups'):
                can_see = self.user_has_groups(cr, user,
                                               groups=node.get('groups'),
                    node.set('invisible', '1')
                    modifiers['invisible'] = True
                    if 'attrs' in node.attrib:
                        del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                    if f.tag == 'field':
                        xml += etree.tostring(f, encoding="utf-8")
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = self._name
                xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
                attrs = {'views': views}
            if node.get('name'):
                    if node.get('name') in self._columns:
                        column = self._columns[node.get('name')]
                        column = self._inherit_fields[node.get('name')][2]
                    relation = self.pool.get(column._obj)
                        if f.tag in ('form', 'tree', 'graph', 'kanban'):
                            # recursively process embedded sub-views
                            ctx = context.copy()
                            ctx['base_model_name'] = self._name
                            xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                            views[str(f.tag)] = {
                    attrs = {'views': views}
                    if node.get('widget') and node.get('widget') == 'selection':
                        # Prepare the cached selection list for the client. This needs to be
                        # done even when the field is invisible to the current user, because
                        # other events could need to change its value to any of the selectable ones
                        # (such as on_change events, refreshes, etc.)

                        # If domain and context are strings, we keep them for client-side, otherwise
                        # we evaluate them server-side to consider them when generating the list of
                        # TODO: find a way to remove this hack, by allow dynamic domains
                        if column._domain and not isinstance(column._domain, basestring):
                            dom = list(column._domain)
                        dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                        search_context = dict(context)
                        if column._context and not isinstance(column._context, basestring):
                            search_context.update(column._context)
                        attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                        if (node.get('required') and not int(node.get('required'))) or not column.required:
                            attrs['selection'].append((False, ''))
                fields[node.get('name')] = attrs

            field = model_fields.get(node.get('name'))
                transfer_field_to_modifiers(field, modifiers)

        elif node.tag in ('form', 'tree'):
            result = self.view_header_get(cr, user, False, node.tag, context)
                node.set('string', result)
            in_tree_view = node.tag == 'tree'

        elif node.tag == 'calendar':
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if not check_group(node):
            # node must be removed, no need to proceed further with its children

        # The view architeture overrides the python model.
        # Get the attrs before they are (possibly) deleted by check_group below
        transfer_node_to_modifiers(node, modifiers, context, in_tree_view)

        # TODO remove attrs couterpart in modifiers when invisible is true ?

        # Translate view texts and translatable attributes.
        if 'lang' in context:
            if node.text and node.text.strip():
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.text.strip())
                    node.text = node.text.replace(node.text.strip(), trans)
            if node.tail and node.tail.strip():
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.tail.strip())
                    node.tail = node.tail.replace(node.tail.strip(), trans)

            if node.get('string') and not result:
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
                if trans == node.get('string') and ('base_model_name' in context):
                    # If translation is same as source, perhaps we'd have more luck with the alternative model name
                    # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                    trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
                    node.set('string', trans)

            for attr_name in ('confirm', 'sum', 'avg', 'help', 'placeholder'):
                attr_value = node.get(attr_name)
                    trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], attr_value)
                        node.set(attr_name, trans)

            # Recurse into children to collect their field descriptions too.
            if children or (node.tag == 'field' and f.tag in ('filter','separator')):
                fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))

        transfer_modifiers_to_node(modifiers, node)
        # NOTE(review): expected to end with ``return fields`` - confirm.
    def _disable_workflow_buttons(self, cr, user, node):
        """ Set the buttons in node to readonly if the user can't activate them. """
        # admin user can always activate workflow buttons
        # TODO handle the case of more than one workflow for a model or multiple
        # transitions with different groups and same signal
        usersobj = self.pool.get('res.users')
        buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
        for button in buttons:
            user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
            cr.execute("""SELECT DISTINCT t.group_id
                  INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
                  INNER JOIN wkf_transition t ON (t.act_to = a.id)
                         AND t.group_id is NOT NULL
                   """, (self._name, button.get('name')))
            group_ids = [x[0] for x in cr.fetchall() if x[0]]
            # clickable when the transition has no group restriction, or the
            # user belongs to at least one of the required groups
            can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
            button.set('readonly', str(int(not can_click)))
1889 def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
1890 """ Return an architecture and a description of all the fields.
1892 The field description combines the result of fields_get() and
1895 :param node: the architecture as an etree
1896 :return: a tuple (arch, fields) where arch is the given node as a
1897 string and fields is the description of all the fields.
# Diagram views aggregate the field definitions of the models referenced
# by their first ('node') and second ('arrow') children.
1901 if node.tag == 'diagram':
1902 if node.getchildren()[0].tag == 'node':
1903 node_model = self.pool.get(node.getchildren()[0].get('object'))
1904 node_fields = node_model.fields_get(cr, user, None, context)
1905 fields.update(node_fields)
1906 if not node.get("create") and not node_model.check_access_rights(cr, user, 'create', raise_exception=False):
1907 node.set("create", 'false')
1908 if node.getchildren()[1].tag == 'arrow':
1909 arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
1910 fields.update(arrow_fields)
1912 fields = self.fields_get(cr, user, None, context)
1913 fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
1914 node = self._disable_workflow_buttons(cr, user, node)
# Deny create/delete/edit in the arch when the user lacks the matching
# access right (only if the view does not already set the attribute).
1915 if node.tag in ('kanban', 'tree', 'form', 'gantt'):
1916 for action, operation in (('create', 'create'), ('delete', 'unlink'), ('edit', 'write')):
1917 if not node.get(action) and not self.check_access_rights(cr, user, operation, raise_exception=False):
1918 node.set(action, 'false')
1919 arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
1920 for k in fields.keys():
1921 if k not in fields_def:
1923 for field in fields_def:
1925 # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
1926 fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
1927 elif field in fields:
1928 fields[field].update(fields_def[field])
# A field referenced by the view but unknown to the model: build a
# helpful error listing the view (and its inheriting views) at fault.
1930 cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
1931 res = cr.fetchall()[:]
1933 res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
1934 msg = "\n * ".join([r[0] for r in res])
1935 msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
1937 raise except_orm('View error', msg)
1940 def _get_default_form_view(self, cr, user, context=None):
1941 """ Generates a default single-line form view using all fields
1942 of the current model except the m2m and o2m ones.
1944 :param cr: database cursor
1945 :param int user: user id
1946 :param dict context: connection context
1947 :returns: a form view as an lxml document
1948 :rtype: etree._Element
1950 view = etree.Element('form', string=self._description)
1951 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
1952 for field, descriptor in self.fields_get(cr, user, context=context).iteritems():
# o2m/m2m fields are skipped (the skip statement is elided in this listing).
1953 if descriptor['type'] in ('one2many', 'many2many'):
1955 etree.SubElement(view, 'field', name=field)
# Text fields get a full line of their own.
1956 if descriptor['type'] == 'text':
1957 etree.SubElement(view, 'newline')
1960 def _get_default_search_view(self, cr, user, context=None):
1961 """ Generates a single-field search view, based on _rec_name.
1963 :param cr: database cursor
1964 :param int user: user id
1965 :param dict context: connection context
1966 :returns: a tree view as an lxml document
1967 :rtype: etree._Element
1969 view = etree.Element('search', string=self._description)
# Single <field> on the record's display-name column (or its fallback).
1970 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
1973 def _get_default_tree_view(self, cr, user, context=None):
1974 """ Generates a single-field tree view, based on _rec_name.
1976 :param cr: database cursor
1977 :param int user: user id
1978 :param dict context: connection context
1979 :returns: a tree view as an lxml document
1980 :rtype: etree._Element
1982 view = etree.Element('tree', string=self._description)
# Single <field> on the record's display-name column (or its fallback).
1983 etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
1986 def _get_default_calendar_view(self, cr, user, context=None):
1987 """ Generates a default calendar view by trying to infer
1988 calendar fields from a number of pre-set attribute names
1990 :param cr: database cursor
1991 :param int user: user id
1992 :param dict context: connection context
1993 :returns: a calendar view
1994 :rtype: etree._Element
1996 def set_first_of(seq, in_, to):
1997 """Sets the first value of ``seq`` also found in ``in_`` to
1998 the ``to`` attribute of the view being closed over.
2000 Returns whether it's found a suitable value (and set it on
2001 the attribute) or not
2009 view = etree.Element('calendar', string=self._description)
# NOTE(review): unlike the form/tree defaults above, the field name here is
# passed positionally (SubElement's attrib slot) instead of name=... — looks
# suspicious; confirm against lxml's SubElement signature before relying on it.
2010 etree.SubElement(view, 'field', self._rec_name_fallback(cr, user, context))
# If the configured _date_name column is absent, fall back to the first
# conventional date column; otherwise the model cannot host a calendar view.
2012 if self._date_name not in self._columns:
2014 for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
2015 if dt in self._columns:
2016 self._date_name = dt
2021 raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
2022 view.set('date_start', self._date_name)
# Color attribute is optional; first matching column wins.
2024 set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
2025 self._columns, 'color')
# A calendar needs either an end date or a duration field.
2027 if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
2028 self._columns, 'date_stop'):
2029 if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
2030 self._columns, 'date_delay'):
2032 _('Invalid Object Architecture!'),
2033 _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % self._name))
2038 # if view_id is given, view_type is not required
2040 def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
2042 Get the detailed composition of the requested view like fields, model, view architecture
2044 :param cr: database cursor
2045 :param user: current user id
2046 :param view_id: id of the view or None
2047 :param view_type: type of the view to return if view_id is None ('form', 'tree', ...)
2048 :param context: context arguments, like lang, time zone
2049 :param toolbar: true to include contextual actions
2050 :param submenu: deprecated
2051 :return: dictionary describing the composition of the requested view (including inherited views and extensions)
2052 :raise AttributeError:
2053 * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
2054 * if some tag other than 'position' is found in parent view
2055 :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
# Local helper: coerce unicode to a utf-8 encoded byte string for lxml.
2062 if isinstance(s, unicode):
2063 return s.encode('utf8')
# Local helper: raise a uniform AttributeError for a broken inherited view,
# naming both the parent and the child view's xml_id.
2066 def raise_view_error(error_msg, child_view_id):
2067 view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
2068 error_msg = error_msg % {'parent_xml_id': view.xml_id}
2069 raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
2070 % (child_view.xml_id, self._name, error_msg))
2072 def locate(source, spec):
2073 """ Locate a node in a source (parent) architecture.
2075 Given a complete source (parent) architecture (i.e. the field
2076 `arch` in a view), and a 'spec' node (a node in an inheriting
2077 view that specifies the location in the source view of what
2078 should be changed), return (if it exists) the node in the
2079 source view matching the specification.
2081 :param source: a parent architecture to modify
2082 :param spec: a modifying node in an inheriting view
2083 :return: a node in the source matching the spec
2086 if spec.tag == 'xpath':
2087 nodes = source.xpath(spec.get('expr'))
2088 return nodes[0] if nodes else None
2089 elif spec.tag == 'field':
2090 # Only compare the field name: a field can be only once in a given view
2091 # at a given level (and for multilevel expressions, we should use xpath
2092 # inheritance spec anyway).
2093 for node in source.getiterator('field'):
2094 if node.get('name') == spec.get('name'):
# Generic tag: match on every attribute except 'position' and 'version'.
2098 for node in source.getiterator(spec.tag):
2099 if isinstance(node, SKIPPED_ELEMENT_TYPES):
2101 if all(node.get(attr) == spec.get(attr) \
2102 for attr in spec.attrib
2103 if attr not in ('position','version')):
2104 # Version spec should match parent's root element's version
2105 if spec.get('version') and spec.get('version') != source.get('version'):
2110 def apply_inheritance_specs(source, specs_arch, inherit_id=None):
2111 """ Apply an inheriting view.
2113 Apply to a source architecture all the spec nodes (i.e. nodes
2114 describing where and what changes to apply to some parent
2115 architecture) given by an inheriting view.
2117 :param source: a parent architecture to modify
2118 :param specs_arch: a modifying architecture in an inheriting view
2119 :param inherit_id: the database id of the inheriting view
2120 :return: a modified source where the specs are applied
2123 specs_tree = etree.fromstring(encode(specs_arch))
2124 # Queue of specification nodes (i.e. nodes describing where and
2125 # changes to apply to some parent architecture).
2126 specs = [specs_tree]
2130 if isinstance(spec, SKIPPED_ELEMENT_TYPES):
# A <data> wrapper just contributes its children as further specs.
2132 if spec.tag == 'data':
2133 specs += [ c for c in specs_tree ]
2135 node = locate(source, spec)
2136 if node is not None:
2137 pos = spec.get('position', 'inside')
2138 if pos == 'replace':
# Replacing the root: the first child of the spec becomes the new root.
2139 if node.getparent() is None:
2140 source = copy.deepcopy(spec[0])
2143 node.addprevious(child)
2144 node.getparent().remove(node)
2145 elif pos == 'attributes':
2146 for child in spec.getiterator('attribute'):
# An empty <attribute> body deletes the attribute instead of setting it.
2147 attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
2149 node.set(attribute[0], attribute[1])
2151 del(node.attrib[attribute[0]])
2153 sib = node.getnext()
2157 elif pos == 'after':
2162 sib.addprevious(child)
2163 elif pos == 'before':
2164 node.addprevious(child)
2166 raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
# Spec matched nothing: rebuild a readable "<tag attr=...>" description
# for the error message (position attribute is omitted on purpose).
2169 ' %s="%s"' % (attr, spec.get(attr))
2170 for attr in spec.attrib
2171 if attr != 'position'
2173 tag = "<%s%s>" % (spec.tag, attrs)
2174 if spec.get('version') and spec.get('version') != source.get('version'):
2175 raise_view_error("Mismatching view API version for element '%s': %r vs %r in parent view '%%(parent_xml_id)s'" % \
2176 (tag, spec.get('version'), source.get('version')), inherit_id)
2177 raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
2181 def apply_view_inheritance(cr, user, source, inherit_id):
2182 """ Apply all the (directly and indirectly) inheriting views.
2184 :param source: a parent architecture to modify (with parent
2185 modifications already applied)
2186 :param inherit_id: the database view_id of the parent view
2187 :return: a modified source where all the modifying architecture
# Recursive descent: each direct child view is applied, then its own
# inheriting views in turn.
2191 sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
2192 for (view_arch, view_id) in sql_inherit:
2193 source = apply_inheritance_specs(source, view_arch, view_id)
2194 source = apply_view_inheritance(cr, user, source, view_id)
2197 result = {'type': view_type, 'model': self._name}
2200 parent_view_model = None
# An xml id in the context (<type>_view_ref) may designate the view to use.
2201 view_ref = context.get(view_type + '_view_ref')
2202 # Search for a root (i.e. without any parent) view.
2204 if view_ref and not view_id:
2206 module, view_ref = view_ref.split('.', 1)
2207 cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
2208 view_ref_res = cr.fetchone()
2210 view_id = view_ref_res[0]
# Either fetch the explicitly requested view, or the highest-priority
# root view of the requested type for this model.
2213 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2215 WHERE id=%s""", (view_id,))
2217 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2219 WHERE model=%s AND type=%s AND inherit_id IS NULL
2220 ORDER BY priority""", (self._name, view_type))
2221 sql_res = cr.dictfetchone()
# Climb up the inherit_id chain until a root view is reached (the loop
# construct around these lines is elided in this listing).
2226 view_id = sql_res['inherit_id'] or sql_res['id']
2227 parent_view_model = sql_res['model']
2228 if not sql_res['inherit_id']:
2231 # if a view was found
2233 source = etree.fromstring(encode(sql_res['arch']))
2235 arch=apply_view_inheritance(cr, user, source, sql_res['id']),
2236 type=sql_res['type'],
2237 view_id=sql_res['id'],
2238 name=sql_res['name'],
2239 field_parent=sql_res['field_parent'] or False)
2241 # otherwise, build some kind of default view
2243 view = getattr(self, '_get_default_%s_view' % view_type)(
2245 except AttributeError:
2246 # what happens here, graph case?
2247 raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
# Mixed-model inheritance: remember the parent view's model so field
# labels can be translated against it too (see __view_look_dom).
2255 if parent_view_model != self._name:
2256 ctx = context.copy()
2257 ctx['base_model_name'] = parent_view_model
2260 xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
2261 result['arch'] = xarch
2262 result['fields'] = xfields
# Toolbar: strip the heavy report payloads, then collect the print /
# action / relate entries bound to this model via ir.values.
2267 for key in ('report_sxw_content', 'report_rml_content',
2268 'report_sxw', 'report_rml',
2269 'report_sxw_content_data', 'report_rml_content_data'):
2273 ir_values_obj = self.pool.get('ir.values')
2274 resprint = ir_values_obj.get(cr, user, 'action',
2275 'client_print_multi', [(self._name, False)], False,
2277 resaction = ir_values_obj.get(cr, user, 'action',
2278 'client_action_multi', [(self._name, False)], False,
2281 resrelate = ir_values_obj.get(cr, user, 'action',
2282 'client_action_relate', [(self._name, False)], False,
2284 resaction = [clean(action) for action in resaction
2285 if view_type == 'tree' or not action[2].get('multi')]
2286 resprint = [clean(print_) for print_ in resprint
2287 if view_type == 'tree' or not print_[2].get('multi')]
2288 #When multi="True" set it will display only in More of the list view
2289 resrelate = [clean(action) for action in resrelate
2290 if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')]
2292 for x in itertools.chain(resprint, resaction, resrelate):
2293 x['string'] = x['name']
2295 result['toolbar'] = {
2297 'action': resaction,
2302 _view_look_dom_arch = __view_look_dom_arch
2304 def search_count(self, cr, user, args, context=None):
# Delegates to search() with count=True; the list-normalization tail of
# this method is elided in this listing.
2307 res = self.search(cr, user, args, context=context, count=True)
2308 if isinstance(res, list):
2312 def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
2314 Search for records based on a search domain.
2316 :param cr: database cursor
2317 :param user: current user id
2318 :param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
2319 :param offset: optional number of results to skip in the returned values (default: 0)
2320 :param limit: optional max number of records to return (default: **None**)
2321 :param order: optional columns to sort by (default: self._order=id )
2322 :param context: optional context arguments, like lang, time zone
2323 :type context: dictionary
2324 :param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
2325 :return: id or list of ids of records matching the criteria
2326 :rtype: integer or list of integers
2327 :raise AccessError: * if user tries to bypass access rules for read on the requested object.
2329 **Expressing a search domain (args)**
2331 Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:
2333 * **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
2334 * **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
2335 The semantics of most of these operators are obvious.
2336 The ``child_of`` operator will look for records who are children or grand-children of a given record,
2337 according to the semantics of this model (i.e following the relationship field named by
2338 ``self._parent_name``, by default ``parent_id``).
2339 * **value** must be a valid value to compare with the values of **field_name**, depending on its type.
2341 Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
2342 These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
2343 Be very careful about this when you combine them the first time.
2345 Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::
2347 [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de')]
2349 The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::
2351 (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
# Thin public wrapper: all the work happens in _search().
2354 return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
2356 def name_get(self, cr, user, ids, context=None):
2357 """Returns the preferred display value (text representation) for the records with the
2358 given ``ids``. By default this will be the value of the ``name`` column, unless
2359 the model implements a custom behavior.
2360 Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
2364 :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
# Accept a single id as well as a list (normalization tail elided here).
2368 if isinstance(ids, (int, long)):
# When the _rec_name column exists, delegate formatting of each raw value
# to the column's as_display_name(); read with _classic_write to get raw ids.
2371 if self._rec_name in self._all_columns:
2372 rec_name_column = self._all_columns[self._rec_name].column
2373 return [(r['id'], rec_name_column.as_display_name(cr, user, self, r[self._rec_name], context=context))
2374 for r in self.read(cr, user, ids, [self._rec_name],
2375 load='_classic_write', context=context)]
# Fallback when no usable _rec_name column: "model,id" placeholder.
2376 return [(id, "%s,%s" % (self._name, id)) for id in ids]
2378 def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
2379 """Search for records that have a display name matching the given ``name`` pattern if compared
2380 with the given ``operator``, while also matching the optional search domain (``args``).
2381 This is used for example to provide suggestions based on a partial value for a relational
2383 Sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
2386 This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
2387 and then :meth:`~.name_get` on the result of the search.
2389 :param list args: optional search domain (see :meth:`~.search` for syntax),
2390 specifying further restrictions
2391 :param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
2393 :param int limit: optional max number of records to return
2395 :return: list of pairs ``(id,text_repr)`` for all matching records.
# Public wrapper around the private implementation (which also supports a
# dedicated uid for the name_get part).
2397 return self._name_search(cr, user, name, args, operator, context, limit)
2399 def name_create(self, cr, uid, name, context=None):
2400 """Creates a new record by calling :meth:`~.create` with only one
2401 value provided: the name of the new record (``_rec_name`` field).
2402 The new record will also be initialized with any default values applicable
2403 to this model, or provided through the context. The usual behavior of
2404 :meth:`~.create` applies.
2405 Similarly, this method may raise an exception if the model has multiple
2406 required fields and some do not have default values.
2408 :param name: name of the record to create
2411 :return: the :meth:`~.name_get` pair value for the newly-created record.
# Create with the single _rec_name value, then return its (id, name) pair.
2413 rec_id = self.create(cr, uid, {self._rec_name: name}, context)
2414 return self.name_get(cr, uid, [rec_id], context)[0]
2416 # private implementation of name_search, allows passing a dedicated user for the name_get part to
2417 # solve some access rights issues
2418 def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
2424 # optimize out the default criterion of ``ilike ''`` that matches everything
2425 if not (name == '' and operator == 'ilike'):
2426 args += [(self._rec_name, operator, name)]
# Search as `user`, but render display names as `name_get_uid` when given
# (the _search call below also enforces access rules under that uid).
2427 access_rights_uid = name_get_uid or user
2428 ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
2429 res = self.name_get(cr, access_rights_uid, ids, context)
2432 def read_string(self, cr, uid, id, langs, fields=None, context=None):
# Returns, per language, the translated labels of the given fields
# (field label translations from ir.translation, falling back to the
# column's own string). Requires read access on ir.translation.
2435 self.pool.get('ir.translation').check_access_rights(cr, uid, 'read')
2437 fields = self._columns.keys() + self._inherit_fields.keys()
2438 #FIXME: collect all calls to _get_source into one SQL call.
2440 res[lang] = {'code': lang}
2442 if f in self._columns:
2443 res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
2445 res[lang][f] = res_trans
2447 res[lang][f] = self._columns[f].string
# Merge in the labels of fields inherited via _inherits from each parent.
2448 for table in self._inherits:
2449 cols = intersect(self._inherit_fields.keys(), fields)
2450 res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
2453 res[lang]['code'] = lang
2454 for f in res2[lang]:
2455 res[lang][f] = res2[lang][f]
2458 def write_string(self, cr, uid, id, langs, vals, context=None):
# Writes translated field labels for the given languages; mirror of
# read_string. Requires write access on ir.translation.
2459 self.pool.get('ir.translation').check_access_rights(cr, uid, 'write')
2460 #FIXME: try to only call the translation in one SQL
2463 if field in self._columns:
2464 src = self._columns[field].string
# res_id [0] marks a field-label translation (not record-bound).
2465 self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
# Propagate to _inherits parents for their own inherited fields.
2466 for table in self._inherits:
2467 cols = intersect(self._inherit_fields.keys(), vals)
2469 self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
2472 def _add_missing_default_values(self, cr, uid, values, context=None):
# Completes `values` with default_get() defaults for every column (own or
# inherited) not explicitly provided; provided values always win.
2473 missing_defaults = []
2474 avoid_tables = [] # avoid overriding inherited values when parent is set
2475 for tables, parent_field in self._inherits.items():
2476 if parent_field in values:
2477 avoid_tables.append(tables)
2478 for field in self._columns.keys():
2479 if not field in values:
2480 missing_defaults.append(field)
2481 for field in self._inherit_fields.keys():
2482 if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
2483 missing_defaults.append(field)
2485 if len(missing_defaults):
2486 # override defaults with the provided values, never allow the other way around
2487 defaults = self.default_get(cr, uid, missing_defaults, context)
# Normalize relational defaults to write-command form:
# m2m list of ids -> [(6, 0, ids)]; o2m list of dicts -> [(0, 0, vals)].
2489 if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
2490 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
2491 and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
2492 defaults[dv] = [(6, 0, defaults[dv])]
2493 if (dv in self._columns and self._columns[dv]._type == 'one2many' \
2494 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
2495 and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
2496 defaults[dv] = [(0, 0, x) for x in defaults[dv]]
2497 defaults.update(values)
2501 def clear_caches(self):
2502 """ Clear the caches
2504 This clears the caches associated to methods decorated with
2505 ``tools.ormcache`` or ``tools.ormcache_multi``.
# Probe for the cache attribute; the AttributeError branch below presumably
# makes this a no-op on models without one — confirm (lines elided here).
2508 getattr(self, '_ormcache')
# Signal the registry that at least one model cache was invalidated.
2510 self.pool._any_cache_cleared = True
2511 except AttributeError:
2515 def _read_group_fill_results(self, cr, uid, domain, groupby, groupby_list, aggregated_fields,
2516 read_group_result, read_group_order=None, context=None):
2517 """Helper method for filling in empty groups for all possible values of
2518 the field being grouped by"""
2520 # self._group_by_full should map groupable fields to a method that returns
2521 # a list of all aggregated values that we want to display for this field,
2522 # in the form of a m2o-like pair (key,label).
2523 # This is useful to implement kanban views for instance, where all columns
2524 # should be displayed even if they don't contain any record.
2526 # Grab the list of all groups that should be displayed, including all present groups
2527 present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
# Run the _group_by_full hook as superuser so display-only groups are not
# filtered out by the current user's record rules.
2528 all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
2529 read_group_order=read_group_order,
2530 access_rights_uid=openerp.SUPERUSER_ID,
# Template for an empty group: all aggregates False, count 0, and the
# drill-down context when more groupby levels remain.
2533 result_template = dict.fromkeys(aggregated_fields, False)
2534 result_template[groupby + '_count'] = 0
2535 if groupby_list and len(groupby_list) > 1:
2536 result_template['__context'] = {'group_by': groupby_list[1:]}
2538 # Merge the left_side (current results as dicts) with the right_side (all
2539 # possible values as m2o pairs). Both lists are supposed to be using the
2540 # same ordering, and can be merged in one pass.
2543 def append_left(left_side):
2544 grouped_value = left_side[groupby] and left_side[groupby][0]
2545 if not grouped_value in known_values:
2546 result.append(left_side)
2547 known_values[grouped_value] = left_side
# Already-seen group: only refresh its count from the real result.
2549 count_attr = groupby + '_count'
2550 known_values[grouped_value].update({count_attr: left_side[count_attr]})
2551 def append_right(right_side):
2552 grouped_value = right_side[0]
2553 if not grouped_value in known_values:
2554 line = dict(result_template)
2555 line[groupby] = right_side
2556 line['__domain'] = [(groupby,'=',grouped_value)] + domain
2558 known_values[grouped_value] = line
# Classic two-pointer merge of the real results and the full group list.
2559 while read_group_result or all_groups:
2560 left_side = read_group_result[0] if read_group_result else None
2561 right_side = all_groups[0] if all_groups else None
2562 assert left_side is None or left_side[groupby] is False \
2563 or isinstance(left_side[groupby], (tuple,list)), \
2564 'M2O-like pair expected, got %r' % left_side[groupby]
2565 assert right_side is None or isinstance(right_side, (tuple,list)), \
2566 'M2O-like pair expected, got %r' % right_side
2567 if left_side is None:
2568 append_right(all_groups.pop(0))
2569 elif right_side is None:
2570 append_left(read_group_result.pop(0))
2571 elif left_side[groupby] == right_side:
2572 append_left(read_group_result.pop(0))
2573 all_groups.pop(0) # discard right_side
2574 elif not left_side[groupby] or not left_side[groupby][0]:
2575 # left side == "Undefined" entry, not present on right_side
2576 append_left(read_group_result.pop(0))
2578 append_right(all_groups.pop(0))
# Propagate the hook's fold state onto each group line.
2582 r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
2585 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
2587 Get the list of records in list view grouped by the given ``groupby`` fields
2589 :param cr: database cursor
2590 :param uid: current user id
2591 :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
2592 :param list fields: list of fields present in the list view specified on the object
2593 :param list groupby: fields by which the records will be grouped
2594 :param int offset: optional number of records to skip
2595 :param int limit: optional max number of records to return
2596 :param dict context: context arguments, like lang, time zone
2597 :param list orderby: optional ``order by`` specification, for
2598 overriding the natural sort ordering of the
2599 groups, see also :py:meth:`~osv.osv.osv.search`
2600 (supported only for many2one fields currently)
2601 :return: list of dictionaries(one dictionary for each record) containing:
2603 * the values of fields grouped by the fields in ``groupby`` argument
2604 * __domain: list of tuples specifying the search criteria
2605 * __context: dictionary with argument like ``groupby``
2606 :rtype: [{'field_name_1': value, ...]
2607 :raise AccessError: * if user has no read rights on the requested object
2608 * if user tries to bypass access rules for read on the requested object
2611 context = context or {}
2612 self.check_access_rights(cr, uid, 'read')
2614 fields = self._columns.keys()
# Build the base query from the domain, restricted by record rules.
2616 query = self._where_calc(cr, uid, domain, context=context)
2617 self._apply_ir_rules(cr, uid, query, 'read', context=context)
2619 # Take care of adding join(s) if groupby is an '_inherits'ed field
2620 groupby_list = groupby
2621 qualified_groupby_field = groupby
# Only the first groupby level is computed here; the rest is handed to the
# client via __context (see below).
2623 if isinstance(groupby, list):
2624 groupby = groupby[0]
2625 qualified_groupby_field = self._inherits_join_calc(groupby, query)
2628 assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
2629 groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
2630 assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
2632 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
2633 fget = self.fields_get(cr, uid, fields)
2635 group_count = group_by = groupby
# Only fields validated by fields_get may reach the SQL below — the else
# branch rejects anything else to avoid SQL injection.
2637 if fget.get(groupby):
2638 groupby_type = fget[groupby]['type']
# Dates/datetimes are grouped by calendar month (yyyy-mm).
2639 if groupby_type in ('date', 'datetime'):
2640 qualified_groupby_field = "to_char(%s,'yyyy-mm')" % qualified_groupby_field
2641 flist = "%s as %s " % (qualified_groupby_field, groupby)
2642 elif groupby_type == 'boolean':
2643 qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
2644 flist = "%s as %s " % (qualified_groupby_field, groupby)
2646 flist = qualified_groupby_field
2648 # Don't allow arbitrary values, as this would be a SQL injection vector!
2649 raise except_orm(_('Invalid group_by'),
2650 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
# Aggregate every stored numeric column (except id/sequence) with its
# declared group_operator (default: sum).
2652 aggregated_fields = [
2654 if f not in ('id', 'sequence')
2655 if fget[f]['type'] in ('integer', 'float')
2656 if (f in self._columns and getattr(self._columns[f], '_classic_write'))]
2657 for f in aggregated_fields:
2658 group_operator = fget[f].get('group_operator', 'sum')
2661 qualified_field = '"%s"."%s"' % (self._table, f)
2662 flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)
2664 gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''
2666 from_clause, where_clause, where_clause_params = query.get_sql()
2667 where_clause = where_clause and ' WHERE ' + where_clause
2668 limit_str = limit and ' limit %d' % limit or ''
2669 offset_str = offset and ' offset %d' % offset or ''
2670 if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
# Identifiers are interpolated; only the WHERE params go through the
# driver — everything interpolated above was validated against fields_get.
2672 cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
2675 for r in cr.dictfetchall():
2676 for fld, val in r.items():
2677 if val is None: r[fld] = False
2678 alldata[r['id']] = r
# Re-sort the representative ids with search() so groups follow the
# requested (or model default) ordering.
2681 order = orderby or groupby
2682 data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=order, context=context)
2684 # the IDs of records that have groupby field value = False or '' should be included too
2685 data_ids += set(alldata.keys()).difference(data_ids)
2688 data = self.read(cr, uid, data_ids, [groupby], context=context)
2689 # restore order of the search as read() uses the default _order (this is only for groups, so the footprint of data should be small):
2690 data_dict = dict((d['id'], d[groupby] ) for d in data)
2691 result = [{'id': i, groupby: data_dict[i]} for i in data_ids]
2693 result = [{'id': i} for i in data_ids]
# Decorate each group with its drill-down __domain / __context, and turn
# month buckets back into a date range plus a localized label.
2697 d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
2698 if not isinstance(groupby_list, (str, unicode)):
2699 if groupby or not context.get('group_by_no_leaf', False):
2700 d['__context'] = {'group_by': groupby_list[1:]}
2701 if groupby and groupby in fget:
2702 if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
2703 dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
2704 days = calendar.monthrange(dt.year, dt.month)[1]
2706 date_value = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d')
2707 d[groupby] = babel.dates.format_date(
2708 date_value, format='MMMM yyyy', locale=context.get('lang', 'en_US'))
2709 d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
2710 (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
2711 del alldata[d['id']][groupby]
2712 d.update(alldata[d['id']])
# Optionally pad the result with empty groups (kanban columns etc.).
2715 if groupby and groupby in self._group_by_full:
2716 result = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
2717 aggregated_fields, result, read_group_order=order,
    def _inherits_join_add(self, current_model, parent_model_name, query):
        """
        Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)

        :param current_model: current model object
        :param parent_model_name: name of the parent model for which the clauses should be added
        :param query: query object on which the JOIN should be added
        """
        # The many2one column of current_model that links to the parent record.
        inherits_field = current_model._inherits[parent_model_name]
        parent_model = self.pool.get(parent_model_name)
        # implicit=True: presumably lets the query reuse an already-added join
        # ("no duplicates" per the docstring) -- confirm against Query.add_join.
        parent_alias, parent_alias_statement = query.add_join((current_model._table, parent_model._table, inherits_field, 'id', inherits_field), implicit=True)
        # NOTE(review): _inherits_join_calc uses this method's result as the
        # parent alias -- confirm the trailing ``return parent_alias`` statement.
2734 def _inherits_join_calc(self, field, query):
2736 Adds missing table select and join clause(s) to ``query`` for reaching
2737 the field coming from an '_inherits' parent table (no duplicates).
2739 :param field: name of inherited field to reach
2740 :param query: query object on which the JOIN should be added
2741 :return: qualified name of field, to be used in SELECT clause
2743 current_table = self
2744 parent_alias = '"%s"' % current_table._table
2745 while field in current_table._inherit_fields and not field in current_table._columns:
2746 parent_model_name = current_table._inherit_fields[field][0]
2747 parent_table = self.pool.get(parent_model_name)
2748 parent_alias = self._inherits_join_add(current_table, parent_model_name, query)
2749 current_table = parent_table
2750 return '%s."%s"' % (parent_alias, field)
    def _parent_store_compute(self, cr):
        # Rebuild the parent_left/parent_right nested-set columns used to
        # evaluate hierarchical ('child_of') domains with a single BETWEEN.
        if not self._parent_store:
        _logger.info('Computing parent left and right for table %s...', self._table)
        def browse_rec(root, pos=0):
            # Depth-first walk: each record receives an interval
            # [parent_left, parent_right] that encloses all its descendants.
            where = self._parent_name+'='+str(root)
                # presumably the falsy-root branch: select top-level records
                where = self._parent_name+' IS NULL'
            if self._parent_order:
                where += ' order by '+self._parent_order
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            for id in cr.fetchall():
                pos2 = browse_rec(id[0], pos2)
            cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
        # start the recursion from every root record (no parent set)
        query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
        if self._parent_order:
            query += ' order by ' + self._parent_order
        for (root,) in cr.fetchall():
            pos = browse_rec(root, pos)
    def _update_store(self, cr, f, k):
        # Recompute and persist the values of the stored fields.function
        # column ``k`` (function object ``f``) for every record of the table.
        _logger.info("storing computed values of fields.function '%s'", k)
        # _symbol_set = (SQL placeholder, python->db adapter) for the column
        ss = self._columns[k]._symbol_set
        update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
        cr.execute('select id from '+self._table)
        ids_lst = map(lambda x: x[0], cr.fetchall())
            # records are processed in batches of 40 ids
            ids_lst = ids_lst[40:]
            res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
            for key, val in res.items():
                # if val is a many2one, just write the ID
                if type(val) == tuple:
                if val is not False:
                    cr.execute(update_query, (ss[1](val), key))
    def _check_selection_field_value(self, cr, uid, field, value, context=None):
        """Raise except_orm if value is not among the valid values for the selection field"""
        if self._columns[field]._type == 'reference':
            # reference values look like 'model.name,id': validate the id part
            val_model, val_id_str = value.split(',', 1)
                val_id = long(val_id_str)
                raise except_orm(_('ValidateError'),
                    _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
        # static selection lists can be checked directly; callable selections
        # must be evaluated with the current cr/uid/context first
        if isinstance(self._columns[field].selection, (tuple, list)):
            if val in dict(self._columns[field].selection):
        elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
        raise except_orm(_('ValidateError'),
            _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
    def _check_removed_columns(self, cr, log=False):
        # iterate on the database columns to drop the NOT NULL constraints
        # of fields which were required but have been removed (or will be added by another module)
        # keep only columns that actually live in the table: stored function
        # fields and the magic audit columns are legitimate DB columns
        columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
        columns += MAGIC_COLUMNS
        cr.execute("SELECT a.attname, a.attnotnull"
                   " FROM pg_class c, pg_attribute a"
                   " WHERE c.relname=%s"
                   " AND c.oid=a.attrelid"
                   " AND a.attisdropped=%s"
                   " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
                   " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),

        # every row here is a column present in DB but absent from the model
        for column in cr.dictfetchall():
                _logger.debug("column %s is in the table %s but not in the corresponding object %s",
                              column['attname'], self._table, self._name)
            if column['attnotnull']:
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
                _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                              self._table, column['attname'])
    def _save_constraint(self, cr, constraint_name, type):
        """
        Record the creation of a constraint for this model, to make it possible
        to delete it later when the module is uninstalled. Type can be either
        'f' or 'u' depending on the constraint being a foreign key or not.
        """
        assert type in ('f', 'u')
        # only insert when the (module, constraint) pair is not yet recorded
            SELECT 1 FROM ir_model_constraint, ir_module_module
            WHERE ir_model_constraint.module=ir_module_module.id
                AND ir_model_constraint.name=%s
                AND ir_module_module.name=%s
            """, (constraint_name, self._module))
                INSERT INTO ir_model_constraint
                    (name, date_init, date_update, module, model, type)
                VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
                    (SELECT id FROM ir_module_module WHERE name=%s),
                    (SELECT id FROM ir_model WHERE model=%s), %s)""",
                    (constraint_name, self._module, self._name, type))
    def _save_relation_table(self, cr, relation_table):
        """
        Record the creation of a many2many for this model, to make it possible
        to delete it later when the module is uninstalled.
        """
        # only insert when the (module, relation table) pair is not yet recorded
            SELECT 1 FROM ir_model_relation, ir_module_module
            WHERE ir_model_relation.module=ir_module_module.id
                AND ir_model_relation.name=%s
                AND ir_module_module.name=%s
            """, (relation_table, self._module))
        cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
                      VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
                        (SELECT id FROM ir_module_module WHERE name=%s),
                        (SELECT id FROM ir_model WHERE model=%s))""",
                        (relation_table, self._module, self._name))
2882 # checked version: for direct m2o starting from `self`
2883 def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
2884 assert self.is_transient() or not dest_model.is_transient(), \
2885 'Many2One relationships from non-transient Model to TransientModel are forbidden'
2886 if self.is_transient() and not dest_model.is_transient():
2887 # TransientModel relationships to regular Models are annoying
2888 # usually because they could block deletion due to the FKs.
2889 # So unless stated otherwise we default them to ondelete=cascade.
2890 ondelete = ondelete or 'cascade'
2891 fk_def = (self._table, source_field, dest_model._table, ondelete or 'set null')
2892 self._foreign_keys.add(fk_def)
2893 _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
2895 # unchecked version: for custom cases, such as m2m relationships
2896 def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
2897 fk_def = (source_table, source_field, dest_model._table, ondelete or 'set null')
2898 self._foreign_keys.add(fk_def)
2899 _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
2901 def _drop_constraint(self, cr, source_table, constraint_name):
2902 cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
    def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
        """Make the FK behind an m2o column match the field definition:
        drop stale or duplicated constraints, then re-register the correct
        one in self._foreign_keys (created later by _auto_end)."""
        # Find FK constraint(s) currently established for the m2o field,
        # and see whether they are stale or not
        cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
                             cl2.relname as foreign_table
                      FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                           pg_attribute as att1, pg_attribute as att2
                      WHERE con.conrelid = cl1.oid
                        AND cl1.relname = %s
                        AND con.confrelid = cl2.oid
                        AND array_lower(con.conkey, 1) = 1
                        AND con.conkey[1] = att1.attnum
                        AND att1.attrelid = cl1.oid
                        AND att1.attname = %s
                        AND array_lower(con.confkey, 1) = 1
                        AND con.confkey[1] = att2.attnum
                        AND att2.attrelid = cl2.oid
                        AND att2.attname = %s
                        AND con.contype = 'f'""", (source_table, source_field, 'id'))
        constraints = cr.dictfetchall()
            if len(constraints) == 1:
                # Is it the right constraint?
                # compare the delete rule (via the PG confdeltype letter) and the target table
                if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
                    or cons['foreign_table'] != dest_model._table:
                    # Wrong FK: drop it and recreate
                    _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
                                  source_table, cons['constraint_name'])
                    self._drop_constraint(cr, source_table, cons['constraint_name'])
                    # it's all good, nothing to do!
                # Multiple FKs found for the same field, drop them all, and re-create
                for cons in constraints:
                    _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
                                  source_table, cons['constraint_name'])
                    self._drop_constraint(cr, source_table, cons['constraint_name'])

        # (re-)create the FK
        self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
    def _auto_init(self, cr, context=None):
        """

        Call _field_create and, unless _auto is False:

        - create the corresponding table in database for the model,
        - possibly add the parent columns in database,
        - possibly add the columns 'create_uid', 'create_date', 'write_uid',
          'write_date' in database if _log_access is True (the default),
        - report on database columns no more existing in _columns,
        - remove no more existing not null constraints,
        - alter existing database columns to match _columns,
        - create database tables to match _columns,
        - add database indices to match _columns,
        - save in self._foreign_keys a list a foreign keys to create (see
          _auto_end).

        """
        self._foreign_keys = set()
        raise_on_invalid_object_name(self._name)
        store_compute = False
        update_custom_fields = context.get('update_custom_fields', False)
        self._field_create(cr, context=context)
        # create: table did not exist before this run
        create = not self._table_exist(cr)
        if getattr(self, '_auto', True):
                self._create_table(cr)

            if self._parent_store:
                if not self._parent_columns_exist(cr):
                    self._create_parent_columns(cr)
                    store_compute = True

            # Create the create_uid, create_date, write_uid, write_date, columns if desired.
            if self._log_access:
                self._add_log_columns(cr)

            self._check_removed_columns(cr, log=False)

            # iterate on the "object columns"
            column_data = self._select_column_data(cr)

            for k, f in self._columns.iteritems():
                if k in MAGIC_COLUMNS:
                # Don't update custom (also called manual) fields
                if f.manual and not update_custom_fields:

                if isinstance(f, fields.one2many):
                    self._o2m_raise_on_missing_reference(cr, f)

                elif isinstance(f, fields.many2many):
                    self._m2m_raise_or_create_relation(cr, f)

                    res = column_data.get(k)

                    # The field is not found as-is in database, try if it
                    # exists with an old name.
                    if not res and hasattr(f, 'oldname'):
                        res = column_data.get(f.oldname)
                            cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
                            column_data[k] = res
                            _schema.debug("Table '%s': renamed column '%s' to '%s'",
                                          self._table, f.oldname, k)

                    # The field already exists in database. Possibly
                    # change its type, rename it, drop it or change its
                        f_pg_type = res['typname']
                        f_pg_size = res['size']
                        f_pg_notnull = res['attnotnull']
                        # non-stored function fields need no DB column: drop it
                        if isinstance(f, fields.function) and not f.store and\
                                not getattr(f, 'nodrop', False):
                            _logger.info('column %s (%s) in table %s removed: converted to a function !\n',
                                         k, f.string, self._table)
                            cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
                            _schema.debug("Table '%s': dropped column '%s' with cascade",
                            f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
                                # safe conversions: (pg type, field type, new SQL type, cast suffix)
                                ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
                                ('varchar', 'text', 'TEXT', ''),
                                ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                                ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
                                ('timestamp', 'date', 'date', '::date'),
                                ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                                ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                            # varchar size change: migrate data through a temp column
                            if f_pg_type == 'varchar' and f._type == 'char' and ((f.size is None and f_pg_size) or f_pg_size < f.size):
                                cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
                                cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
                                cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                                _schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
                                              self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited')
                                if (f_pg_type==c[0]) and (f._type==c[1]):
                                    if f_pg_type != f_obj_type:
                                        # known-safe cast: same temp-column migration
                                        cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                                        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
                                        cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
                                        cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                                        _schema.debug("Table '%s': column '%s' changed type from %s to %s",
                                                      self._table, k, c[0], c[1])

                            if f_pg_type != f_obj_type:
                                # no safe cast: park old data in a free "<k>_moved<i>" column
                                        newname = k + '_moved' + str(i)
                                        cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
                                            "WHERE c.relname=%s " \
                                            "AND a.attname=%s " \
                                            "AND c.oid=a.attrelid ", (self._table, newname))
                                        if not cr.fetchone()[0]:
                                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                                cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
                                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                                cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                                _schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
                                              self._table, k, f_pg_type, f._type, newname)

                            # if the field is required and hasn't got a NOT NULL constraint
                            if f.required and f_pg_notnull == 0:
                                # set the field to the default value if any
                                if k in self._defaults:
                                    if callable(self._defaults[k]):
                                        default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                                        default = self._defaults[k]

                                    if default is not None:
                                        ss = self._columns[k]._symbol_set
                                        query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
                                        cr.execute(query, (ss[1](default),))
                                # add the NOT NULL constraint
                                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                                    _schema.debug("Table '%s': column '%s': added NOT NULL constraint",
                                    msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
                                        "If you want to have it, you should update the records and execute manually:\n"\
                                        "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                                    _schema.warning(msg, self._table, k, self._table, k)
                            elif not f.required and f_pg_notnull == 1:
                                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                                _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                            # keep the single-column index in sync with f.select
                            indexname = '%s_%s_index' % (self._table, k)
                            cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
                            res2 = cr.dictfetchall()
                            if not res2 and f.select:
                                cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                                if f._type == 'text':
                                    # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
                                    msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\
                                        "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
                                        " because there is a length limit for indexable btree values!\n"\
                                        "Use a search view instead if you simply want to make the field searchable."
                                    _schema.warning(msg, self._table, f._type, k)
                            if res2 and not f.select:
                                cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
                                msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
                                _schema.debug(msg, self._table, k, f._type)

                            if isinstance(f, fields.many2one):
                                dest_model = self.pool.get(f._obj)
                                if dest_model._table != 'ir_actions':
                                    self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete)

                    # The field doesn't exist in database. Create it if necessary.
                        if not isinstance(f, fields.function) or f.store:
                            # add the missing field
                            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                            cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                            _schema.debug("Table '%s': added column '%s' with definition=%s",
                                          self._table, k, get_pg_type(f)[1])

                            # backfill existing rows with the field's default
                            if not create and k in self._defaults:
                                if callable(self._defaults[k]):
                                    default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                                    default = self._defaults[k]

                                ss = self._columns[k]._symbol_set
                                query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
                                cr.execute(query, (ss[1](default),))
                                _logger.debug("Table '%s': setting default value of new column %s", self._table, k)

                            # remember the functions to call for the stored fields
                            if isinstance(f, fields.function):
                                if f.store is not True: # i.e. if f.store is a dict
                                    order = f.store[f.store.keys()[0]][2]
                                todo_end.append((order, self._update_store, (f, k)))

                            # and add constraints if needed
                            if isinstance(f, fields.many2one):
                                if not self.pool.get(f._obj):
                                    raise except_orm('Programming Error', 'There is no reference available for %s' % (f._obj,))
                                dest_model = self.pool.get(f._obj)
                                ref = dest_model._table
                                # ir_actions is inherited so foreign key doesn't work on it
                                if ref != 'ir_actions':
                                    self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
                                cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                                    _schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
                                    msg = "WARNING: unable to set column %s of table %s not null !\n"\
                                        "Try to re-run: openerp-server --update=module\n"\
                                        "If it doesn't work, update records and execute manually:\n"\
                                        "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                                    _logger.warning(msg, k, self._table, self._table, k)

            # _auto disabled: only detect whether the relation/view exists
            cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
            create = not bool(cr.fetchone())

        cr.commit() # start a new transaction

        self._add_sql_constraints(cr)

            self._execute_sql(cr)

            self._parent_store_compute(cr)
    def _auto_end(self, cr, context=None):
        """ Create the foreign keys recorded by _auto_init. """
        for t, k, r, d in self._foreign_keys:
            # t=source table, k=source column, r=referenced table, d=delete rule
            cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
            # register the constraint so uninstalling the module can drop it
            self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
        del self._foreign_keys
    def _table_exist(self, cr):
        """Check pg_class for the model's backing table or view
        (callers such as _auto_init treat the result as a boolean)."""
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
3235 def _create_table(self, cr):
3236 cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id))' % (self._table,))
3237 cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,))
3238 _schema.debug("Table '%s': created", self._table)
    def _parent_columns_exist(self, cr):
        """Check whether the nested-set support column 'parent_left' is
        already present on the model's table."""
        cr.execute("""SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, 'parent_left'))
    def _create_parent_columns(self, cr):
        """Add the parent_left/parent_right nested-set columns to the table,
        logging errors when the model does not declare matching indexed
        fields or a safe ondelete rule on the parent m2o."""
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
        if 'parent_left' not in self._columns:
            _logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, 'parent_left', 'INTEGER')
        elif not self._columns['parent_left'].select:
            _logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
        if 'parent_right' not in self._columns:
            _logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, 'parent_right', 'INTEGER')
        elif not self._columns['parent_right'].select:
            _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
        # deleting a parent must not orphan children silently
        if self._columns[self._parent_name].ondelete not in ('cascade', 'restrict'):
            _logger.error("The column %s on object %s must be set as ondelete='cascade' or 'restrict'",
                          self._parent_name, self._name)
    def _add_log_columns(self, cr):
        """Add the audit-trail columns (LOG_ACCESS_COLUMNS maps column name
        to its SQL definition) that are missing from the table."""
        for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
                FROM pg_class c, pg_attribute a
                WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                """, (self._table, field))
            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, field, field_def)
3289 def _select_column_data(self, cr):
3290 # attlen is the number of bytes necessary to represent the type when
3291 # the type has a fixed size. If the type has a varying size attlen is
3292 # -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
3293 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \
3294 "FROM pg_class c,pg_attribute a,pg_type t " \
3295 "WHERE c.relname=%s " \
3296 "AND c.oid=a.attrelid " \
3297 "AND a.atttypid=t.oid", (self._table,))
3298 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
    def _o2m_raise_on_missing_reference(self, cr, f):
        # TODO this check should be a method on fields.one2many.
            other = self.pool.get(f._obj)
            # TODO the condition could use fields_get_keys().
            if f._fields_id not in other._columns.keys():
                if f._fields_id not in other._inherit_fields.keys():
                    # the inverse m2o of the o2m exists neither directly nor
                    # through _inherits on the comodel: broken field definition
                    raise except_orm('Programming Error', "There is no reference field '%s' found for '%s'" % (f._fields_id, f._obj,))
    def _m2m_raise_or_create_relation(self, cr, f):
        """Create the m2m relation table for field ``f`` when it is missing:
        two NOT NULL integer columns with a UNIQUE pair, cascading FKs
        (unless a target is an SQL view), one index per column and an SQL
        comment. Raises when the destination model is unknown."""
        m2m_tbl, col1, col2 = f._sql_names(self)
        # remember the table so uninstalling the module can drop it
        self._save_relation_table(cr, m2m_tbl)
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
        if not cr.dictfetchall():
            if not self.pool.get(f._obj):
                raise except_orm('Programming Error', 'Many2Many destination model does not exist: `%s`' % (f._obj,))
            dest_model = self.pool.get(f._obj)
            ref = dest_model._table
            cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s"))' % (m2m_tbl, col1, col2, col1, col2))
            # create foreign key references with ondelete=cascade, unless the targets are SQL views
            cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
            if not cr.fetchall():
                self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
            cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
            if not cr.fetchall():
                self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
            cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
            _schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
    def _add_sql_constraints(self, cr):
        """
        Modify this model's database table constraints so they match the one in
        _sql_constraints.
        """
        # normalize spacing so textually-different but equivalent constraint
        # definitions compare equal against pg_get_constraintdef output
        def unify_cons_text(txt):
            return txt.lower().replace(', ',',').replace(' (','(')

        for (key, con, _) in self._sql_constraints:
            conname = '%s_%s' % (self._table, key)
            self._save_constraint(cr, conname, 'u')
            cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
            existing_constraints = cr.dictfetchall()
                    'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
                    'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
                        self._table, conname, con),
                    'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
                    'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
                    'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
                    'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (

            if not existing_constraints:
                # constraint does not exists:
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
                # constraint exists but its definition has changed:
                sql_actions['drop']['execute'] = True
                sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )

            # we need to add the constraint:
            # run the selected actions in deterministic order (drop before add)
            sql_actions = [item for item in sql_actions.values()]
            sql_actions.sort(key=lambda x: x['order'])
            for sql_action in [action for action in sql_actions if action['execute']]:
                    cr.execute(sql_action['query'])
                    _schema.debug(sql_action['msg_ok'])
                    _schema.warning(sql_action['msg_err'])
    def _execute_sql(self, cr):
        """ Execute the SQL code from the _sql attribute (if any)."""
        if hasattr(self, "_sql"):
            # statements are separated by ';'; embedded newlines are removed
            # before execution
            for line in self._sql.split(';'):
                line2 = line.replace('\n', '').strip()
3405 # Update objects that uses this one to update their _inherits fields
3408 def _inherits_reload_src(self):
3409 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3410 for obj in self.pool.models.values():
3411 if self._name in obj._inherits:
3412 obj._inherits_reload()
    def _inherits_reload(self):
        """ Recompute the _inherit_fields mapping.

        This will also call itself on each inherits'd child model.

        """
        for table in self._inherits:
            other = self.pool.get(table)
            # direct columns of the parent model...
            for col in other._columns.keys():
                res[col] = (table, self._inherits[table], other._columns[col], table)
            # ...plus columns the parent itself inherits; the fourth element
            # keeps the model that originally declared the column
            for col in other._inherit_fields.keys():
                res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
        self._inherit_fields = res
        self._all_columns = self._get_column_infos()
        # cascade so models inheriting from this one refresh their mapping too
        self._inherits_reload_src()
    def _get_column_infos(self):
        """Returns a dict mapping all fields names (direct fields and
        inherited field via _inherits) to a ``column_info`` struct
        giving detailed columns """
        # inherited fields first, so a locally-defined column with the same
        # name takes precedence in the second loop
        for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
            result[k] = fields.column_info(k, col, parent, m2o, original_parent)
        for k, col in self._columns.iteritems():
            result[k] = fields.column_info(k, col)
3445 def _inherits_check(self):
3446 for table, field_name in self._inherits.items():
3447 if field_name not in self._columns:
3448 _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, self._name)
3449 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
3450 required=True, ondelete="cascade")
3451 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() not in ("cascade", "restrict"):
3452 _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, self._name)
3453 self._columns[field_name].required = True
3454 self._columns[field_name].ondelete = "cascade"
3456 #def __getattr__(self, name):
3458 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3459 # (though inherits doesn't use Python inheritance).
3460 # Handles translating between local ids and remote ids.
3461 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3462 # when you have inherits.
3464 # for model, field in self._inherits.iteritems():
3465 # proxy = self.pool.get(model)
3466 # if hasattr(proxy, name):
3467 # attribute = getattr(proxy, name)
3468 # if not hasattr(attribute, '__call__'):
3472 # return super(orm, self).__getattr__(name)
3474 # def _proxy(cr, uid, ids, *args, **kwargs):
3475 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3476 # lst = [obj[field].id for obj in objects if obj[field]]
3477 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
    def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
        """ Return the definition of each field.

        The returned value is a dictionary (indiced by field name) of
        dictionaries. The _inherits'd fields are included. The string, help,
        and selection (if present) attributes are translated.

        :param cr: database cursor
        :param user: current user id
        :param allfields: list of fields
        :param context: context arguments, like lang, time zone
        :return: dictionary of field dictionaries, each one describing a field of the business object
        :raise AccessError: * if user has no create/write rights on the requested object
        """
        # fields are reported read-only when the user can neither write nor create
        write_access = self.check_access_rights(cr, user, 'write', raise_exception=False) \
            or self.check_access_rights(cr, user, 'create', raise_exception=False)

        translation_obj = self.pool.get('ir.translation')
        # start from the definitions of all _inherits'd parent models
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

        for f, field in self._columns.iteritems():
            # skip fields that were not requested or are restricted to groups
            # the user does not belong to
            if (allfields and f not in allfields) or \
                    (field.groups and not self.user_has_groups(cr, user, groups=field.groups, context=context)):

            res[f] = fields.field_to_dict(self, cr, user, field, context=context)

            if not write_access:
                res[f]['readonly'] = True
                res[f]['states'] = {}

            # translate the label, tooltip and selection labels when a
            # language is present in the context
            if 'lang' in context:
                if 'string' in res[f]:
                    res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context['lang'])
                        res[f]['string'] = res_trans
                if 'help' in res[f]:
                    help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context['lang'])
                        res[f]['help'] = help_trans
                if 'selection' in res[f]:
                    if isinstance(field.selection, (tuple, list)):
                        sel = field.selection
                        for key, val in sel:
                                val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context['lang'], val)
                            # fall back to the untranslated label
                            sel2.append((key, val2 or val))
                        res[f]['selection'] = sel2
    def check_field_access_rights(self, cr, user, operation, fields, context=None):
        """
        Check the user access rights on the given fields. This raises Access
        Denied if the user does not have the rights. Otherwise it returns the
        fields (as is if the fields is not falsy, or the readable/writable
        fields if fields is falsy).
        """
        # NOTE(review): the header of the inner predicate (``def p(field_name):``),
        # its default ``return True`` branches, the if/else around the two
        # ``filter`` calls and the final ``raise``/``return`` are elided in this
        # excerpt -- confirm against the full file.
            """Predicate to test if the user has access to the given field name."""
            # Ignore requested field if it doesn't exist. This is ugly but
            # it seems to happen at least with 'name_alias' on res.partner.
            if field_name not in self._all_columns:
            field = self._all_columns[field_name].column
            # Only non-superusers are subject to per-field group restrictions.
            if user != SUPERUSER_ID and field.groups:
                return self.user_has_groups(cr, user, groups=field.groups, context=context)
        # No explicit field list requested: return every accessible field.
        fields = filter(p, self._all_columns.keys())
        # Explicit field list: reject the whole request if any field is restricted.
        filtered_fields = filter(lambda a: not p(a), fields)
        _logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s, fields: %s', operation, user, self._name, ', '.join(filtered_fields))
            _('The requested operation cannot be completed due to security restrictions. '
              'Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
                (self._description, operation))
    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        """ Read records with given ids with the given fields

        :param cr: database cursor
        :param user: current user id
        :param ids: id or list of the ids of the records to read
        :param fields: optional list of field names to return (default: all fields would be returned)
        :type fields: list (example ['field_name_1', ...])
        :param context: optional context dictionary - it may contain keys for specifying certain options
                        like ``context_lang``, ``context_tz`` to alter the results of the call.
                        A special ``bin_size`` boolean flag may also be passed in the context to request the
                        value of all fields.binary columns to be returned as the size of the binary instead of its
                        contents. This can also be selectively overridden by passing a field-specific flag
                        in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                        Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
        :return: list of dictionaries((dictionary per record asked)) with requested field values
        :rtype: [{'name_of_the_field': value, ...}, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        """
        # NOTE(review): a few statements are elided in this excerpt (the
        # normalization of a scalar ``ids`` into ``select``, the result
        # post-processing loop header, and the final return) -- confirm
        # against the full file.
        self.check_access_rights(cr, user, 'read')
        # Restrict the requested fields to those the user may actually read.
        fields = self.check_field_access_rights(cr, user, 'read', fields)
        if isinstance(ids, (int, long)):
        # Accept browse-record-like dicts in the id list by extracting their id.
        select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
        result = self._read_flat(cr, user, select, fields, context, load)

        for key, v in r.items():

        # A scalar id in means a single record dict (or False) out.
        if isinstance(ids, (int, long, dict)):
            return result and result[0] or False
    def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
        """Low-level implementation of read(): fetch ``fields_to_read`` for
        ``ids`` from the database, applying record rules, translations,
        _inherits parents and function (non-stored) field computation.

        NOTE(review): numerous statements are elided in this excerpt
        (empty-ids shortcut, context defaulting, several loop headers,
        accumulator initializations and the final return) -- confirm control
        flow against the full file.
        """
        if fields_to_read is None:
            fields_to_read = self._columns.keys()

        # Construct a clause for the security rules.
        # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
        # or will at least contain self._table.
        rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

        # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
        fields_pre = [f for f in fields_to_read if
                      f == self.CONCURRENCY_CHECK_FIELD
                      or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                     ] + self._inherits.values()

        def convert_field(f):
            """Render one requested column as a SQL SELECT expression."""
            f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
            if f in ('create_date', 'write_date'):
                # Truncate to the second: sub-second precision is not exposed.
                return "date_trunc('second', %s) as %s" % (f_qual, f)
            if f == self.CONCURRENCY_CHECK_FIELD:
                # Pseudo-column used by the optimistic concurrency check.
                if self._log_access:
                    return "COALESCE(%s.write_date, %s.create_date, (now() at time zone 'UTC'))::timestamp AS %s" % (self._table, self._table, f,)
                return "(now() at time zone 'UTC')::timestamp AS %s" % (f,)
            if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                # bin_size mode: return the payload size instead of its content.
                return 'length(%s) as "%s"' % (f_qual, f)

        fields_pre2 = map(convert_field, fields_pre)
        order_by = self._parent_order or self._order
        select_fields = ','.join(fields_pre2 + ['%s.id' % self._table])
        query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
        query += " AND " + (' OR '.join(rule_clause))
        query += " ORDER BY " + order_by
        # Fetch in id chunks to stay within SQL parameter limits.
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute(query, [tuple(sub_ids)] + rule_params)
            results = cr.dictfetchall()
            result_ids = [x['id'] for x in results]
            # Raise if record rules hid (or concurrent deletion removed) some ids.
            self._check_record_rules_result_count(cr, user, sub_ids, result_ids, 'read', context=context)

        res = map(lambda x: {'id': x}, ids)

        # Overlay stored translations for translatable classic fields.
        if context.get('lang'):
            for f in fields_pre:
                if f == self.CONCURRENCY_CHECK_FIELD:
                if self._columns[f].translate:
                    ids = [x['id'] for x in res]
                    #TODO: optimize out of this loop
                    res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context['lang'], ids)
                    r[f] = res_trans.get(r['id'], False) or r[f]

        # Read the _inherits parents and merge their values into each record.
        for table in self._inherits:
            col = self._inherits[table]
            # Inherited fields requested here but not redefined locally.
            cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
            res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
            if not record[col]: # if the record is deleted from _inherits table?
            record.update(res3[record[col]])
            if col not in fields_to_read:

        # all fields which need to be post-processed by a simple function (symbol_get)
        fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
        for f in fields_post:
            r[f] = self._columns[f]._symbol_get(r[f])
        ids = [x['id'] for x in res]

        # all non inherited fields for which the attribute whose name is in load is False
        fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

        # Compute POST fields
        # Group function fields by their ``multi`` key so that fields sharing
        # one compute function are evaluated with a single call.
        for f in fields_post:
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
        for key, val in todo.items():
            res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
            assert res2 is not None, \
                'The function field "%s" on the "%s" model returned None\n' \
                '(a dictionary was expected).' % (val[0], self._name)
            if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
            multi_fields = res2.get(record['id'],{})
            record[pos] = multi_fields.get(pos,[])
            # Non-multi branch: one compute call per field.
            res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
            record[f] = res2[record['id']]

        # Warn about deprecated fields now that fields_pre and fields_post are computed
        # Explicitly use list() because we may receive tuples
        for f in list(fields_pre) + list(fields_post):
            field_column = self._all_columns.get(f) and self._all_columns.get(f).column
            if field_column and field_column.deprecated:
                _logger.warning('Field %s.%s is deprecated: %s', self._name, f, field_column.deprecated)
        # NOTE(review): the enclosing ``def`` header of this section is elided
        # in this excerpt. The visible body appears to enforce per-field group
        # restrictions by masking values in ``vals`` with a type-appropriate
        # "no permission" marker for users outside the field's groups --
        # confirm the function boundary and missing branches against the
        # full file.
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
            for group in groups:
                # Groups are referenced by fully-qualified xml id: module.name.
                module = group.split(".")[0]
                grp = group.split(".")[1]
                # Membership test: count the user's memberships in this group.
                cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                           (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:
                elif readonly[0][0] == 0:
            # Mask the value according to its Python type (branches for list
            # and float markers are elided in this excerpt).
            if type(vals[field]) == type([]):
            elif type(vals[field]) == type(0.0):
            elif type(vals[field]) == type(''):
                vals[field] = '=No Permission='
    # TODO check READ access
    def perm_read(self, cr, user, ids, context=None, details=True):
        """
        Returns some metadata about the given records.

        :param details: if True, \*_uid fields are replaced with the name of the user
        :return: list of ownership dictionaries for each requested record
        :rtype: list of dictionaries with the following keys:

                * create_uid: user who created the record
                * create_date: date when the record was created
                * write_uid: last user who changed the record
                * write_date: date of the last change to the record
                * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
        """
        # NOTE(review): several statements are elided in this excerpt (ids
        # normalization, the base ``fields`` list, the per-record/per-key loop
        # headers, the try/except around name_get and the final return) --
        # confirm against the full file.
        uniq = isinstance(ids, (int, long))  # remember scalar input so the result can be unwrapped
        if self._log_access:
            fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
        quoted_table = '"%s"' % self._table
        fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
        # LEFT JOIN on ir_model_data to pick up an XML id when one exists.
        query = '''SELECT %s, __imd.module, __imd.name
                   FROM %s LEFT JOIN ir_model_data __imd
                   ON (__imd.model = %%s and __imd.res_id = %s.id)
                   WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
        cr.execute(query, (self._name, tuple(ids)))
        res = cr.dictfetchall()
        r[key] = r[key] or False
        # Replace raw uid values by (id, name) pairs when details are requested.
        if details and key in ('write_uid', 'create_uid') and r[key]:
            r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
            pass # Leave the numeric uid there
        # Expose the fully-qualified xml id, or False when the record has none.
        r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
        del r['name'], r['module']
    def _check_concurrency(self, cr, ids, context):
        """Optimistic concurrency check: raise a 'ConcurrencyException' if any
        of ``ids`` was modified in the database after the timestamp the client
        supplied in the context under ``self.CONCURRENCY_CHECK_FIELD``.

        NOTE(review): a few statements are elided in this excerpt (early
        returns, the ``ids_to_check`` initialization, the per-id loop header
        and the fetch of the violating row) -- confirm against the full file.
        """
        # Nothing to check unless the client sent timestamps and the model logs access.
        if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
        # One (id, client_timestamp) pair per record; matches rows changed since.
        check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)"
        for sub_ids in cr.split_for_in_conditions(ids):
            id_ref = "%s,%s" % (self._name, id)
            # pop(): each client-supplied timestamp may only be checked once.
            update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
            ids_to_check.extend([id, update_date])
            if not ids_to_check:
            cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
            # mention the first one only to keep the error message readable
            raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
    def _check_record_rules_result_count(self, cr, uid, ids, result_ids, operation, context=None):
        """Verify the returned rows after applying record rules matches
        the length of `ids`, and raise an appropriate exception if it does not.

        NOTE(review): a few guards are elided in this excerpt (the early
        return when nothing is missing, the rowcount test after the SELECT and
        the superuser shortcut body) -- confirm against the full file.
        """
        ids, result_ids = set(ids), set(result_ids)
        missing_ids = ids - result_ids
        # Attempt to distinguish record rule restriction vs deleted records,
        # to provide a more specific error message - check if the missing
        # ids are still present in the table.
        cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(missing_ids),))
        # the missing ids are (at least partially) hidden by access rules
        if uid == SUPERUSER_ID:
        _logger.warning('Access Denied by record rules for operation: %s, uid: %s, model: %s', operation, uid, self._name)
        raise except_orm(_('Access Denied'),
                         _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
                            (self._description, operation))
        # If we get here, the missing_ids are not in the database
        if operation in ('read','unlink'):
            # No need to warn about deleting an already deleted record.
            # And no error when reading a record that was deleted, to prevent spurious
            # errors for non-transactional search/read sequences coming from clients
        _logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name)
        raise except_orm(_('Missing document(s)'),
                         _('One of the documents you are trying to access has been deleted, please try again after refreshing.'))
3875 def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
3876 """Verifies that the operation given by ``operation`` is allowed for the user
3877 according to the access rights."""
3878 return self.pool.get('ir.model.access').check(cr, uid, self._name, operation, raise_exception)
    def check_access_rule(self, cr, uid, ids, operation, context=None):
        """Verifies that the operation given by ``operation`` is allowed for the user
        according to ir.rules.

        :param operation: one of ``write``, ``unlink``
        :raise except_orm: * if current ir.rules do not permit this operation.
        :return: None if the operation is allowed
        """
        # NOTE(review): a few statements are elided in this excerpt (the early
        # return for the superuser, the FROM line of the transient-model query,
        # and the guard around the rule clause) -- confirm against the full file.
        if uid == SUPERUSER_ID:

        if self.is_transient():
            # Only one single implicit access rule for transient models: owner only!
            # This is ok to hardcode because we assert that TransientModels always
            # have log_access enabled so that the create_uid column is always there.
            # And even with _inherits, these fields are always present in the local
            # table too, so no need for JOINs.
            cr.execute("""SELECT distinct create_uid
                          WHERE id IN %%s""" % self._table, (tuple(ids),))
            uids = [x[0] for x in cr.fetchall()]
            # Every requested record must have been created by the caller.
            if len(uids) != 1 or uids[0] != uid:
                raise except_orm(_('Access Denied'),
                                 _('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,))

        # Delegate rule computation to ir.rule, then verify each id chunk.
        where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
        where_clause = ' and ' + ' and '.join(where_clause)
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                       ' WHERE ' + self._table + '.id IN %s' + where_clause,
                       [sub_ids] + where_params)
            returned_ids = [x['id'] for x in cr.dictfetchall()]
            # Missing ids mean rule restriction or concurrently-deleted records.
            self._check_record_rules_result_count(cr, uid, sub_ids, returned_ids, operation, context=context)
    def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
        """Call given workflow trigger as a result of a CRUD operation"""
        # NOTE(review): the per-id loop header (presumably ``for res_id in
        # ids:``) is elided in this excerpt -- confirm against the full file.
        wf_service = netsvc.LocalService("workflow")
        # ``trigger`` names a method on the workflow service (e.g. trg_create,
        # trg_delete), invoked once per record id.
        getattr(wf_service, trigger)(uid, self._name, res_id, cr)
    def _workflow_signal(self, cr, uid, ids, signal, context=None):
        """Send given workflow signal and return a dict mapping ids to workflow results"""
        # NOTE(review): the ``result`` initialization, the per-id loop header
        # and the final return are elided in this excerpt -- confirm against
        # the full file.
        wf_service = netsvc.LocalService("workflow")
        # Validate the signal on each record's workflow instance and keep the
        # per-record result.
        result[res_id] = wf_service.trg_validate(uid, self._name, res_id, signal, cr)
    def unlink(self, cr, uid, ids, context=None):
        """
        Delete records with given ids

        :param cr: database cursor
        :param uid: current user id
        :param ids: id or list of ids
        :param context: (optional) context arguments, like lang, time zone
        :raise AccessError: * if user has no unlink rights on the requested object
                            * if user tries to bypass access rules for unlink on the requested object
        :raise UserError: if the record is default property for other records
        """
        # NOTE(review): several statements are elided in this excerpt (the
        # empty-ids shortcut, part of the default-property domain, guards
        # before the cleanup unlink calls, the final recompute guard and the
        # return) -- confirm against the full file.
        if isinstance(ids, (int, long)):

        # Snapshot which stored function fields must be recomputed after deletion.
        result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context)

        self._check_concurrency(cr, ids, context)

        self.check_access_rights(cr, uid, 'unlink')

        ir_property = self.pool.get('ir.property')

        # Check if the records are used as default properties.
        domain = [('res_id', '=', False),
                  ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
                 ]
        if ir_property.search(cr, uid, domain, context=context):
            raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))

        # Delete the records' properties.
        property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context)
        ir_property.unlink(cr, uid, property_ids, context=context)

        self._workflow_trigger(cr, uid, ids, 'trg_delete', context=context)

        self.check_access_rule(cr, uid, ids, 'unlink', context=context)
        pool_model_data = self.pool.get('ir.model.data')
        ir_values_obj = self.pool.get('ir.values')
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('delete from ' + self._table + ' ' \
                       'where id IN %s', (sub_ids,))

            # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
            # as these are not connected with real database foreign keys, and would be dangling references.
            # Note: following steps performed as admin to avoid access rights restrictions, and with no context
            # to avoid possible side-effects during admin calls.
            # Step 1. Calling unlink of ir_model_data only for the affected IDS
            reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
            # Step 2. Marching towards the real deletion of referenced records
            pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)

            # For the same reason, removing the record relevant to ir_values
            ir_value_ids = ir_values_obj.search(cr, uid,
                    ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
            ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)

        # Recompute stored function fields on *other* models that referenced
        # the deleted records, restricted to ids that still exist.
        for order, object, store_ids, fields in result_store:
            if object != self._name:
                obj = self.pool.get(object)
                cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
                rids = map(lambda x: x[0], cr.fetchall())
                obj._store_set_values(cr, uid, rids, fields, context)
    def write(self, cr, user, ids, vals, context=None):
        """
        Update records with given ids with the given field values

        :param cr: database cursor
        :param user: current user id
        :param ids: object id or list of object ids to update according to **vals**
        :param vals: field values to update, e.g {'field_name': new_field_value, ...}
        :type vals: dictionary
        :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
        :type context: dictionary
        :raise AccessError: * if user has no write rights on the requested object
                            * if user tries to bypass access rules for write on the requested object
        :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
        :raise UserError: if a loop would be created in a hierarchy of objects as a result of the operation (such as setting an object as its own parent)

        **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:

            + For a many2many field, a list of tuples is expected.
              Here is the list of tuple that are accepted, with the corresponding semantics ::

                 (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
                 (1, ID, { values })    update the linked record with id = ID (write *values* on it)
                 (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
                 (3, ID)                cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
                 (4, ID)                link to existing record with id = ID (adds a relationship)
                 (5)                    unlink all (like using (3,ID) for all linked records)
                 (6, 0, [IDs])          replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)

                 Example:
                    [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]

            + For a one2many field, a list of tuples is expected.
              Here is the list of tuple that are accepted, with the corresponding semantics ::

                 (0, 0,  { values })    link to a new record that needs to be created with the given values dictionary
                 (1, ID, { values })    update the linked record with id = ID (write *values* on it)
                 (2, ID)                remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)

                 Example:
                    [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]

            + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
            + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
        """
        # NOTE(review): many statements are elided in this excerpt (context
        # defaulting, readonly-field masking, accumulator initializations,
        # several loop/guard headers, else branches and the final return) --
        # confirm control flow against the full file.
        self.check_field_access_rights(cr, user, 'write', vals.keys())
        # Drop values the user may not write due to per-field ``groups``.
        for field in vals.copy():
            if field in self._columns:
                fobj = self._columns[field]
            elif field in self._inherit_fields:
                fobj = self._inherit_fields[field][2]
            for group in groups:
                # Groups are referenced by fully-qualified xml id: module.name.
                module = group.split(".")[0]
                grp = group.split(".")[1]
                cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                           (grp, module, 'res.groups', user))
                readonly = cr.fetchall()
                if readonly[0][0] >= 1:

        if isinstance(ids, (int, long)):

        self._check_concurrency(cr, ids, context)
        self.check_access_rights(cr, user, 'write')

        # Stored function fields impacted by this write (recomputed at the end).
        result = self._store_get_values(cr, user, ids, vals.keys(), context) or []

        # No direct update of parent_left/right
        vals.pop('parent_left', None)
        vals.pop('parent_right', None)

        parents_changed = []
        parent_order = self._parent_order or self._order
        if self._parent_store and (self._parent_name in vals):
            # The parent_left/right computation may take up to
            # 5 seconds. No need to recompute the values if the
            # parent is the same.
            # Note: to respect parent_order, nodes must be processed in
            # order, so ``parents_changed`` must be ordered properly.
            parent_val = vals[self._parent_name]
            # Two variants: records whose parent actually changes vs records
            # being detached from any parent.
            query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
                    (self._table, self._parent_name, self._parent_name, parent_order)
            cr.execute(query, (tuple(ids), parent_val))
            query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
                    (self._table, self._parent_name, parent_order)
            cr.execute(query, (tuple(ids),))
            parents_changed = map(operator.itemgetter(0), cr.fetchall())

        # Split vals into direct SQL column updates (upd0/upd1), function-field
        # writes (upd_todo) and inherited-field writes (updend).
        totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
        field_column = self._all_columns.get(field) and self._all_columns.get(field).column
        if field_column and field_column.deprecated:
            _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
        if field in self._columns:
            if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
                # Translatable fields in a non-en_US context are handled later.
                if (not totranslate) or not self._columns[field].translate:
                    upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
                    upd1.append(self._columns[field]._symbol_set[1](vals[field]))
                direct.append(field)
            upd_todo.append(field)
        updend.append(field)
        # Validate values of selection fields.
        if field in self._columns \
                and hasattr(self._columns[field], 'selection') \
            self._check_selection_field_value(cr, user, field, vals[field], context=context)

        if self._log_access:
            upd0.append('write_uid=%s')
            upd0.append("write_date=(now() at time zone 'UTC')")

        self.check_access_rule(cr, user, ids, 'write', context=context)
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
                       'where id IN %s', upd1 + [sub_ids])
            # Fewer updated rows than ids means concurrent deletion.
            if cr.rowcount != len(sub_ids):
                raise except_orm(_('AccessError'),
                                 _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)

        # Translatable fields: keep the source value in the table and store
        # the translated value in ir.translation.
        if self._columns[f].translate:
            src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
            # Inserting value to DB
            self.write(cr, user, ids, {f: vals[f]})
            self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)

        # call the 'set' method of fields which are not classic_write
        upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

        # default element in context must be removed when call a one2many or many2many
        rel_context = context.copy()
        for c in context.items():
            if c[0].startswith('default_'):
                del rel_context[c[0]]

        for field in upd_todo:
            result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []

        # Propagate inherited-field values to the _inherits parent records.
        unknown_fields = updend[:]
        for table in self._inherits:
            col = self._inherits[table]
            for sub_ids in cr.split_for_in_conditions(ids):
                cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
                           'where id IN %s', (sub_ids,))
                nids.extend([x[0] for x in cr.fetchall()])
            if self._inherit_fields[val][0] == table:
                unknown_fields.remove(val)
            self.pool.get(table).write(cr, user, nids, v, context)
            'No such field(s) in model %s: %s.',
            self._name, ', '.join(unknown_fields))
        self._validate(cr, user, ids, context)

        # TODO: use _order to set dest at the right position and not first node of parent
        # We can't defer parent_store computation because the stored function
        # fields that are computed may refer (directly or indirectly) to
        # parent_left/right (via a child_of domain)
        self.pool._init_parent[self._name] = True
        order = self._parent_order or self._order
        parent_val = vals[self._parent_name]
        # Siblings selector: children of the new parent, or roots when detached.
        clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
        clause, params = '%s IS NULL' % (self._parent_name,), ()

        for id in parents_changed:
            cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
            pleft, pright = cr.fetchone()
            distance = pright - pleft + 1

            # Positions of current siblings, to locate proper insertion point;
            # this can _not_ be fetched outside the loop, as it needs to be refreshed
            # after each update, in case several nodes are sequentially inserted one
            # next to the other (i.e computed incrementally)
            cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
            parents = cr.fetchall()

            # Find Position of the element
            for (parent_pright, parent_id) in parents:
                position = parent_pright + 1

            # It's the first node of the parent
            cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
            position = cr.fetchone()[0] + 1

            # Moving a node under one of its own descendants would corrupt the tree.
            if pleft < position <= pright:
                raise except_orm(_('UserError'), _('Recursivity Detected.'))

            # Shift the [pleft, pright] interval to ``position``: widen the gap
            # at the destination, move the subtree, then close the old gap.
            if pleft < position:
                cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
                cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
                cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
            cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
            cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))

        result += self._store_get_values(cr, user, ids, vals.keys(), context)
        # Recompute stored function fields, deduplicating per
        # (model, fields, id) so each computation runs at most once.
        for order, object, ids_to_update, fields_to_recompute in result:
            key = (object, tuple(fields_to_recompute))
            done.setdefault(key, {})
            # avoid to do several times the same computation
            for id in ids_to_update:
                if id not in done[key]:
                    done[key][id] = True
            self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)

        self._workflow_trigger(cr, user, ids, 'trg_write', context=context)
4276 # TODO: Should set perm to user.xxx
4278 def create(self, cr, user, vals, context=None):
4280 Create a new record for the model.
4282 The values for the new record are initialized using the ``vals``
4283 argument, and if necessary the result of ``default_get()``.
4285 :param cr: database cursor
4286 :param user: current user id
4288 :param vals: field values for new record, e.g {'field_name': field_value, ...}
4289 :type vals: dictionary
4290 :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
4291 :type context: dictionary
4292 :return: id of new record created
4293 :raise AccessError: * if user has no create rights on the requested object
4294 * if user tries to bypass access rules for create on the requested object
4295 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
4296 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
4298 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
4299 Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
4306 if self.is_transient():
4307 self._transient_vacuum(cr, user)
4309 self.check_access_rights(cr, user, 'create')
4311 if self._log_access:
4312 for f in LOG_ACCESS_COLUMNS:
4313 if vals.pop(f, None) is not None:
4315 'Field `%s` is not allowed when creating the model `%s`.',
4317 vals = self._add_missing_default_values(cr, user, vals, context)
4320 for v in self._inherits:
4321 if self._inherits[v] not in vals:
4324 tocreate[v] = {'id': vals[self._inherits[v]]}
4325 (upd0, upd1, upd2) = ('', '', [])
4328 for v in vals.keys():
4329 if v in self._inherit_fields and v not in self._columns:
4330 (table, col, col_detail, original_parent) = self._inherit_fields[v]
4331 tocreate[table][v] = vals[v]
4334 if (v not in self._inherit_fields) and (v not in self._columns):
4336 unknown_fields.append(v)
4339 'No such field(s) in model %s: %s.',
4340 self._name, ', '.join(unknown_fields))
4342 # Try-except added to filter the creation of those records whose filds are readonly.
4343 # Example : any dashboard which has all the fields readonly.(due to Views(database views))
4345 cr.execute("SELECT nextval('"+self._sequence+"')")
4347 raise except_orm(_('UserError'),
4348 _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
4350 id_new = cr.fetchone()[0]
4351 for table in tocreate:
4352 if self._inherits[table] in vals:
4353 del vals[self._inherits[table]]
4355 record_id = tocreate[table].pop('id', None)
4357 # When linking/creating parent records, force context without 'no_store_function' key that
4358 # defers stored functions computing, as these won't be computed in batch at the end of create().
4359 parent_context = dict(context)
4360 parent_context.pop('no_store_function', None)
4362 if record_id is None or not record_id:
4363 record_id = self.pool.get(table).create(cr, user, tocreate[table], context=parent_context)
4365 self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=parent_context)
4367 upd0 += ',' + self._inherits[table]
4369 upd2.append(record_id)
4371 #Start : Set bool fields to be False if they are not touched(to make search more powerful)
4372 bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
4374 for bool_field in bool_fields:
4375 if bool_field not in vals:
4376 vals[bool_field] = False
4378 for field in vals.copy():
4380 if field in self._columns:
4381 fobj = self._columns[field]
4383 fobj = self._inherit_fields[field][2]
4389 for group in groups:
4390 module = group.split(".")[0]
4391 grp = group.split(".")[1]
4392 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
4393 (grp, module, 'res.groups', user))
4394 readonly = cr.fetchall()
4395 if readonly[0][0] >= 1:
4398 elif readonly[0][0] == 0:
4406 if self._columns[field]._classic_write:
4407 upd0 = upd0 + ',"' + field + '"'
4408 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
4409 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
4410 #for the function fields that receive a value, we set them directly in the database
4411 #(they may be required), but we also need to trigger the _fct_inv()
4412 if (hasattr(self._columns[field], '_fnct_inv')) and not isinstance(self._columns[field], fields.related):
4413 #TODO: this way to special case the related fields is really creepy but it shouldn't be changed at
4414 #one week of the release candidate. It seems the only good way to handle correctly this is to add an
4415 #attribute to make a field `really readonly´ and thus totally ignored by the create()... otherwise
4416 #if, for example, the related has a default value (for usability) then the fct_inv is called and it
4417 #may raise some access rights error. Changing this is a too big change for now, and is thus postponed
4418 #after the release but, definitively, the behavior shouldn't be different for related and function
4420 upd_todo.append(field)
4422 #TODO: this `if´ statement should be removed because there is no good reason to special case the fields
4423 #related. See the above TODO comment for further explanations.
4424 if not isinstance(self._columns[field], fields.related):
4425 upd_todo.append(field)
4426 if field in self._columns \
4427 and hasattr(self._columns[field], 'selection') \
4429 self._check_selection_field_value(cr, user, field, vals[field], context=context)
4430 if self._log_access:
4431 upd0 += ',create_uid,create_date,write_uid,write_date'
4432 upd1 += ",%s,(now() at time zone 'UTC'),%s,(now() at time zone 'UTC')"
4433 upd2.extend((user, user))
4434 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
4435 self.check_access_rule(cr, user, [id_new], 'create', context=context)
4436 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
4438 if self._parent_store and not context.get('defer_parent_store_computation'):
4440 self.pool._init_parent[self._name] = True
4442 parent = vals.get(self._parent_name, False)
4444 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
4446 result_p = cr.fetchall()
4447 for (pleft,) in result_p:
4452 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
4453 pleft_old = cr.fetchone()[0]
4456 cr.execute('select max(parent_right) from '+self._table)
4457 pleft = cr.fetchone()[0] or 0
4458 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
4459 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
4460 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
4462 # default element in context must be remove when call a one2many or many2many
4463 rel_context = context.copy()
4464 for c in context.items():
4465 if c[0].startswith('default_'):
4466 del rel_context[c[0]]
4469 for field in upd_todo:
4470 result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
4471 self._validate(cr, user, [id_new], context)
4473 if not context.get('no_store_function', False):
4474 result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
4477 for order, object, ids, fields2 in result:
4478 if not (object, ids, fields2) in done:
4479 self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
4480 done.append((object, ids, fields2))
4482 if self._log_create and not (context and context.get('no_store_function', False)):
4483 message = self._description + \
4485 self.name_get(cr, user, [id_new], context=context)[0][1] + \
4486 "' " + _("created.")
4487 self.log(cr, user, id_new, message, True, context=context)
4488 self._workflow_trigger(cr, user, [id_new], 'trg_create', context=context)
    def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
        """Fetch records as objects allowing to use dot notation to browse fields and relations

        :param cr: database cursor
        :param uid: current user id
        :param select: id or list of ids.
        :param context: context arguments, like lang, time zone
        :rtype: object or list of objects requested
        """
        self._list_class = list_class or browse_record_list
        # need to accept ints and longs because ids coming from a method
        # launched by button in the interface have a type long...
        # NOTE(review): the initialization of the shared ``cache`` dict is not
        # visible in this excerpt -- verify it is defined just above this point.
        if isinstance(select, (int, long)):
            # single id -> single browse_record sharing the cache
            return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
        elif isinstance(select, list):
            # list of ids -> one browse_record per id, all sharing the same cache
            return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
        # neither an id nor a list of ids -> empty placeholder record
        return browse_null()
    def _store_get_values(self, cr, uid, ids, fields, context):
        """Returns an ordered list of fields.functions to call due to
           an update operation on ``fields`` of records with ``ids``,
           obtained by calling the 'store' functions of these fields,
           as setup by their 'store' attribute.

           :return: [(priority, model_name, [record_ids,], [function_fields,])]
        """
        if fields is None: fields = []
        stored_functions = self.pool._store_function.get(self._name, [])

        # use indexed names for the details of the stored_functions:
        model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)

        # only keep functions that should be triggered for the ``fields``
        to_compute = [f for f in stored_functions \
                if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]

        # NOTE(review): the ``mapping = {}`` initialization is not visible in
        # this excerpt -- verify it precedes this loop.
        for function in to_compute:
            # use admin user for accessing objects having rules defined on store fields
            target_ids = [id for id in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id]

            # the compound key must consider the priority and model name
            key = (function[priority_], function[model_name_])
            for target_id in target_ids:
                mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))

        # Here mapping looks like:
        # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
        #   (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
        #   (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... } }

        # Now we need to generate the batch function calls list
        # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
        # NOTE(review): the ``call_map = {}`` initialization is not visible here.
        for ((priority,model), id_map) in mapping.iteritems():
            functions_ids_maps = {}
            # function_ids_maps =
            # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
            for id, functions in id_map.iteritems():
                functions_ids_maps.setdefault(tuple(functions), []).append(id)
            for functions, ids in functions_ids_maps.iteritems():
                call_map.setdefault((priority,model),[]).append((priority, model, ids,
                                                                [f[func_field_to_compute_] for f in functions]))
        ordered_keys = call_map.keys()
            # flatten the per-(priority, model) call lists in sorted key order
            result = reduce(operator.add, (call_map[k] for k in ordered_keys))
    def _store_set_values(self, cr, uid, ids, fields, context):
        """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
           respecting ``multi`` attributes), and stores the resulting values in the database directly."""
        if self._log_access:
            # collect, per record, the function fields whose stored value is
            # still fresh (based on write_date + the trigger's max-age hours)
            cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
                    field_dict.setdefault(r[0], [])
                    res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
                    write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
                    for i in self.pool._store_function.get(self._name, []):
                            up_write_date = write_date + datetime.timedelta(hours=i[5])
                            if datetime.datetime.now() < up_write_date:
                                    # value still fresh: skip recomputation of this field
                                    field_dict[r[0]].append(i[1])
            # group the fields to compute by their ``multi`` attribute so that
            # multi-fields are computed with a single call
            if self._columns[f]._multi not in keys:
                keys.append(self._columns[f]._multi)
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
                # use admin user for accessing objects having rules defined on store fields
                result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
                for id, value in result.items():
                        for f in value.keys():
                            if f in field_dict[id]:
                        if self._columns[v]._type == 'many2one':
                                # many2one results may come back as (id, name) pairs
                                value[v] = value[v][0]
                        upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
                        upd1.append(self._columns[v]._symbol_set[1](value[v]))
                        cr.execute('update "' + self._table + '" set ' + \
                            ','.join(upd0) + ' where id = %s', upd1)
                    # use admin user for accessing objects having rules defined on store fields
                    result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
                    for r in result.keys():
                            if r in field_dict.keys():
                                if f in field_dict[r]:
                    for id, value in result.items():
                        if self._columns[f]._type == 'many2one':
                        cr.execute('update "' + self._table + '" set ' + \
                            '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
4646 def perm_write(self, cr, user, ids, fields, context=None):
4647 raise NotImplementedError(_('This method does not exist anymore'))
    # TODO: improve handling of NULL values
    def _where_calc(self, cr, user, domain, active_test=True, context=None):
        """Computes the WHERE clause needed to implement an OpenERP domain.

        :param domain: the domain to compute
        :param active_test: whether the default filtering of records with ``active``
                            field set to ``False`` should be applied.
        :return: the query expressing the given domain as provided in domain
        :rtype: osv.query.Query
        """
        # if the object has a field named 'active', filter out all inactive
        # records unless they were explicitely asked for
        if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
                # the item[0] trick below works for domain items and '&'/'|'/'!'
                if not any(item[0] == 'active' for item in domain):
                    domain.insert(0, ('active', '=', 1))
                # empty domain: restrict to active records only
                domain = [('active', '=', 1)]

            # delegate domain -> SQL translation to the expression parser
            e = expression.expression(cr, user, domain, self, context)
            tables = e.get_tables()
            where_clause, where_params = e.to_sql()
            where_clause = where_clause and [where_clause] or []
            # no domain at all: select everything from the model's own table
            where_clause, where_params, tables = [], [], ['"%s"' % self._table]

        return Query(tables, where_clause, where_params)
    def _check_qorder(self, word):
        """Validate an ORDER BY specification against ``regex_order``;
        raise an AccessError-style ``except_orm`` when it is malformed."""
        if not regex_order.match(word):
            raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
    def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
        """Add what's missing in ``query`` to implement all appropriate ir.rules
          (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)

           :param query: the current query object
        """
        def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
            """ :param string parent_model: string of the parent model
                :param model child_object: model object, base of the rule application
            """
                if parent_model and child_object:
                    # as inherited rules are being applied, we need to add the missing JOIN
                    # to reach the parent table (if it was not JOINed yet in the query)
                    parent_alias = child_object._inherits_join_add(child_object, parent_model, query)
                    # inherited rules are applied on the external table -> need to get the alias and replace
                    parent_table = self.pool.get(parent_model)._table
                    added_clause = [clause.replace('"%s"' % parent_table, '"%s"' % parent_alias) for clause in added_clause]
                    # change references to parent_table to parent_alias, because we now use the alias to refer to the table
                    # NOTE(review): the ``new_tables = []`` initialization is not
                    # visible in this excerpt.
                    for table in added_tables:
                        # table is just a table name -> switch to the full alias
                        if table == '"%s"' % parent_table:
                            new_tables.append('"%s" as "%s"' % (parent_table, parent_alias))
                        # table is already a full statement -> replace reference to the table to its alias, is correct with the way aliases are generated
                            new_tables.append(table.replace('"%s"' % parent_table, '"%s"' % parent_alias))
                    added_tables = new_tables
                # merge the rule's clause/params/tables into the shared query
                query.where_clause += added_clause
                query.where_clause_params += added_params
                for table in added_tables:
                    if table not in query.tables:
                        query.tables.append(table)

        # apply main rules on the object
        rule_obj = self.pool.get('ir.rule')
        rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, self._name, mode, context=context)
        apply_rule(rule_where_clause, rule_where_clause_params, rule_tables)

        # apply ir.rules from the parents (through _inherits)
        for inherited_model in self._inherits:
            rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, inherited_model, mode, context=context)
            apply_rule(rule_where_clause, rule_where_clause_params, rule_tables,
                       parent_model=inherited_model, child_object=self)
    def _generate_m2o_order_by(self, order_field, query):
        """
        Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
        either native m2o fields or function/related fields that are stored, including
        intermediate JOINs for inheritance if required.

        :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
        """
        if order_field not in self._columns and order_field in self._inherit_fields:
            # also add missing joins for reaching the table containing the m2o field
            qualified_field = self._inherits_join_calc(order_field, query)
            order_field_column = self._inherit_fields[order_field][2]
            # field lives on this model's own table
            qualified_field = '"%s"."%s"' % (self._table, order_field)
            order_field_column = self._columns[order_field]

        assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
        if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
            _logger.debug("Many2one function/related fields must be stored " \
                "to be used as ordering fields! Ignoring sorting for %s.%s",
                self._name, order_field)

        # figure out the applicable order_by for the m2o
        dest_model = self.pool.get(order_field_column._obj)
        m2o_order = dest_model._order
        if not regex_order.match(m2o_order):
            # _order is complex, can't use it here, so we default to _rec_name
            m2o_order = dest_model._rec_name
            # extract the field names, to be able to qualify them and add desc/asc
            # NOTE(review): the ``m2o_order_list = []`` initialization is not
            # visible in this excerpt.
            for order_part in m2o_order.split(","):
                m2o_order_list.append(order_part.strip().split(" ", 1)[0].strip())
            m2o_order = m2o_order_list

        # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
        # as we don't want to exclude results that have NULL values for the m2o
        src_table, src_field = qualified_field.replace('"', '').split('.', 1)
        dst_alias, dst_alias_statement = query.add_join((src_table, dest_model._table, src_field, 'id', src_field), implicit=False, outer=True)
        qualify = lambda field: '"%s"."%s"' % (dst_alias, field)
        return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
    def _generate_order_by(self, order_spec, query):
        """
        Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
        a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.

        :raise: except_orm in case order_spec is malformed
        """
        order_by_clause = ''
        order_spec = order_spec or self._order
            order_by_elements = []
            self._check_qorder(order_spec)
            for order_part in order_spec.split(','):
                order_split = order_part.strip().split(' ')
                order_field = order_split[0].strip()
                order_direction = order_split[1].strip() if len(order_split) == 2 else ''
                if order_field == 'id':
                    # 'id' always lives on the model's own table
                    order_by_elements.append('"%s"."id" %s' % (self._table, order_direction))
                elif order_field in self._columns:
                    order_column = self._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = '"%s"."%s"' % (self._table, order_field)
                    elif order_column._type == 'many2one':
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                        continue # ignore non-readable or "non-joinable" fields
                elif order_field in self._inherit_fields:
                    # field inherited through _inherits: order on the parent table
                    parent_obj = self.pool.get(self._inherit_fields[order_field][3])
                    order_column = parent_obj._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = self._inherits_join_calc(order_field, query)
                    elif order_column._type == 'many2one':
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                        continue # ignore non-readable or "non-joinable" fields
                    if isinstance(inner_clause, list):
                        for clause in inner_clause:
                            order_by_elements.append("%s %s" % (clause, order_direction))
                        order_by_elements.append("%s %s" % (inner_clause, order_direction))
            if order_by_elements:
                order_by_clause = ",".join(order_by_elements)

        return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
    def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
        """
        Private implementation of search() method, allowing specifying the uid to use for the access right check.
        This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
        by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
        This is ok at the security level because this method is private and not callable through XML-RPC.

        :param access_rights_uid: optional user ID to use when checking access rights
                                  (not for ir.rules, this is only for ir.model.access)
        """
        self.check_access_rights(cr, access_rights_uid or user, 'read')

        # For transient models, restrict access to the current user, except for the super-user
        if self.is_transient() and self._log_access and user != SUPERUSER_ID:
            args = expression.AND(([('create_uid', '=', user)], args or []))

        query = self._where_calc(cr, user, args, context=context)
        self._apply_ir_rules(cr, user, query, 'read', context=context)
        order_by = self._generate_order_by(order, query)
        from_clause, where_clause, where_clause_params = query.get_sql()

        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        where_str = where_clause and (" WHERE %s" % where_clause) or ''

            # count-only query: no ordering needed
            cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)

        cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)

        # TDE note: with auto_join, we could have several lines about the same result
        # i.e. a lead with several unread messages; we uniquify the result using
        # a fast way to do it while preserving order (http://www.peterbe.com/plog/uniqifiers-benchmark)
        def _uniquify_list(seq):
            # NOTE(review): the ``seen = set()`` initialization is not visible
            # in this excerpt.
            return [x for x in seq if x not in seen and not seen.add(x)]
        return _uniquify_list([x[0] for x in res])
    # returns the different values ever entered for one field
    # this is used, for example, in the client when the user hits enter on
    # a char field
    def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
        """Return the distinct values already stored for ``field``, delegating to
        the parent model when the field is inherited through ``_inherits``."""
        if field in self._inherit_fields:
            # inherited field: look it up on the model that actually stores it
            return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
            return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
    def copy_data(self, cr, uid, id, default=None, context=None):
        """
        Copy given record's data with all its fields values

        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to copy
        :param default: field values to override in the original values of the copied record
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: dictionary containing all the field values
        """
        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_data_seen',{})
        if id in seen_map.setdefault(self._name,[]):
        seen_map[self._name].append(id)

        if 'state' not in default:
            if 'state' in self._defaults:
                if callable(self._defaults['state']):
                    default['state'] = self._defaults['state'](self, cr, uid, context)
                    # plain (non-callable) default value
                    default['state'] = self._defaults['state']

        # read the record without 'lang' so translatable fields come back
        # in the original (source) language
        context_wo_lang = context.copy()
        if 'lang' in context:
            del context_wo_lang['lang']
        data = self.read(cr, uid, [id,], context=context_wo_lang)
            raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))

        # build a black list of fields that should not be copied
        blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
        def blacklist_given_fields(obj):
            # blacklist the fields that are given by inheritance
            for other, field_to_other in obj._inherits.items():
                blacklist.add(field_to_other)
                if field_to_other in default:
                    # all the fields of 'other' are given by the record: default[field_to_other],
                    # except the ones redefined in self
                    blacklist.update(set(self.pool.get(other)._all_columns) - set(self._columns))
                    # recurse into the parent model's own _inherits
                    blacklist_given_fields(self.pool.get(other))
        blacklist_given_fields(self)

        # NOTE(review): the initialization of ``res`` (seeded from ``default``)
        # is not visible in this excerpt.
        for f, colinfo in self._all_columns.items():
            field = colinfo.column
            elif f in blacklist:
            elif isinstance(field, fields.function):
            elif field._type == 'many2one':
                # read() returns (id, name) pairs for many2one fields
                res[f] = data[f] and data[f][0]
            elif field._type == 'one2many':
                other = self.pool.get(field._obj)
                # duplicate following the order of the ids because we'll rely on
                # it later for copying translations in copy_translation()!
                lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
                # the lines are duplicated using the wrong (old) parent, but then
                # are reassigned to the correct one thanks to the (0, 0, ...)
                res[f] = [(0, 0, line) for line in lines if line]
            elif field._type == 'many2many':
                # (6, 0, ids) command: replace the whole relation
                res[f] = [(6, 0, data[f])]
    def copy_translations(self, cr, uid, old_id, new_id, context=None):
        """Duplicate the translations attached to record ``old_id`` onto record
        ``new_id``, recursing into one2many children (matched by sorted id order,
        as arranged by copy_data())."""
        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_translations_seen',{})
        if old_id in seen_map.setdefault(self._name,[]):
        seen_map[self._name].append(old_id)

        trans_obj = self.pool.get('ir.translation')
        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fields = self.fields_get(cr, uid, context=context)

        translation_records = []
        for field_name, field_def in fields.items():
            # we must recursively copy the translations for o2o and o2m
            if field_def['type'] == 'one2many':
                target_obj = self.pool.get(field_def['relation'])
                old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
                # here we rely on the order of the ids to match the translations
                # as foreseen in copy_data()
                old_children = sorted(old_record[field_name])
                new_children = sorted(new_record[field_name])
                for (old_child, new_child) in zip(old_children, new_children):
                    target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
            # and for translatable fields we keep them for copy
            elif field_def.get('translate'):
                if field_name in self._columns:
                    trans_name = self._name + "," + field_name
                elif field_name in self._inherit_fields:
                    # inherited field: translations are stored on the parent model
                    trans_name = self._inherit_fields[field_name][0] + "," + field_name
                trans_ids = trans_obj.search(cr, uid, [
                        ('name', '=', trans_name),
                        ('res_id', '=', old_id)
                translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))

        # re-create each collected translation for the new record
        for record in translation_records:
            record['res_id'] = new_id
            trans_obj.create(cr, uid, record, context=context)
5006 def copy(self, cr, uid, id, default=None, context=None):
5008 Duplicate record with given id updating it with default values
5010 :param cr: database cursor
5011 :param uid: current user id
5012 :param id: id of the record to copy
5013 :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
5014 :type default: dictionary
5015 :param context: context arguments, like lang, time zone
5016 :type context: dictionary
5017 :return: id of the newly created record
5022 context = context.copy()
5023 data = self.copy_data(cr, uid, id, default, context)
5024 new_id = self.create(cr, uid, data, context)
5025 self.copy_translations(cr, uid, id, new_id, context)
5028 def exists(self, cr, uid, ids, context=None):
5029 """Checks whether the given id or ids exist in this model,
5030 and return the list of ids that do. This is simple to use for
5031 a truth test on a browse_record::
5036 :param ids: id or list of ids to check for existence
5037 :type ids: int or [int]
5038 :return: the list of ids that currently exist, out of
5041 if type(ids) in (int, long):
5043 query = 'SELECT id FROM "%s"' % self._table
5044 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
5045 return [x[0] for x in cr.fetchall()]
    def check_recursion(self, cr, uid, ids, context=None, parent=None):
        # Deprecated public wrapper kept for backward compatibility: logs a
        # warning, validates 'parent', then delegates to _check_recursion().
        # NOTE(review): the continuation line of the warning call (supplying
        # the %s argument, presumably self._name) is not visible in this excerpt.
        _logger.warning("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
        assert parent is None or parent in self._columns or parent in self._inherit_fields,\
            "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
        return self._check_recursion(cr, uid, ids, context, parent)
    def _check_recursion(self, cr, uid, ids, context=None, parent=None):
        """
        Verifies that there is no loop in a hierarchical structure of records,
        by following the parent relationship using the **parent** field until a loop
        is detected or until a top-level record is found.

        :param cr: database cursor
        :param uid: current user id
        :param ids: list of ids of records to check
        :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
        :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
        """
            parent = self._parent_name
        # direct SQL query, bypassing active-record filtering and ir.rules
        query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
            # walk one level up the hierarchy, IN_MAX ids per SQL query
            # NOTE(review): ``len(ids)`` looks suspicious here -- chunking the
            # current frontier would normally use ``len(ids_parent)``; verify.
            for i in range(0, len(ids), cr.IN_MAX):
                sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
                cr.execute(query, (tuple(sub_ids_parent),))
                # keep only non-NULL parent ids for the next level
                ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
            ids_parent = ids_parent2
            for i in ids_parent:
    def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
        """Retrieve the External ID(s) of any database record.

        **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``

        :return: map of ids to the list of their fully qualified External IDs
                 in the form ``module.key``, or an empty list when there's no External
                 ID for a record, e.g.::

                     { 'id': ['module.ext_id', 'module.ext_id_bis'],
                       'id2': [] }
        """
        ir_model_data = self.pool.get('ir.model.data')
        data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
        data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
        # can't use dict.fromkeys() as the list would be shared!
        # NOTE(review): the per-id initialization of ``result`` (one fresh list
        # per id) is not visible in this excerpt.
        for record in data_results:
            result[record['res_id']].append('%(module)s.%(name)s' % record)
    def get_external_id(self, cr, uid, ids, *args, **kwargs):
        """Retrieve the External ID of any database record, if there
        is one. This method works as a possible implementation
        for a function field, to be able to add it to any
        model object easily, referencing it as ``Model.get_external_id``.

        When multiple External IDs exist for a record, only one
        of them is returned (randomly).

        :return: map of ids to their fully qualified XML ID,
                 defaulting to an empty string when there's none
                 (to be usable as a function field), e.g.::

                     { 'id': 'module.ext_id',
                       'id2': '' }
        """
        results = self._get_xml_ids(cr, uid, ids)
        # NOTE(review): the loop body (reducing each id's list to a single
        # external id or '') is not visible in this excerpt.
        for k, v in results.iteritems():
    # backwards compatibility: legacy aliases for the external-id helpers above
    get_xml_id = get_external_id
    _get_xml_ids = _get_external_ids
    def is_transient(self):
        """ Return whether the model is transient.

        See :class:`TransientModel`.
        """
        return self._transient
5144 def _transient_clean_rows_older_than(self, cr, seconds):
5145 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5146 # Never delete rows used in last 5 minutes
5147 seconds = max(seconds, 300)
5148 query = ("SELECT id FROM " + self._table + " WHERE"
5149 " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
5150 " < ((now() at time zone 'UTC') - interval %s)")
5151 cr.execute(query, ("%s seconds" % seconds,))
5152 ids = [x[0] for x in cr.fetchall()]
5153 self.unlink(cr, SUPERUSER_ID, ids)
    def _transient_clean_old_rows(self, cr, max_count):
        """When the table holds more than ``max_count`` rows, vacuum every row
        older than 5 minutes (via _transient_clean_rows_older_than)."""
        # Check how many rows we have in the table
        cr.execute("SELECT count(*) AS row_count FROM " + self._table)
        # NOTE(review): the fetch of the count into ``res`` (expected:
        # res = cr.fetchall()) is not visible in this excerpt.
        if res[0][0] <= max_count:
            return # max not reached, nothing to do
        self._transient_clean_rows_older_than(cr, 300)
5163 def _transient_vacuum(self, cr, uid, force=False):
5164 """Clean the transient records.
5166 This unlinks old records from the transient model tables whenever the
5167 "_transient_max_count" or "_max_age" conditions (if any) are reached.
5168 Actual cleaning will happen only once every "_transient_check_time" calls.
5169 This means this method can be called frequently called (e.g. whenever
5170 a new record is created).
5171 Example with both max_hours and max_count active:
5172 Suppose max_hours = 0.2 (e.g. 12 minutes), max_count = 20, there are 55 rows in the
5173 table, 10 created/changed in the last 5 minutes, an additional 12 created/changed between
5174 5 and 10 minutes ago, the rest created/changed more then 12 minutes ago.
5175 - age based vacuum will leave the 22 rows created/changed in the last 12 minutes
5176 - count based vacuum will wipe out another 12 rows. Not just 2, otherwise each addition
5177 would immediately cause the maximum to be reached again.
5178 - the 10 rows that have been created/changed the last 5 minutes will NOT be deleted
5180 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5181 _transient_check_time = 20 # arbitrary limit on vacuum executions
5182 self._transient_check_count += 1
5183 if not force and (self._transient_check_count < _transient_check_time):
5184 return True # no vacuum cleaning this time
5185 self._transient_check_count = 0
5187 # Age-based expiration
5188 if self._transient_max_hours:
5189 self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)
5191 # Count-based expiration
5192 if self._transient_max_count:
5193 self._transient_clean_old_rows(cr, self._transient_max_count)
5197 def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None):
5198 """ Serializes one2many and many2many commands into record dictionaries
5199 (as if all the records came from the database via a read()). This
5200 method is aimed at onchange methods on one2many and many2many fields.
5202 Because commands might be creation commands, not all record dicts
5203 will contain an ``id`` field. Commands matching an existing record
5204 will have an ``id``.
5206 :param field_name: name of the one2many or many2many field matching the commands
5207 :type field_name: str
5208 :param commands: one2many or many2many commands to execute on ``field_name``
5209 :type commands: list((int|False, int|False, dict|False))
5210 :param fields: list of fields to read from the database, when applicable
5211 :type fields: list(str)
5212 :returns: records in a shape similar to that returned by ``read()``
5213 (except records may be missing the ``id`` field if they don't exist in db)
5216 result = [] # result (list of dict)
5217 record_ids = [] # ids of records to read
5218 updates = {} # {id: dict} of updates on particular records
5220 for command in commands:
5221 if not isinstance(command, (list, tuple)):
5222 record_ids.append(command)
5223 elif command[0] == 0:
5224 result.append(command[2])
5225 elif command[0] == 1:
5226 record_ids.append(command[1])
5227 updates.setdefault(command[1], {}).update(command[2])
5228 elif command[0] in (2, 3):
5229 record_ids = [id for id in record_ids if id != command[1]]
5230 elif command[0] == 4:
5231 record_ids.append(command[1])
5232 elif command[0] == 5:
5233 result, record_ids = [], []
5234 elif command[0] == 6:
5235 result, record_ids = [], list(command[2])
5237 # read the records and apply the updates
5238 other_model = self.pool.get(self._all_columns[field_name].column._obj)
5239 for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
5240 record.update(updates.get(record['id'], {}))
5241 result.append(record)
    # for backward compatibility: old name kept as an alias of
    # resolve_2many_commands so existing callers keep working
    resolve_o2m_commands_to_record_dicts = resolve_2many_commands
5248 def _register_hook(self, cr):
5249 """ stuff to do right after the registry is built """
# keep this import here; importing it at the top of the file would create a dependency cycle
class Model(BaseModel):
    """Main super-class for regular database-persisted OpenERP models.

    OpenERP models are created by inheriting from this class.

    The system will later instantiate the class once per database (on
    which the class' module is installed).
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    _transient = False # True in a TransientModel
class TransientModel(BaseModel):
    """Model super-class for transient records, meant to be temporarily
    persisted, and regularly vacuum-cleaned.

    A TransientModel has a simplified access rights management,
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
    to all TransientModel records.
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    # fix: without this flag is_transient() stays False and the vacuum
    # machinery (which asserts self._transient) never applies
    _transient = True
class AbstractModel(BaseModel):
    """Abstract Model super-class for creating an abstract class meant to be
    inherited by regular models (Models or TransientModels) but not meant to
    be usable on its own, or persisted.

    Technical note: we don't want to make AbstractModel the super-class of
    Model or BaseModel because it would not make sense to put the main
    definition of persistence methods such as create() in it, and still we
    should be able to override them within an AbstractModel.
    """
    _auto = False # don't create any database backend for AbstractModels
    _register = False # not visible in ORM registry, meant to be python-inherited only
def itemgetter_tuple(items):
    """ Fixes itemgetter inconsistency (useful in some cases) of not returning
    a tuple if len(items) == 1: always returns an n-tuple where n = len(items)

    :param items: sequence of indices/keys to extract
    :returns: callable mapping a gettable to an n-tuple of its items
    """
    # fix: guard conditions were missing, making the second return unreachable
    if len(items) == 0:
        return lambda a: ()
    if len(items) == 1:
        # operator.itemgetter with one key returns a scalar, not a 1-tuple
        return lambda gettable: (gettable[items[0]],)
    return operator.itemgetter(*items)
class ImportWarning(Warning):
    """ Warning category used to propagate non-fatal notices up the call
    stack while records are being imported.
    """
def convert_pgerror_23502(model, fields, info, e):
    """ Convert a psycopg2 not-null violation (SQLSTATE 23502) into a
    user-facing message dict, pointing at the offending field when the
    error message can be parsed and the field is known.

    :returns: dict with a ``message`` key, plus a ``field`` key when the
              offending column was identified
    """
    m = re.match(r'^null value in column "(?P<field>\w+)" violates '
                 r'not-null constraint\n',
                 str(e))
    # fix: check the match BEFORE dereferencing it — m is None when the
    # error text doesn't have the expected shape
    if not m or m.group('field') not in fields:
        return {'message': unicode(e)}
    field_name = m.group('field')
    message = _(u"Missing required value for the field '%s'.") % field_name
    field = fields.get(field_name)
    if field:
        message = _(u"%s This might be '%s' in the current model, or a field "
                    u"of the same name in an o2m.") % (message, field['string'])
    return {
        'message': message,
        'field': field_name,
    }
def convert_pgerror_23505(model, fields, info, e):
    """ Convert a psycopg2 unique-constraint violation (SQLSTATE 23505)
    into a user-facing message dict, pointing at the offending field when
    the error message can be parsed and the field is known.

    :returns: dict with a ``message`` key, plus a ``field`` key when the
              offending column was identified
    """
    m = re.match(r'^duplicate key (?P<field>\w+) violates unique constraint',
                 str(e))
    # fix: check the match BEFORE dereferencing it — m is None when the
    # error text doesn't have the expected shape
    if not m or m.group('field') not in fields:
        return {'message': unicode(e)}
    field_name = m.group('field')
    message = _(u"The value for the field '%s' already exists.") % field_name
    field = fields.get(field_name)
    if field:
        message = _(u"%s This might be '%s' in the current model, or a field "
                    u"of the same name in an o2m.") % (message, field['string'])
    return {
        'message': message,
        'field': field_name,
    }
# Map SQLSTATE codes to converters turning a pg error into a message dict;
# unknown codes fall back to wrapping the raw error text.
PGERROR_TO_OE = collections.defaultdict(
    # shape of mapped converters: (model, fvg, info, pgerror) -> dict
    lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), {
    # not_null_violation
    '23502': convert_pgerror_23502,
    # unique constraint error
    '23505': convert_pgerror_23505,
})  # fix: the literal was left unterminated
5352 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: