1 # -*- coding: utf-8 -*-
2 ##############################################################################
4 # OpenERP, Open Source Management Solution
5 # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
7 # This program is free software: you can redistribute it and/or modify
8 # it under the terms of the GNU Affero General Public License as
9 # published by the Free Software Foundation, either version 3 of the
10 # License, or (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU Affero General Public License for more details.
17 # You should have received a copy of the GNU Affero General Public License
18 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 ##############################################################################
22 #.apidoc title: Object Relational Mapping
23 #.apidoc module-mods: member-order: bysource
26 Object relational mapping to database (postgresql) module
27 * Hierarchical structure
28 * Constraints consistency, validations
29 * Object meta Data depends on its status
30 * Optimised processing by complex query (multiple actions at once)
31 * Default fields value
32 * Permissions optimisation
33 * Persistant object: DB postgresql
35 * Multi-level caching system
36 * 2 different inheritancies
38 - classicals (varchar, integer, boolean, ...)
39 - relations (one2many, many2one, many2many)
60 from lxml import etree
64 import openerp.netsvc as netsvc
65 import openerp.tools as tools
66 from openerp.tools.config import config
67 from openerp.tools.misc import CountingStream
68 from openerp.tools.safe_eval import safe_eval as eval
69 from openerp.tools.translate import _
70 from openerp import SUPERUSER_ID
71 from query import Query
73 _logger = logging.getLogger(__name__)
74 _schema = logging.getLogger(__name__ + '.schema')
76 # List of etree._Element subclasses that we choose to ignore when parsing XML.
77 from openerp.tools import SKIPPED_ELEMENT_TYPES
# Pattern validating an _order attribute: a comma-separated list of column
# names (optionally double-quoted), each optionally followed by asc/desc.
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Pattern validating model _name values: lowercase alphanumerics, '_' and '.'
# only (see check_object_name below for the rationale).
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
def transfer_field_to_modifiers(field, modifiers):
    """Transfer the ``invisible``/``readonly``/``required`` attributes of a
    field descriptor (a fields_get()-style dict) into ``modifiers``.

    Per-state exceptions listed in ``field['states']`` are folded into
    domain-style conditions on the ``state`` field.

    :param dict field: field descriptor; may contain a ``states`` mapping of
                       state -> [(attribute, value), ...]
    :param dict modifiers: target dict, updated in place
    """
    # NOTE(review): the accumulator initialisation, the inner loop over the
    # per-state modifications and the final else branch were missing from
    # this copy; restored here.
    default_values = {}
    state_exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        state_exceptions[attr] = []
        default_values[attr] = bool(field.get(attr))
    for state, modifs in (field.get("states",{})).items():
        for modif in modifs:
            # remember the states in which the attribute differs from its default
            if default_values[modif[0]] != modif[1]:
                state_exceptions[modif[0]].append(state)

    for attr, default_value in default_values.items():
        if state_exceptions[attr]:
            modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
        else:
            modifiers[attr] = default_value
# Don't deal with groups, it is done by check_group().
# Need the context to evaluate the invisible attribute on tree views.
# For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """Transfer a view node's dynamic attributes (``attrs``, ``states``,
    ``invisible``, ``readonly``, ``required``) into ``modifiers``.

    :param node: view node (anything exposing ``.get(name)``)
    :param dict modifiers: target dict, updated in place
    :param dict context: evaluation context for attribute expressions
    :param bool in_tree_view: route ``invisible`` to ``tree_invisible``
    """
    # NOTE(review): the states else-branch, the node.get(a) guard and the
    # final modifiers[a] assignment were missing from this copy; restored.
    if node.get('attrs'):
        # `eval` is openerp.tools.safe_eval here (aliased at import time)
        modifiers.update(eval(node.get('attrs')))

    if node.get('states'):
        if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
        else:
            modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]

    for a in ('invisible', 'readonly', 'required'):
        if node.get(a):
            v = bool(eval(node.get(a), {'context': context or {}}))
            if in_tree_view and a == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = v
            elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[a] = v
def simplify_modifiers(modifiers):
    """Remove falsy ``invisible``/``readonly``/``required`` entries from
    ``modifiers`` in place, so only meaningful modifiers get serialized."""
    for a in ('invisible', 'readonly', 'required'):
        if a in modifiers and not modifiers[a]:
            # restored: the deletion was missing, leaving a dangling `if`
            del modifiers[a]
def transfer_modifiers_to_node(modifiers, node):
    """Serialize ``modifiers`` as JSON into the node's ``modifiers``
    attribute.

    A ``None`` modifiers dict is ignored (guard restored: simplifying or
    dumping None would fail).
    """
    if modifiers is not None:
        simplify_modifiers(modifiers)
        node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """ Processes node attributes and field descriptors to generate
    the ``modifiers`` node attribute and set it on the provided node.

    Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    :returns: nothing
    """
    # restored: the accumulator initialisation was missing (NameError below)
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """Assert that the modifiers computed for ``what`` serialize to the JSON
    string ``expected``.

    :param what: either an XML arch string (parsed and fed through
                 transfer_node_to_modifiers) or a fields_get()-style dict
                 (fed through transfer_field_to_modifiers)
    :param str expected: expected simplejson serialization
    """
    # restored: the accumulator initialisation was missing (NameError below)
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
# Run manually with:
# openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """Self-test for the modifiers machinery: each case pairs an input
    (view arch string or fields_get()-style dict) with the expected JSON
    serialization of the computed modifiers."""
    arch_cases = (
        ('<field name="a"/>', '{}'),
        ('<field name="a" invisible="1"/>', '{"invisible": true}'),
        ('<field name="a" readonly="1"/>', '{"readonly": true}'),
        ('<field name="a" required="1"/>', '{"required": true}'),
        ('<field name="a" invisible="0"/>', '{}'),
        ('<field name="a" readonly="0"/>', '{}'),
        ('<field name="a" required="0"/>', '{}'),
        # TODO order is not guaranteed
        ('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}'),
        ('<field name="a" invisible="1" required="0"/>', '{"invisible": true}'),
        ('<field name="a" invisible="0" required="1"/>', '{"required": true}'),
        ("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}'),
    )
    for arch, expected in arch_cases:
        test_modifiers(arch, expected)

    # The dictionary is supposed to be the result of fields_get().
    field_cases = (
        ({}, '{}'),
        ({"invisible": True}, '{"invisible": true}'),
        ({"invisible": False}, '{}'),
    )
    for descriptor, expected in field_cases:
        test_modifiers(descriptor, expected)
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

        The _name attribute in osv and osv_memory object is subject to
        some restrictions. This function returns True or False whether
        the given name is allowed or not.

        TODO: this is an approximation. The goal in this approximation
        is to disallow uppercase characters (in some places, we quote
        table/column names and in other not, which leads to this kind
        of errors:

            psycopg2.ProgrammingError: relation "xxx" does not exist).

        The same restriction should apply to both osv and osv_memory
        objects for consistency.

    """
    # restored: the return statements were missing, leaving a dangling `if`
    if regex_object_name.match(name) is None:
        return False
    return True
def raise_on_invalid_object_name(name):
    """Raise an except_orm when ``name`` is not a valid model name
    (see check_object_name)."""
    if check_object_name(name):
        return
    msg = "The _name attribute %s is not valid." % name
    raise except_orm('ValueError', msg)
230 POSTGRES_CONFDELTYPES = {
def intersect(la, lb):
    """Return the elements of *la* that also occur in *lb*, preserving the
    order of *la* (duplicates in *la* are kept)."""
    keep = lambda element: element in lb
    return filter(keep, la)
def fix_import_export_id_paths(fieldname):
    """
    Fixes the id fields in import and exports, and splits field paths
    on '/'.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    # A trailing '.id' (database id) or ':id' (external id) marker becomes
    # its own path component before the split on '/'.
    path = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    path = re.sub(r'([^/]):id', r'\1/id', path)
    return path.split('/')
class except_orm(Exception):
    """Base exception for ORM errors, carrying a ``name`` (category/title)
    and a ``value`` (message), also exposed through ``args``."""
    def __init__(self, name, value):
        # restored: the name/value attribute assignments were missing,
        # while other code reads .name/.value from these exceptions
        self.name = name
        self.value = value
        self.args = (name, value)
260 class BrowseRecordError(Exception):
class browse_null(object):
    """ Readonly python database object browser standing in for an absent
    record (e.g. an unset many2one): every attribute or item access yields
    None, calling it yields False, and it is falsy (Python 2).
    """

    def __init__(self):
        # mimic a record with no database id
        self.id = False

    def __getitem__(self, name):
        # restored: method bodies were missing from this copy
        return None

    def __getattr__(self, name):
        return None  # XXX: return self ?

    def __call__(self, *args, **kwargs):
        # allow writing e.g. `record.parent_id.read()` without crashing
        return False

    def __nonzero__(self):
        # Python 2 truthiness: a missing record is falsy
        return False

    def __unicode__(self):
        return ''
# TODO: execute an object method on browse_record_list

class browse_record_list(list):
    """ Collection of browse objects

        Such an instance will be returned when doing a ``browse([ids..])``
        and will be iterable, yielding browse() objects
    """

    def __init__(self, lst, context=None):
        # restored: without this guard self.context could be None,
        # while consumers expect a dict
        if not context:
            context = {}
        super(browse_record_list, self).__init__(lst)
        self.context = context
class browse_record(object):
    """ An object that behaves like a row of an object's table.
        It has attributes after the columns of the corresponding object.

        Examples::

            uobj = pool.get('res.users')
            user_rec = uobj.browse(cr, uid, 104)

        NOTE(review): several statements of this class appear to be elided
        in this copy (attribute assignments in __init__, if/else glue in
        __getitem__, some method headers). The inline NOTE(review) comments
        below flag the obvious gaps -- confirm against the upstream
        OpenERP orm.py before relying on this copy.
    """

    def __init__(self, cr, uid, id, table, cache, context=None,
                 list_class=browse_record_list, fields_process=None):
        """
        :param table: the browsed object (inherited from orm)
        :param dict cache: a dictionary of model->field->data to be shared
                           across browse objects, thus reducing the SQL
                           read()s. It can speed up things a lot, but also be
                           disastrous if not discarded after write()/unlink()
        :param dict context: dictionary with an optional context
        """
        if fields_process is None:
        # NOTE(review): expected `fields_process = {}` plus the assignments
        # of self._cr / self._uid / self._id here -- they appear elided
        # (self._id is read below).
        self._list_class = list_class
        self._table = table # deprecated, use _model!
        self._table_name = self._table._name
        self.__logger = logging.getLogger('openerp.osv.orm.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        # per-model slice of the shared cache
        cache.setdefault(table._name, {})
        self._data = cache[table._name]

#        if not (id and isinstance(id, (int, long,))):
#            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))

        if id not in self._data:
            self._data[id] = {'id': id}

    def __getitem__(self, name):
        """Fetch (and cache) the value of field ``name`` for this record,
        prefetching sibling records/fields sharing the same cache."""
        if name not in self._data[self._id]:
            # build the list of fields we will fetch

            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                attr = getattr(self._table, name)
                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    # curry model methods with cr/uid/[id] and the context
                    def function_proxy(*args, **kwargs):
                        if 'context' not in kwargs and self._context:
                            kwargs.update(context=self._context)
                        return attr(self._cr, self._uid, [self._id], *args, **kwargs)
                    return function_proxy
                # NOTE(review): expected `else: return attr` and an `else:`
                # introducing the error branch below -- elided.
                error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
                self.__logger.warning(error_msg)
                if self.__logger.isEnabledFor(logging.DEBUG):
                    self.__logger.debug(''.join(traceback.format_stack()))
                raise KeyError(error_msg)

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            # NOTE(review): expected an `if col._prefetch:` guard here -- elided.
            # gen the list of "local" (ie not inherited) fields which are classic or many2one
            fields_to_fetch = filter(lambda x: x[1]._classic_write, self._table._columns.items())
            # gen the list of inherited fields
            inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
            # complete the field list with the inherited fields which are classic or many2one
            fields_to_fetch += filter(lambda x: x[1]._classic_write, inherits)
            # otherwise we fetch only that field
            # NOTE(review): expected `else:` before this line -- elided.
            fields_to_fetch = [(name, col)]
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            # read the results
            field_names = map(lambda x: x[0], fields_to_fetch)
            field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")

            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                # NOTE(review): expected `if not lang_obj_ids:` guarding this raise -- elided.
                raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

            # NOTE(review): expected `if not field_values:` guarding the two
            # lines below -- elided.
            # Where did those ids come from? Perhaps old entries in ir_model_dat?
            _logger.warning("No field_values found for ids %s in %s", ids, self)
            raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                # NOTE(review): expected `new_data = {}` here -- elided.
                for field_name, field_column in fields_to_fetch:
                    if field_column._type == 'many2one':
                        if result_line[field_name]:
                            obj = self._table.pool.get(field_column._obj)
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                            # NOTE(review): expected `else:` before this line -- elided.
                            value = result_line[field_name]
                            # FIXME: this happen when a _inherits object
                            #        overwrite a field of it parent. Need
                            #        testing to be sure we got the right
                            #        object and not the parent one.
                            if not isinstance(value, browse_record):
                                # NOTE(review): expected an `if obj is None:`
                                # branch here (returning browse_null) -- elided.
                                # In some cases the target model is not available yet, so we must ignore it,
                                # which is safe in most cases, this value will just be loaded later when needed.
                                # This situation can be caused by custom fields that connect objects with m2o without
                                # respecting module dependencies, causing relationships to be connected to soon when
                                # the target is not loaded yet.
                                new_data[field_name] = browse_record(self._cr,
                                    self._uid, value, obj, self._cache,
                                    context=self._context,
                                    list_class=self._list_class,
                                    fields_process=self._fields_process)
                            # NOTE(review): the following three assignments were
                            # `else:` branches of nested conditionals -- elided.
                            new_data[field_name] = value
                            new_data[field_name] = browse_null()
                            new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type == 'reference':
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                            # NOTE(review): expected `else:` before the split below -- elided.
                            ref_obj, ref_id = result_line[field_name].split(',')
                            ref_id = long(ref_id)
                            # NOTE(review): expected `if ref_id:` here -- elided.
                            obj = self._table.pool.get(ref_obj)
                            new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                            new_data[field_name] = browse_null()
                        # NOTE(review): `else:` elided before this fallback.
                        new_data[field_name] = browse_null()
                    # NOTE(review): `else:` elided before this fallback.
                    new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

        if not name in self._data[self._id]:
            # How did this happen? Could be a missing model due to custom fields used too soon, see above.
            self.__logger.error("Fields to fetch: %s, Field values: %s", field_names, field_values)
            self.__logger.error("Cached: %s, Table: %s", self._data[self._id], self._table)
            raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
        # NOTE(review): expected `try: return self[name] except KeyError, e:`
        # wrapping this raise -- elided (`e` is otherwise unbound).
        raise AttributeError(e)

    def __contains__(self, name):
        # a name is "in" the record if it is a column, an inherited field,
        # or any attribute of the model
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

    # NOTE(review): expected `def __iter__(self):` header here -- elided.
        raise NotImplementedError("Iteration is not allowed on %s" % self)

    def __hasattr__(self, name):
        # NOTE(review): body elided (expected `return name in self`).

    # NOTE(review): expected `def __repr__(self):` (aliased as __str__) -- elided.
        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        if not isinstance(other, browse_record):
            # NOTE(review): expected `return False` here -- elided.
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        if not isinstance(other, browse_record):
            # NOTE(review): expected `return True` here -- elided.
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

    # NOTE(review): expected `def __hash__(self):` header here -- elided.
        return hash((self._table_name, self._id))

    # NOTE(review): expected `def refresh(self):` header here -- elided.
        """Force refreshing this browse_record's data and all the data of the
        records that belong to the same cache, by emptying the cache completely,
        preserving only the record identifiers (for prefetching optimizations).
        """
        for model, model_cache in self._cache.iteritems():
            # only preserve the ids of the records that were in the cache
            cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
            self._cache[model].clear()
            self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """ Returns the VARCHAR declaration for the provided size:

    * If no size (or an empty or negative size is provided) return an
      'infinite' VARCHAR
    * Otherwise return a VARCHAR(n)

    :type int size: varchar size, optional
    :rtype: str
    """
    # restored: the raise expression was truncated and the unlimited-VARCHAR
    # fallback (documented above) was missing
    if size:
        if not isinstance(size, int):
            raise TypeError("VARCHAR parameter should be an int, got %s"
                            % type(size))
        if size > 0:
            return 'VARCHAR(%d)' % size
    return 'VARCHAR'
# Mapping from simple column classes to their Postgres storage type;
# consumed by get_pg_type() below. Restored: the literal was unterminated
# and the text/html/date entries appeared elided.
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.text: 'text',
    fields.html: 'text',
    fields.date: 'date',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    fields.serialized: 'text',
}
def get_pg_type(f, type_override=None):
    """
    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification),
              or None if the field type is not supported
    """
    field_type = type_override or type(f)

    # restored: the digits guard, else branches and the unsupported-type
    # fallback/return were missing from this copy
    if field_type in FIELDS_TO_PGTYPES:
        pg_type = (FIELDS_TO_PGTYPES[field_type], FIELDS_TO_PGTYPES[field_type])
    elif issubclass(field_type, fields.float):
        # NUMERIC keeps exact precision; only used when digits are requested
        if f.digits:
            pg_type = ('numeric', 'NUMERIC')
        else:
            pg_type = ('float8', 'DOUBLE PRECISION')
    elif issubclass(field_type, (fields.char, fields.reference)):
        pg_type = ('varchar', pg_varchar(f.size))
    elif issubclass(field_type, fields.selection):
        if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int))\
                or getattr(f, 'size', None) == -1:
            pg_type = ('int4', 'INTEGER')
        else:
            pg_type = ('varchar', pg_varchar(getattr(f, 'size', None)))
    elif issubclass(field_type, fields.function):
        if f._type == 'selection':
            pg_type = ('varchar', pg_varchar())
        else:
            # dispatch on the functional field's underlying type
            pg_type = get_pg_type(f, getattr(fields, f._type))
    else:
        _logger.warning('%s type not supported!', field_type)
        pg_type = None

    return pg_type
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.

    """

    # {module_name: [model_class, ...]} filled in as classes are defined
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            # restored: without this return the base/abstract classes below
            # would fall through and self-register
            return

        # The (OpenERP) module name can be in the `openerp.addons` namespace
        # or not. For instance module `sale` can be imported as
        # `openerp.addons.sale` (the good way) or `sale` (for backward
        # compatibility).
        module_parts = self.__module__.split('.')
        if len(module_parts) > 2 and module_parts[0] == 'openerp' and \
                module_parts[1] == 'addons':
            module_name = self.__module__.split('.')[2]
        else:
            # restored: the else branch was missing
            module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# (list(...) keeps this correct under both Python 2 and 3; the dict literal
# above was missing its closing brace in this copy)
MAGIC_COLUMNS = ['id'] + list(LOG_ACCESS_COLUMNS.keys())
class BaseModel(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class' subclasses:

    * Model: for regular database-persisted models
    * TransientModel: for temporary data, stored in the database but automatically
      vaccuumed every so often
    * AbstractModel: for abstract super classes meant to be shared by multiple
      _inheriting classes (usually Models or TransientModels)

    The system will later instantiate the class once per database (on
    which the class' module is installed).

    To create a class that should not be instantiated, the _register class attribute
    may be set to False.
    """
    # NOTE(review): several class attributes present in upstream orm.py
    # (_name, _columns, _constraints, _defaults, _order, _inherits,
    # _group_by_full, _inherit_fields, _all_columns, ...) appear elided
    # from this copy -- the orphaned comments below document them.
    __metaclass__ = MetaModel
    _auto = True # create database backend
    _register = False # Set to false if the model shouldn't be automatically discovered.

    _parent_name = 'parent_id'
    _parent_store = False
    _parent_order = False

    # dict of {field:method}, with method returning the (name_get of records, {id: fold})
    # to include in the _read_group, if grouped on this field
    _transient = False # True in a TransientModel
    _transient_max_count = None
    _transient_max_hours = None
    _transient_check_time = 20

    # { 'parent_model': 'm2o_field', ... }

    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model.
    # { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #                  field_column_obj, origina_parent_model), ... }

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.

    _sql_constraints = []
    # method names that must not be exposed/overridden casually
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']

    # pseudo-field used for optimistic concurrency checks in read()/write()
    CONCURRENCY_CHECK_FIELD = '__last_update'
717 def log(self, cr, uid, id, message, secondary=False, context=None):
718 return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.")
    def view_init(self, cr, uid, fields_list, context=None):
        """Override this method to do specific things when a view on the object is opened."""
        # Intentionally a no-op hook: the docstring is the entire body.
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).

        """
        # NOTE(review): multiple guard lines appear elided from this copy
        # (`if context is None: context = {}`, `if not cr.rowcount:` / `else:`
        # around the ir_model insert, `cols = {}`, `vals = {` with the
        # model/name/ttype entries, `if k not in cols:` / `else:`, several
        # statement closers) -- confirm each against upstream orm.py.
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
        # create the ir_model row if missing (guard elided)
        cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
        model_id = cr.fetchone()[0]
        cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
        # else-branch: reuse the existing ir_model id (guard elided)
        model_id = cr.fetchone()[0]
        if 'module' in context:
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
            # insert only when the xml-id does not exist yet (guard elided)
            cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                (name_id, context['module'], 'ir.model', model_id)

        # snapshot of the fields currently known to ir_model_fields
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        ir_model_fields_obj = self.pool.get('ir.model.fields')

        # sparse field should be created at the end, as it depends on its serialized field already existing
        model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0)
        for (k, f) in model_fields:
            # NOTE(review): `vals = {` opening and several entries elided.
                'model_id': model_id,
                'field_description': f.string,
                'relation': f._obj or '',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': f._fields_id if isinstance(f, fields.one2many) else '',
                'serialization_field_id': None,
            if getattr(f, 'serialization_field', None):
                # resolve link to serialization_field if specified by name
                serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', f.serialization_field)])
                if not serialization_field_id:
                    raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k))
                vals['serialization_field_id'] = serialization_field_id[0]

            # When its a custom field,it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                #setting value to let the problem NOT occur next time
                # NOTE(review): expected `elif k in cols:` here -- elided.
                vals['select_level'] = cols[k]['select_level']

            # insert branch for fields not yet in ir_model_fields
            # NOTE(review): expected `if k not in cols:` here -- elided.
            cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
            id = cr.fetchone()[0]
            cr.execute("""INSERT INTO ir_model_fields (
                id, model_id, model, name, field_description, ttype,
                relation,view_load,state,select_level,relation_field, translate, serialization_field_id
                %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                vals['relation'], bool(vals['view_load']), 'base',
                vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id']
            if 'module' in context:
                name1 = 'field_' + self._table + '_' + k
                cr.execute("select name from ir_model_data where name=%s", (name1,))
                # disambiguate the xml-id when it already exists (guard elided)
                name1 = name1 + "_" + str(id)
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                    (name1, context['module'], 'ir.model.fields', id)

            # update branch for fields whose stored definition changed
            # NOTE(review): expected `else:` here -- elided.
            for key, val in vals.items():
                if cols[k][key] != vals[key]:
                    cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                    cr.execute("""UPDATE ir_model_fields SET
                        model_id=%s, field_description=%s, ttype=%s, relation=%s,
                        view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s, serialization_field_id=%s
                        model=%s AND name=%s""", (
                        vals['model_id'], vals['field_description'], vals['ttype'],
                        vals['relation'], bool(vals['view_load']),
                        vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name']
    # Goal: try to apply inheritance at the instanciation level and
    # put objects in the pool var
    # NOTE(review): upstream decorates this with @classmethod -- elided here.
    def create_instance(cls, pool, cr):
        """ Instanciate a given model.

        This class method instanciates the class of some model (i.e. a class
        deriving from osv or osv_memory). The class might be the class passed
        in argument or, if it inherits from another class, a class constructed
        by combining the two classes.

        The ``attributes`` argument specifies which parent class attributes
        are combined with the child's.

        TODO: the creation of the combined class is repeated at each call of
        this method. This is probably unnecessary.

        """
        # NOTE(review): several structural lines appear elided from this copy
        # (`if parent_names:`, `else:` branches, `nattr = {}`, the
        # `for s in attributes:` loop header, `new[c2] = c` /`exist` flag
        # handling, `return obj`) -- confirm against upstream orm.py.
        attributes = ['_columns', '_defaults', '_inherits', '_constraints',

        parent_names = getattr(cls, '_inherit', None)
        if isinstance(parent_names, (str, unicode)):
            name = cls._name or parent_names
            parent_names = [parent_names]
        # raised when multiple parents are given without an explicit _name
        raise TypeError('_name is mandatory in case of multiple inheritance')

        for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
            parent_model = pool.get(parent_name)
            # raised when the parent is not in the pool (guard elided)
            raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
            if not getattr(cls, '_original_module', None) and name == parent_model._name:
                cls._original_module = parent_model._original_module
            parent_class = parent_model.__class__
            new = copy.copy(getattr(parent_model, s, {}))
            # Don't _inherit custom fields.
            # Duplicate float fields because they have a .digits
            # cache (which must be per-registry, not server-wide).
            if new[c]._type == 'float':
                new[c] = copy.copy(new[c])
            if hasattr(new, 'update'):
                new.update(cls.__dict__.get(s, {}))
            elif s=='_constraints':
                for c in cls.__dict__.get(s, []):
                    for c2 in range(len(new)):
                        #For _constraints, we should check field and methods as well
                        if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                or getattr(new[c2][0],'__name__', True) == \
                                    getattr(c[0],'__name__', False)):
                            # If new class defines a constraint with
                            # same function name, we let it override
                            # the old one.
            # list-valued attributes are concatenated rather than merged
            new.extend(cls.__dict__.get(s, []))

            # Keep links to non-inherited constraints, e.g. useful when exporting translations
            nattr['_local_constraints'] = cls.__dict__.get('_constraints', [])
            nattr['_local_sql_constraints'] = cls.__dict__.get('_sql_constraints', [])
            # build the combined class from the child and the parent class
            cls = type(name, (cls, parent_class), dict(nattr, _register=False))
        # else-branch (no _inherit): keep direct references (glue elided)
        cls._local_constraints = getattr(cls, '_constraints', [])
        cls._local_sql_constraints = getattr(cls, '_sql_constraints', [])

        if not getattr(cls, '_original_module', None):
            cls._original_module = cls._module
        obj = object.__new__(cls)
        obj.__init__(pool, cr)
931 """Register this model.
933 This doesn't create an instance but simply register the model
934 as being part of the module where it is defined.
939 # Set the module name (e.g. base, sale, accounting, ...) on the class.
940 module = cls.__module__.split('.')[0]
941 if not hasattr(cls, '_module'):
944 # Record this class in the list of models to instantiate for this module,
945 # managed by the metaclass.
946 module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
947 if cls not in module_model_list:
948 module_model_list.append(cls)
950 # Since we don't return an instance here, the __init__
951 # method won't be called.
954 def __init__(self, pool, cr):
955 """ Initialize a model and make it part of the given registry.
957 - copy the stored fields' functions in the osv_pool,
958 - update the _columns with the fields found in ir_model_fields,
959 - ensure there is a many2one for each _inherits'd parent,
960 - update the children's _columns,
961 - give a chance to each field to initialize itself.
# NOTE(review): this excerpt is line-numbered and has gaps (missing original
# lines); the comments below describe only the code that is visible.
# Register this model instance in the registry under its technical name.
964 pool.add(self._name, self)
# A model must declare _name unless it extends an existing one via _inherit.
967 if not self._name and not hasattr(self, '_inherit'):
968 name = type(self).__name__.split('.')[0]
969 msg = "The class %s has to have a _name attribute" % name
972 raise except_orm('ValueError', msg)
974 if not self._description:
975 self._description = self._name
# Default table name: model name with dots replaced by underscores.
977 self._table = self._name.replace('.', '_')
979 if not hasattr(self, '_log_access'):
980 # If _log_access is not specified, it is the same value as _auto.
981 self._log_access = getattr(self, "_auto", True)
# Copy _columns so per-instance mutations below don't leak to the class.
983 self._columns = self._columns.copy()
984 for store_field in self._columns:
985 f = self._columns[store_field]
986 if hasattr(f, 'digits_change'):
# Drop any previously registered store trigger for this exact
# (model, field) pair before it is re-registered below.
988 def not_this_field(stored_func):
989 x, y, z, e, f, l = stored_func
990 return x != self._name or y != store_field
991 self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
992 if not isinstance(f, fields.function):
# store=True shorthand: recompute on writes to this model itself
# (identity ids function, no field filter, priority 10, no length).
998 sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
999 for object, aa in sm.items():
# Accept both the 4-tuple (with time length) and 3-tuple forms.
1001 (fnct, fields2, order, length) = aa
1003 (fnct, fields2, order) = aa
1006 raise except_orm('Error',
1007 ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
1008 self.pool._store_function.setdefault(object, [])
1009 self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
# Keep triggers ordered by priority (tuple index 4).
1010 self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
# Map SQL constraint names to human-readable messages for error reporting.
1012 for (key, _, msg) in self._sql_constraints:
1013 self.pool._sql_error[self._table+'_'+key] = msg
1015 # Load manual fields
1017 # Check whether the query is already done for all modules or if we need to
1019 if self.pool.fields_by_model is not None:
1020 manual_fields = self.pool.fields_by_model.get(self._name, [])
# Fallback path: query ir_model_fields directly for user-defined
# ('manual' state) fields of this model.
1022 cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
1023 manual_fields = cr.dictfetchall()
1024 for field in manual_fields:
# Never let a manual field shadow a code-defined column.
1025 if field['name'] in self._columns:
# Column attributes rebuilt from the ir_model_fields row.
1028 'string': field['field_description'],
1029 'required': bool(field['required']),
1030 'readonly': bool(field['readonly']),
# `eval` here is safe_eval (see module imports), applied to the stored domain.
1031 'domain': eval(field['domain']) if field['domain'] else None,
1032 'size': field['size'],
1033 'ondelete': field['on_delete'],
1034 'translate': (field['translate']),
1036 #'select': int(field['select_level'])
# Sparse fields are stored serialized inside another field.
1039 if field['serialization_field_id']:
1040 cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
1041 attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
1042 if field['ttype'] in ['many2one', 'one2many', 'many2many']:
1043 attrs.update({'relation': field['relation']})
1044 self._columns[field['name']] = fields.sparse(**attrs)
1045 elif field['ttype'] == 'selection':
1046 self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
1047 elif field['ttype'] == 'reference':
1048 self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
1049 elif field['ttype'] == 'many2one':
1050 self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
1051 elif field['ttype'] == 'one2many':
1052 self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
1053 elif field['ttype'] == 'many2many':
# Synthesize a relation table name for manual m2m fields:
# x_<relation>_<model>_<field>_rel.
1054 _rel1 = field['relation'].replace('.', '_')
1055 _rel2 = field['model'].replace('.', '_')
1056 _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
1057 self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
# Simple types: instantiate fields.<ttype> directly.
1059 self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
1061 self._inherits_check()
1062 self._inherits_reload()
1063 if not self._sequence:
1064 self._sequence = self._table + '_id_seq'
# Every default must target an existing own or inherited field.
1065 for k in self._defaults:
1066 assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
# Give each column a chance to reset cached state.
1067 for f in self._columns:
1068 self._columns[f].restart()
# Transient (wizard) models: vacuum limits from config + access policy.
1071 if self.is_transient():
1072 self._transient_check_count = 0
1073 self._transient_max_count = config.get('osv_memory_count_limit')
1074 self._transient_max_hours = config.get('osv_memory_age_limit')
1075 assert self._log_access, "TransientModels must have log_access turned on, "\
1076 "in order to implement their access rights policy"
# _rec_name must name a real column (or 'id'); 'name' is presumably the
# fallback assigned on a branch missing from this excerpt — TODO confirm.
1079 if self._rec_name is not None:
1080 assert self._rec_name in self._columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
1082 self._rec_name = 'name'
# Build the flat export rows for a single browse_record. Returns a list of
# rows: the record's own row first, then extra rows produced by o2m/m2m
# sub-records. NOTE(review): excerpt has missing lines; comments cover
# visible code only.
1085 def __export_row(self, cr, uid, row, fields, context=None):
# Map a column type to a default cell value (branches' values are on
# lines missing from this excerpt).
1089 def check_type(field_type):
1090 if field_type == 'float':
1092 elif field_type == 'integer':
1094 elif field_type == 'boolean':
# Resolve a field through the _inherits chain to find its column object.
1098 def selection_field(in_field):
1099 col_obj = self.pool.get(in_field.keys()[0])
1100 if f[i] in col_obj._columns.keys():
1101 return col_obj._columns[f[i]]
1102 elif f[i] in col_obj._inherits.keys():
1103 selection_field(col_obj._inherits)
# Return the External ID (module.name) for record r, creating a
# '__export__' ir.model.data entry when none exists.
1107 def _get_xml_id(self, cr, uid, r):
1108 model_data = self.pool.get('ir.model.data')
1109 data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
1111 d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
1113 r = '%s.%s' % (d['module'], d['name'])
# Generated name: <table>_<id>[_<postfix>] to avoid collisions.
1119 n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
1120 if not model_data.search(cr, uid, [('name', '=', n)]):
# Created as SUPERUSER so export works regardless of the user's
# write rights on ir.model.data.
1123 model_data.create(cr, SUPERUSER_ID, {
1125 'model': self._name,
1127 'module': '__export__',
# One output cell per requested field path, pre-filled with ''.
1133 data = map(lambda x: '', range(len(fields)))
1135 for fpos in range(len(fields)):
1145 r = _get_xml_id(self, cr, uid, r)
1148 # To display the external name of a selection field when it is exported
1149 if f[i] in self._columns.keys():
1150 cols = self._columns[f[i]]
1151 elif f[i] in self._inherit_fields.keys():
1152 cols = selection_field(self._inherits)
1153 if cols and cols._type == 'selection':
1154 sel_list = cols.selection
# Only list-style selections can be mapped value -> label here.
1155 if r and type(sel_list) == type([]):
1156 r = [x[1] for x in sel_list if r==x[0]]
1157 r = r and r[0] or False
1159 if f[i] in self._columns:
1160 r = check_type(self._columns[f[i]]._type)
1161 elif f[i] in self._inherit_fields:
1162 r = check_type(self._inherit_fields[f[i]][2]._type)
1163 data[fpos] = r or False
# Relational values: recurse into sub-records using the remaining path.
1165 if isinstance(r, (browse_record_list, list)):
1167 fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
1170 if [x for x in fields2 if x]:
1172 done.append(fields2)
# m2m exported as '/id': join the external IDs with commas.
1173 if cols and cols._type=='many2many' and len(fields[fpos])>(i+1) and (fields[fpos][i+1]=='id'):
1174 data[fpos] = ','.join([_get_xml_id(self, cr, uid, x) for x in r])
1178 lines2 = row2._model.__export_row(cr, uid, row2, fields2,
# First sub-row's cells are merged into the current row.
1181 for fpos2 in range(len(fields)):
1182 if lines2 and lines2[0][fpos2]:
1183 data[fpos2] = lines2[0][fpos2]
# No sub-fields requested: render related records by display name.
1187 name_relation = self.pool.get(rr._table_name)._rec_name
1188 if isinstance(rr[name_relation], browse_record):
1189 rr = rr[name_relation]
1190 rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
1191 rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
1192 dt += tools.ustr(rr_name or '') + ','
# Strip the trailing comma added by the loop above.
1193 data[fpos] = dt[:-1]
# Scalar many2one: export its display name via name_get().
1202 if isinstance(r, browse_record):
1203 r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
1204 r = r and r[0] and r[0][1] or ''
1205 data[fpos] = tools.ustr(r or '')
1206 return [data] + lines
1208 def export_data(self, cr, uid, ids, fields_to_export, context=None):
1210 Export fields for selected objects
1212 :param cr: database cursor
1213 :param uid: current user id
1214 :param ids: list of ids
1215 :param fields_to_export: list of fields
1216 :param context: context arguments, like lang, time zone
1217 :rtype: dictionary with a *datas* matrix
1219 This method is used when exporting data via client menu
# Merge own columns with inherited ones so export paths can resolve
# fields coming from _inherits parents.
1224 cols = self._columns.copy()
1225 for f in self._inherit_fields:
1226 cols.update({f: self._inherit_fields[f][2]})
# Normalize 'id'/'.id' tokens in each requested field path.
1227 fields_to_export = map(fix_import_export_id_paths, fields_to_export)
# NOTE(review): `datas` is initialized on a line missing from this excerpt.
# Each browsed record may contribute several output rows (o2m/m2m).
1229 for row in self.browse(cr, uid, ids, context):
1230 datas += self.__export_row(cr, uid, row, fields_to_export, context)
1231 return {'datas': datas}
1233 def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
1236 Use :meth:`~load` instead
1238 Import given data in given module
1240 This method is used when importing data via client menu.
1242 Example of fields to import for a sale.order::
1245 partner_id, (=name_search)
1246 order_line/.id, (=database_id)
1248 order_line/product_id/id, (=xml id)
1249 order_line/price_unit,
1250 order_line/product_uom_qty,
1251 order_line/product_uom/id (=xml_id)
1253 This method returns a 4-tuple with the following structure::
1255 (return_code, errored_resource, error_message, unused)
1257 * The first item is a return code, it is ``-1`` in case of
1258 import error, or the last imported row number in case of success
1259 * The second item contains the record data dict that failed to import
1260 in case of error, otherwise it's 0
1261 * The third item contains an error message string in case of error,
1263 * The last item is currently unused, with no specific semantics
1265 :param fields: list of fields to import
1266 :param datas: data to import
1267 :param mode: 'init' or 'update' for record creation
1268 :param current_module: module name
1269 :param noupdate: flag for record creation
1270 :param filename: optional file to store partial import state for recovery
1271 :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
1272 :rtype: (int, dict or 0, str or 0, str or 0)
# Work on a copy of the context; record the importing module for
# downstream converters.
1274 context = dict(context) if context is not None else {}
1275 context['_import_current_module'] = current_module
1277 fields = map(fix_import_export_id_paths, fields)
1278 ir_model_data_obj = self.pool.get('ir.model.data')
# `log` callback (defined on lines missing from this excerpt): abort the
# whole import on the first 'error'-typed message.
1281 if m['type'] == 'error':
1282 raise Exception(m['message'])
# Resume support: reload the last saved position for this file from the
# pickled 'import_partial' state file. NOTE(review): pickle.load on a
# local config-designated file — trusted state, not user upload, but
# worth keeping in mind.
1284 if config.get('import_partial') and filename:
1285 with open(config.get('import_partial'), 'rb') as partial_import_file:
1286 data = pickle.load(partial_import_file)
1287 position = data.get(filename, 0)
# Extract raw record dicts from the matrix, convert them to writable
# values, then upsert each through ir.model.data._update().
1291 for res_id, xml_id, res, info in self._convert_records(cr, uid,
1292 self._extract_records(cr, uid, fields, datas,
1293 context=context, log=log),
1294 context=context, log=log):
1295 ir_model_data_obj._update(cr, uid, self._name,
1296 current_module, res, mode=mode, xml_id=xml_id,
1297 noupdate=noupdate, res_id=res_id, context=context)
1298 position = info.get('rows', {}).get('to', 0) + 1
# Checkpoint the position every 100 rows so a crashed import can resume.
1299 if config.get('import_partial') and filename and (not (position%100)):
1300 with open(config.get('import_partial'), 'rb') as partial_import:
1301 data = pickle.load(partial_import)
1302 data[filename] = position
1303 with open(config.get('import_partial'), 'wb') as partial_import:
1304 pickle.dump(data, partial_import)
1305 if context.get('defer_parent_store_computation'):
1306 self._parent_store_compute(cr)
# Legacy error contract: -1 plus a message, instead of raising.
1308 except Exception, e:
1310 return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), ''
1312 if context.get('defer_parent_store_computation'):
1313 self._parent_store_compute(cr)
1314 return position, 0, 0, 0
1316 def load(self, cr, uid, fields, data, context=None):
1318 Attempts to load the data matrix, and returns a list of ids (or
1319 ``False`` if there was an error and no id could be generated) and a
1322 The ids are those of the records created and saved (in database), in
1323 the same order they were extracted from the file. They can be passed
1324 directly to :meth:`~read`
1326 :param fields: list of fields to import, at the same index as the corresponding data
1327 :type fields: list(str)
1328 :param data: row-major matrix of data to import
1329 :type data: list(list(str))
1330 :param dict context:
1331 :returns: {ids: list(int)|False, messages: [Message]}
# Outer savepoint: everything is undone if any record errored.
1333 cr.execute('SAVEPOINT model_load')
1336 fields = map(fix_import_export_id_paths, fields)
# NOTE(review): clear_caches() most likely returns None, which would make
# ModelData None when used at l.1360 below — this looks like two original
# statements (assignment + clear_caches() call) joined by the mangled
# paste; verify against the upstream source.
1337 ModelData = self.pool['ir.model.data'].clear_caches()
1339 fg = self.fields_get(cr, uid, context=context)
# One savepoint per record so a failed row can be rolled back
# individually while the rest of the batch continues.
1346 for id, xid, record, info in self._convert_records(cr, uid,
1347 self._extract_records(cr, uid, fields, data,
1348 context=context, log=messages.append),
1349 context=context, log=messages.append):
1351 cr.execute('SAVEPOINT model_load_save')
1352 except psycopg2.InternalError, e:
1353 # broken transaction, exit and hope the source error was
# Only report a generic error if nothing more specific was logged.
1355 if not any(message['type'] == 'error' for message in messages):
1356 messages.append(dict(info, type='error',message=
1357 u"Unknown database error: '%s'" % e))
1360 ids.append(ModelData._update(cr, uid, self._name,
1361 current_module, record, mode=mode, xml_id=xid,
1362 noupdate=noupdate, res_id=id, context=context))
1363 cr.execute('RELEASE SAVEPOINT model_load_save')
# DB warnings are reported but don't abort the batch.
1364 except psycopg2.Warning, e:
1365 _logger.exception('Failed to import record %s', record)
1366 messages.append(dict(info, type='warning', message=str(e)))
1367 cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
1368 except psycopg2.Error, e:
1369 _logger.exception('Failed to import record %s', record)
# Translate the Postgres error code into a client-facing message
# via the PGERROR_TO_OE dispatch table.
1370 messages.append(dict(
1372 **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
1373 # Failed to write, log to messages, rollback savepoint (to
1374 # avoid broken transaction) and keep going
1375 cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
# Any error anywhere voids the whole load (all-or-nothing semantics).
1376 if any(message['type'] == 'error' for message in messages):
1377 cr.execute('ROLLBACK TO SAVEPOINT model_load')
1379 return {'ids': ids, 'messages': messages}
1380 def _extract_records(self, cr, uid, fields_, data,
1381 context=None, log=lambda a: None):
1382 """ Generates record dicts from the data sequence.
1384 The result is a generator of dicts mapping field names to raw
1385 (unconverted, unvalidated) values.
1387 For relational fields, if sub-fields were provided the value will be
1388 a list of sub-records
1390 The following sub-fields may be set on the record (by key):
1391 * None is the name_get for the record (to use with name_create/name_search)
1392 * "id" is the External ID for the record
1393 * ".id" is the Database ID for the record
# Column objects by field name; includes inherited fields via _all_columns.
1395 columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
1396 # Fake columns to avoid special cases in extractor
1397 columns[None] = fields.char('rec_name')
1398 columns['id'] = fields.char('External ID')
1399 columns['.id'] = fields.integer('Database ID')
1401 # m2o fields can't be on multiple lines so exclude them from the
1402 # is_relational field rows filter, but special-case it later on to
1403 # be handled with relational fields (as it can have subfields)
1404 is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
# Tuple-getters over the column indices of o2m vs non-o2m fields.
1405 get_o2m_values = itemgetter_tuple(
1406 [index for index, field in enumerate(fields_)
1407 if columns[field[0]]._type == 'one2many'])
1408 get_nono2m_values = itemgetter_tuple(
1409 [index for index, field in enumerate(fields_)
1410 if columns[field[0]]._type != 'one2many'])
1411 # Checks if the provided row has any non-empty non-relational field
1412 def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
1413 return any(g(row)) and not any(f(row))
# Main scan loop; `index` advances by the number of rows each record spans.
1417 if index >= len(data): return
1420 # copy non-relational fields to record dict
1421 record = dict((field[0], value)
1422 for field, value in itertools.izip(fields_, row)
1423 if not is_relational(field[0]))
1425 # Get all following rows which have relational values attached to
1426 # the current record (no non-relational values)
1427 record_span = itertools.takewhile(
1428 only_o2m_values, itertools.islice(data, index + 1, None))
1429 # stitch record row back on for relational fields
1430 record_span = list(itertools.chain([row], record_span))
1431 for relfield in set(
1432 field[0] for field in fields_
1433 if is_relational(field[0])):
1434 column = columns[relfield]
1435 # FIXME: how to not use _obj without relying on fields_get?
1436 Model = self.pool[column._obj]
1438 # get only cells for this sub-field, should be strictly
1439 # non-empty, field path [None] is for name_get column
1440 indices, subfields = zip(*((index, field[1:] or [None])
1441 for index, field in enumerate(fields_)
1442 if field[0] == relfield))
1444 # return all rows which have at least one value for the
1445 # subfields of relfield
1446 relfield_data = filter(any, map(itemgetter_tuple(indices), record_span))
# Recurse into the co-model to extract the sub-records.
1447 record[relfield] = [subrecord
1448 for subrecord, _subinfo in Model._extract_records(
1449 cr, uid, subfields, relfield_data,
1450 context=context, log=log)]
# Yield the record plus the (inclusive) row range it was built from.
1452 yield record, {'rows': {
1454 'to': index + len(record_span) - 1
1456 index += len(record_span)
1457 def _convert_records(self, cr, uid, records,
1458 context=None, log=lambda a: None):
1459 """ Converts records from the source iterable (recursive dicts of
1460 strings) into forms which can be written to the database (via
1461 self.create or (ir.model.data)._update)
1463 :returns: a list of triplets of (id, xid, record)
1464 :rtype: list((int|None, str|None, dict))
1466 if context is None: context = {}
1467 Converter = self.pool['ir.fields.converter']
1468 columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
# Translated field labels, used to produce human-readable messages.
1469 Translation = self.pool['ir.translation']
1471 (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
1472 context.get('lang'))
1474 for f, column in columns.iteritems())
1476 convert = Converter.for_model(cr, uid, self, context=context)
# Turn a converter exception into a log message dict: warnings from
# `Warning` instances, errors otherwise; extra payload may ride in
# exception.args[1].
1478 def _log(base, field, exception):
1479 type = 'warning' if isinstance(exception, Warning) else 'error'
1480 # logs the logical (not human-readable) field name for automated
1481 # processing of response, but injects human readable in message
1482 record = dict(base, type=type, field=field,
1483 message=unicode(exception.args[0]) % base)
1484 if len(exception.args) > 1 and exception.args[1]:
1485 record.update(exception.args[1])
# CountingStream exposes .index so messages can point at the source row.
1488 stream = CountingStream(records)
1489 for record, extras in stream:
1492 # name_get/name_create
1493 if None in record: pass
# '.id' column: resolve the database id, tolerating non-integer
# values when the id column was overridden.
1500 dbid = int(record['.id'])
1502 # in case of overridden id column
1503 dbid = record['.id']
# A supplied database id must exist; otherwise report and skip.
1504 if not self.search(cr, uid, [('id', '=', dbid)], context=context):
1507 record=stream.index,
1509 message=_(u"Unknown database identifier '%s'") % dbid))
1512 converted = convert(record, lambda field, err:\
1513 _log(dict(extras, record=stream.index, field=field_names[field]), field, err))
1515 yield dbid, xid, converted, dict(extras, record=stream.index)
def get_invalid_fields(self, cr, uid):
    """Return, as a list, the fields recorded as invalid by the last
    `_validate` run on this model.

    `cr` and `uid` are accepted for API symmetry with the other ORM
    methods but are not used here.
    """
    # `_invalids` is the set maintained by _validate(); hand back a fresh
    # list so callers cannot mutate the internal state.
    return [field_name for field_name in self._invalids]
# Run all Python-level _constraints over `ids`; collect translated error
# messages and raise a single ValidateError if any constraint failed.
# NOTE(review): excerpt has gaps (e.g. the error list initialization and
# some else-branches are missing); comments cover visible code only.
1520 def _validate(self, cr, uid, ids, context=None):
1521 context = context or {}
1522 lng = context.get('lang')
1523 trans = self.pool.get('ir.translation')
1525 for constraint in self._constraints:
# Each constraint is (function, message-or-callable, field list).
1526 fun, msg, fields = constraint
1527 # We don't pass around the context here: validation code
1528 # must always yield the same results.
1529 if not fun(self, cr, uid, ids):
1530 # Check presence of __call__ directly instead of using
1531 # callable() because it will be deprecated as of Python 3.0
1532 if hasattr(msg, '__call__'):
1533 tmp_msg = msg(self, cr, uid, ids, context=context)
# Callable messages may return (format_string, params).
1534 if isinstance(tmp_msg, tuple):
1535 tmp_msg, params = tmp_msg
1536 translated_msg = tmp_msg % params
1538 translated_msg = tmp_msg
# Static messages go through the 'constraint' translation table.
1540 translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg)
1542 _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
# Remember which fields failed, for get_invalid_fields().
1544 self._invalids.update(fields)
1546 raise except_orm('ValidateError', '\n'.join(error_msgs))
# All constraints passed: clear the invalid-fields marker.
1548 self._invalids.clear()
1550 def default_get(self, cr, uid, fields_list, context=None):
1552 Returns default values for the fields in fields_list.
1554 :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
1555 :type fields_list: list
1556 :param context: optional context dictionary - it may contains keys for specifying certain options
1557 like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
1558 It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
1559 or override a default value for a field.
1560 A special ``bin_size`` boolean flag may also be passed in the context to request the
1561 value of all fields.binary columns to be returned as the size of the binary instead of its
1562 contents. This can also be selectively overriden by passing a field-specific flag
1563 in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
1564 Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
1565 :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
1567 # trigger view init hook
1568 self.view_init(cr, uid, fields_list, context)
1574 # get the default values for the inherited fields
1575 for t in self._inherits.keys():
1576 defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
1579 # get the default values defined in the object
1580 for f in fields_list:
1581 if f in self._defaults:
# A default may be a callable (model, cr, uid, context) or a constant.
1582 if callable(self._defaults[f]):
1583 defaults[f] = self._defaults[f](self, cr, uid, context)
1585 defaults[f] = self._defaults[f]
# Resolve the column object, own or inherited.
1587 fld_def = ((f in self._columns) and self._columns[f]) \
1588 or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
# Property fields: company-dependent defaults come from ir.property.
1591 if isinstance(fld_def, fields.property):
1592 property_obj = self.pool.get('ir.property')
1593 prop_value = property_obj.get(cr, uid, f, self._name, context=context)
# Browse records are reduced to their database id.
1595 if isinstance(prop_value, (browse_record, browse_null)):
1596 defaults[f] = prop_value.id
1598 defaults[f] = prop_value
1600 if f not in defaults:
1603 # get the default values set by the user and override the default
1604 # values defined in the object
1605 ir_values_obj = self.pool.get('ir.values')
1606 res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
1607 for id, field, field_value in res:
1608 if field in fields_list:
1609 fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
# m2o default: drop it if the referenced record no longer exists.
1610 if fld_def._type == 'many2one':
1611 obj = self.pool.get(fld_def._obj)
1612 if not obj.search(cr, uid, [('id', '=', field_value or False)]):
# m2m default: keep only ids that still exist.
1614 if fld_def._type == 'many2many':
1615 obj = self.pool.get(fld_def._obj)
1617 for i in range(len(field_value or [])):
1618 if not obj.search(cr, uid, [('id', '=',
1621 field_value2.append(field_value[i])
1622 field_value = field_value2
# o2m default: filter each sub-record's m2one values the same way.
1623 if fld_def._type == 'one2many':
1624 obj = self.pool.get(fld_def._obj)
1626 for i in range(len(field_value or [])):
1627 field_value2.append({})
1628 for field2 in field_value[i]:
1629 if field2 in obj._columns.keys() and obj._columns[field2]._type == 'many2one':
1630 obj2 = self.pool.get(obj._columns[field2]._obj)
1631 if not obj2.search(cr, uid,
1632 [('id', '=', field_value[i][field2])]):
1634 elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type == 'many2one':
1635 obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
1636 if not obj2.search(cr, uid,
1637 [('id', '=', field_value[i][field2])]):
1639 # TODO add test for many2many and one2many
1640 field_value2[i][field2] = field_value[i][field2]
1641 field_value = field_value2
1642 defaults[field] = field_value
1644 # get the default values from the context
# Context 'default_<field>' keys have the highest priority.
1645 for key in context or {}:
1646 if key.startswith('default_') and (key[8:] in fields_list):
1647 defaults[key[8:]] = context[key]
# Return the names of all fields of this model, own and inherited.
1650 def fields_get_keys(self, cr, user, context=None):
1651 res = self._columns.keys()
1652 # TODO I believe this loop can be replaced by
1653 # res.extend(self._inherit_fields.keys())
# Recurse through each _inherits parent to collect its field names.
1654 for parent in self._inherits:
1655 res.extend(self.pool.get(parent).fields_get_keys(cr, user, context))
# Pick a usable record-name field: _rec_name if it is a real column,
# otherwise the first declared column, else 'id'.
1658 def _rec_name_fallback(self, cr, uid, context=None):
1659 rec_name = self._rec_name
1660 if rec_name not in self._columns:
1661 rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"
1665 # Overload this method if you need a window title which depends on the context
# Hook returning an optional header/title string for the given view; the
# base implementation's return (on a line missing from this excerpt) is
# presumably a falsy default — see callers checking `if result:`.
1667 def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
1670 def user_has_groups(self, cr, uid, groups, context=None):
1671 """Return true if the user is at least member of one of the groups
1672 given in ``groups``. Typically used to resolve ``groups`` attribute
1673 in view and model definitions.
1675 :param str groups: comma-separated list of fully-qualified group
1676 external IDs, e.g.: ``base.group_user,base.group_system``
1677 :return: True if the current user is a member of one of the
# Delegate each membership check to res.users.has_group(); any match wins.
1680 return any([self.pool.get('res.users').has_group(cr, uid, group_ext_id)
1681 for group_ext_id in groups.split(',')])
1683 def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
1684 """Return the description of the fields in the node.
1686 In a normal call to this method, node is a complete view architecture
1687 but it is actually possible to give some sub-node (this is used so
1688 that the method can call itself recursively).
1690 Originally, the field descriptions are drawn from the node itself.
1691 But there is now some code calling fields_get() in order to merge some
1692 of that information in the architecture.
# NOTE(review): excerpt has many missing lines; comments cover visible
# code only.
# Encode unicode to utf-8 bytes before feeding strings back to lxml.
1704 if isinstance(s, unicode):
1705 return s.encode('utf8')
1708 def check_group(node):
1709 """Apply group restrictions, may be set at view level or model level::
1710 * at view level this means the element should be made invisible to
1711 people who are not members
1712 * at model level (exclusively for fields, obviously), this means
1713 the field should be completely removed from the view, as it is
1714 completely unavailable for non-members
1716 :return: True if field should be included in the result of fields_view_get
# Model-level groups on the column: remove the node entirely.
1718 if node.tag == 'field' and node.get('name') in self._all_columns:
1719 column = self._all_columns[node.get('name')].column
1720 if column.groups and not self.user_has_groups(cr, user,
1721 groups=column.groups,
1723 node.getparent().remove(node)
1724 fields.pop(node.get('name'), None)
1725 # no point processing view-level ``groups`` anymore, return
# View-level groups: keep the node but force it invisible.
1727 if node.get('groups'):
1728 can_see = self.user_has_groups(cr, user,
1729 groups=node.get('groups'),
1732 node.set('invisible', '1')
1733 modifiers['invisible'] = True
1734 if 'attrs' in node.attrib:
1735 del(node.attrib['attrs']) #avoid making field visible later
# ``groups`` is a server-side concern; strip it from the client arch.
1736 del(node.attrib['groups'])
1739 if node.tag in ('field', 'node', 'arrow'):
# Diagram node/arrow with an 'object': build a sub-view for that model.
1740 if node.get('object'):
1745 if f.tag == 'field':
1746 xml += etree.tostring(f, encoding="utf-8")
1748 new_xml = etree.fromstring(encode(xml))
1749 ctx = context.copy()
1750 ctx['base_model_name'] = self._name
1751 xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
1756 attrs = {'views': views}
1758 if node.get('name'):
# Resolve the column for this field node, own or inherited.
1761 if node.get('name') in self._columns:
1762 column = self._columns[node.get('name')]
1764 column = self._inherit_fields[node.get('name')][2]
# Relational field: process embedded sub-views (form/tree/...).
1769 relation = self.pool.get(column._obj)
1774 if f.tag in ('form', 'tree', 'graph', 'kanban'):
1776 ctx = context.copy()
1777 ctx['base_model_name'] = self._name
1778 xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
1779 views[str(f.tag)] = {
1783 attrs = {'views': views}
1784 if node.get('widget') and node.get('widget') == 'selection':
1785 # Prepare the cached selection list for the client. This needs to be
1786 # done even when the field is invisible to the current user, because
1787 # other events could need to change its value to any of the selectable ones
1788 # (such as on_change events, refreshes, etc.)
1790 # If domain and context are strings, we keep them for client-side, otherwise
1791 # we evaluate them server-side to consider them when generating the list of
1793 # TODO: find a way to remove this hack, by allow dynamic domains
1795 if column._domain and not isinstance(column._domain, basestring):
1796 dom = list(column._domain)
# `eval` is safe_eval (module import); evaluates the node's domain attr.
1797 dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
1798 search_context = dict(context)
1799 if column._context and not isinstance(column._context, basestring):
1800 search_context.update(column._context)
1801 attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
# Non-required widget: offer an empty choice.
1802 if (node.get('required') and not int(node.get('required'))) or not column.required:
1803 attrs['selection'].append((False, ''))
1804 fields[node.get('name')] = attrs
1806 field = model_fields.get(node.get('name'))
1808 transfer_field_to_modifiers(field, modifiers)
# Root form/tree nodes may take their title from view_header_get().
1811 elif node.tag in ('form', 'tree'):
1812 result = self.view_header_get(cr, user, False, node.tag, context)
1814 node.set('string', result)
1815 in_tree_view = node.tag == 'tree'
# Calendar views implicitly require their date/color fields.
1817 elif node.tag == 'calendar':
1818 for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
1819 if node.get(additional_field):
1820 fields[node.get(additional_field)] = {}
1822 if not check_group(node):
1823 # node must be removed, no need to proceed further with its children
1826 # The view architecture overrides the python model.
1827 # Get the attrs before they are (possibly) deleted by check_group below
1828 transfer_node_to_modifiers(node, modifiers, context, in_tree_view)
1830 # TODO remove attrs counterpart in modifiers when invisible is true ?
# Translate node text/tail/attributes when a language is active.
1833 if 'lang' in context:
1834 if node.text and node.text.strip():
1835 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.text.strip())
1837 node.text = node.text.replace(node.text.strip(), trans)
1838 if node.tail and node.tail.strip():
1839 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.tail.strip())
1841 node.tail = node.tail.replace(node.tail.strip(), trans)
1843 if node.get('string') and not result:
1844 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
1845 if trans == node.get('string') and ('base_model_name' in context):
1846 # If translation is same as source, perhaps we'd have more luck with the alternative model name
1847 # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
1848 trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
1850 node.set('string', trans)
1852 for attr_name in ('confirm', 'sum', 'avg', 'help', 'placeholder'):
1853 attr_value = node.get(attr_name)
1855 trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], attr_value)
1857 node.set(attr_name, trans)
# Recurse into children (filters/separators under a search field too).
1860 if children or (node.tag == 'field' and f.tag in ('filter','separator')):
1861 fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))
# Serialize accumulated modifiers back onto the node for the client.
1863 transfer_modifiers_to_node(modifiers, node)
1866 def _disable_workflow_buttons(self, cr, user, node):
1867 """ Set the buttons in node to readonly if the user can't activate them. """
1869 # admin user can always activate workflow buttons
1872 # TODO handle the case of more than one workflow for a model or multiple
1873 # transitions with different groups and same signal
1874 usersobj = self.pool.get('res.users')
# Only workflow buttons (type != 'object') are access-checked here.
1875 buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
1876 for button in buttons:
# NOTE(review): this read() is loop-invariant (depends only on `user`)
# and could be hoisted out of the loop.
1877 user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
# Collect the groups allowed to fire a transition matching this
# button's signal on this model's workflow.
1878 cr.execute("""SELECT DISTINCT t.group_id
1880 INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
1881 INNER JOIN wkf_transition t ON (t.act_to = a.id)
1884 AND t.group_id is NOT NULL
1885 """, (self._name, button.get('name')))
1886 group_ids = [x[0] for x in cr.fetchall() if x[0]]
# Clickable when the transition is unrestricted or groups intersect.
1887 can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
# '0' = enabled, '1' = readonly, as string attributes for the client.
1888 button.set('readonly', str(int(not can_click)))
# Serialize a processed view node back to an arch string and collect field
# descriptions; raises a descriptive error for fields present in the arch
# but unknown to the model.
# NOTE(review): decimated listing — gaps in the numbering (1893, 1895-1896,
# 1900-1902, 1913, 1924, 1926, 1931, 1934, 1938) indicate missing lines
# (e.g. the fields-initialization, some loop/condition headers).
1891 def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
1892 """ Return an architecture and a description of all the fields.
1894 The field description combines the result of fields_get() and
1897 :param node: the architecture as as an etree
1898 :return: a tuple (arch, fields) where arch is the given node as a
1899 string and fields is the description of all the fields.
# diagram views aggregate the fields of their node and arrow sub-models
1903 if node.tag == 'diagram':
1904 if node.getchildren()[0].tag == 'node':
1905 node_model = self.pool.get(node.getchildren()[0].get('object'))
1906 node_fields = node_model.fields_get(cr, user, None, context)
1907 fields.update(node_fields)
1908 if not node.get("create") and not node_model.check_access_rights(cr, user, 'create', raise_exception=False):
1909 node.set("create", 'false')
1910 if node.getchildren()[1].tag == 'arrow':
1911 arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
1912 fields.update(arrow_fields)
1914 fields = self.fields_get(cr, user, None, context)
1915 fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
1916 node = self._disable_workflow_buttons(cr, user, node)
# propagate missing access rights as view-level create/delete/edit flags
1917 if node.tag in ('kanban', 'tree', 'form', 'gantt'):
1918 for action, operation in (('create', 'create'), ('delete', 'unlink'), ('edit', 'write')):
1919 if not node.get(action) and not self.check_access_rights(cr, user, operation, raise_exception=False):
1920 node.set(action, 'false')
1921 arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
1922 for k in fields.keys():
1923 if k not in fields_def:
1925 for field in fields_def:
1927 # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
1928 fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
1929 elif field in fields:
1930 fields[field].update(fields_def[field])
# unknown field: build a helpful error listing the views that mention it
1932 cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
1933 res = cr.fetchall()[:]
1935 res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
1936 msg = "\n * ".join([r[0] for r in res])
1937 msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
1939 raise except_orm('View error', msg)
def _get_default_form_view(self, cr, user, context=None):
    """ Generates a default single-line form view using all fields
    of the current model except the one2many and many2many ones.

    :param cr: database cursor
    :param int user: user id
    :param dict context: connection context
    :returns: a form view as an lxml document
    :rtype: etree._Element
    """
    view = etree.Element('form', string=self._description)
    # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
    for field, descriptor in self.fields_get(cr, user, context=context).iteritems():
        if descriptor['type'] in ('one2many', 'many2many'):
            # x2many fields don't fit on a simple one-line form: skip them
            continue
        etree.SubElement(view, 'field', name=field)
        if descriptor['type'] == 'text':
            # text fields render full-width; break to a new line after them
            etree.SubElement(view, 'newline')
    return view
def _get_default_search_view(self, cr, user, context=None):
    """ Generates a single-field search view, based on _rec_name.

    :param cr: database cursor
    :param int user: user id
    :param dict context: connection context
    :returns: a search view as an lxml document
    :rtype: etree._Element
    """
    view = etree.Element('search', string=self._description)
    # search on the record's display-name field (or its fallback)
    etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
    return view
def _get_default_tree_view(self, cr, user, context=None):
    """ Generates a single-field tree view, based on _rec_name.

    :param cr: database cursor
    :param int user: user id
    :param dict context: connection context
    :returns: a tree view as an lxml document
    :rtype: etree._Element
    """
    view = etree.Element('tree', string=self._description)
    # show only the record's display-name field (or its fallback)
    etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
    return view
# Build a default calendar view by probing conventional field names for
# start date, color and stop date/delay.
# NOTE(review): decimated listing — the body of the nested set_first_of
# helper (original lines 2004-2010) and several statements (2013, 2015,
# 2019-2022, 2025, 2028, 2033, 2036-2039) are missing here.
1988 def _get_default_calendar_view(self, cr, user, context=None):
1989 """ Generates a default calendar view by trying to infer
1990 calendar fields from a number of pre-set attribute names
1992 :param cr: database cursor
1993 :param int user: user id
1994 :param dict context: connection context
1995 :returns: a calendar view
1996 :rtype: etree._Element
1998 def set_first_of(seq, in_, to):
1999 """Sets the first value of ``seq`` also found in ``in_`` to
2000 the ``to`` attribute of the view being closed over.
2002 Returns whether it's found a suitable value (and set it on
2003 the attribute) or not
2011 view = etree.Element('calendar', string=self._description)
2012 etree.SubElement(view, 'field', self._rec_name_fallback(cr, user, context))
# fall back to conventional start-date field names when _date_name is absent
2014 if self._date_name not in self._columns:
2016 for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
2017 if dt in self._columns:
2018 self._date_name = dt
2023 raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
2024 view.set('date_start', self._date_name)
# optional color grouping field
2026 set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
2027 self._columns, 'color')
# a calendar needs either an end date or a duration
2029 if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
2030 self._columns, 'date_stop'):
2031 if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
2032 self._columns, 'date_delay'):
2034 _('Invalid Object Architecture!'),
2035 _("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % self._name))
2040 # if view_id, view_type is not required
# Resolve a view (by id, by ``<type>_view_ref`` in context, or the default
# root view for the model/type), apply all inheriting views, post-process
# the arch, and optionally attach toolbar actions.
# NOTE(review): decimated listing — numbering gaps throughout indicate
# missing lines (loop headers, try/except, return statements). Treat the
# code below as a partial excerpt, not runnable source.
2042 def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
2044 Get the detailed composition of the requested view like fields, model, view architecture
2046 :param cr: database cursor
2047 :param user: current user id
2048 :param view_id: id of the view or None
2049 :param view_type: type of the view to return if view_id is None ('form', tree', ...)
2050 :param context: context arguments, like lang, time zone
2051 :param toolbar: true to include contextual actions
2052 :param submenu: deprecated
2053 :return: dictionary describing the composition of the requested view (including inherited views and extensions)
2054 :raise AttributeError:
2055 * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
2056 * if some tag other than 'position' is found in parent view
2057 :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
# local helper: coerce unicode to utf-8 bytes (Python 2)
2064 if isinstance(s, unicode):
2065 return s.encode('utf8')
# local helper: raise a descriptive AttributeError for a broken inherited view
2068 def raise_view_error(error_msg, child_view_id):
2069 view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
2070 error_msg = error_msg % {'parent_xml_id': view.xml_id}
2071 raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
2072 % (child_view.xml_id, self._name, error_msg))
2074 def locate(source, spec):
2075 """ Locate a node in a source (parent) architecture.
2077 Given a complete source (parent) architecture (i.e. the field
2078 `arch` in a view), and a 'spec' node (a node in an inheriting
2079 view that specifies the location in the source view of what
2080 should be changed), return (if it exists) the node in the
2081 source view matching the specification.
2083 :param source: a parent architecture to modify
2084 :param spec: a modifying node in an inheriting view
2085 :return: a node in the source matching the spec
2088 if spec.tag == 'xpath':
2089 nodes = source.xpath(spec.get('expr'))
2090 return nodes[0] if nodes else None
2091 elif spec.tag == 'field':
2092 # Only compare the field name: a field can be only once in a given view
2093 # at a given level (and for multilevel expressions, we should use xpath
2094 # inheritance spec anyway).
2095 for node in source.getiterator('field'):
2096 if node.get('name') == spec.get('name'):
# generic tag match: all attributes except position/version must agree
2100 for node in source.getiterator(spec.tag):
2101 if isinstance(node, SKIPPED_ELEMENT_TYPES):
2103 if all(node.get(attr) == spec.get(attr) \
2104 for attr in spec.attrib
2105 if attr not in ('position','version')):
2106 # Version spec should match parent's root element's version
2107 if spec.get('version') and spec.get('version') != source.get('version'):
2112 def apply_inheritance_specs(source, specs_arch, inherit_id=None):
2113 """ Apply an inheriting view.
2115 Apply to a source architecture all the spec nodes (i.e. nodes
2116 describing where and what changes to apply to some parent
2117 architecture) given by an inheriting view.
2119 :param source: a parent architecture to modify
2120 :param specs_arch: a modifying architecture in an inheriting view
2121 :param inherit_id: the database id of the inheriting view
2122 :return: a modified source where the specs are applied
2125 specs_tree = etree.fromstring(encode(specs_arch))
2126 # Queue of specification nodes (i.e. nodes describing where and
2127 # changes to apply to some parent architecture).
2128 specs = [specs_tree]
2132 if isinstance(spec, SKIPPED_ELEMENT_TYPES):
# a <data> wrapper just contributes its children as more specs
2134 if spec.tag == 'data':
2135 specs += [ c for c in specs_tree ]
2137 node = locate(source, spec)
2138 if node is not None:
2139 pos = spec.get('position', 'inside')
2140 if pos == 'replace':
# replacing the root swaps the whole source tree
2141 if node.getparent() is None:
2142 source = copy.deepcopy(spec[0])
2145 node.addprevious(child)
2146 node.getparent().remove(node)
2147 elif pos == 'attributes':
2148 for child in spec.getiterator('attribute'):
2149 attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
2151 node.set(attribute[0], attribute[1])
2153 del(node.attrib[attribute[0]])
2155 sib = node.getnext()
2159 elif pos == 'after':
2164 sib.addprevious(child)
2165 elif pos == 'before':
2166 node.addprevious(child)
2168 raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
# spec node not located: rebuild its opening tag for the error message
2171 ' %s="%s"' % (attr, spec.get(attr))
2172 for attr in spec.attrib
2173 if attr != 'position'
2175 tag = "<%s%s>" % (spec.tag, attrs)
2176 if spec.get('version') and spec.get('version') != source.get('version'):
2177 raise_view_error("Mismatching view API version for element '%s': %r vs %r in parent view '%%(parent_xml_id)s'" % \
2178 (tag, spec.get('version'), source.get('version')), inherit_id)
2179 raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
2183 def apply_view_inheritance(cr, user, source, inherit_id):
2184 """ Apply all the (directly and indirectly) inheriting views.
2186 :param source: a parent architecture to modify (with parent
2187 modifications already applied)
2188 :param inherit_id: the database view_id of the parent view
2189 :return: a modified source where all the modifying architecture
# recursively apply each child view, then that child's own children
2193 sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name)
2194 for (view_arch, view_id) in sql_inherit:
2195 source = apply_inheritance_specs(source, view_arch, view_id)
2196 source = apply_view_inheritance(cr, user, source, view_id)
# ---- main body of fields_view_get ----
2199 result = {'type': view_type, 'model': self._name}
2202 parent_view_model = None
2203 view_ref = context.get(view_type + '_view_ref')
2204 # Search for a root (i.e. without any parent) view.
# a 'module.xml_id' view reference in the context overrides view_id
2206 if view_ref and not view_id:
2208 module, view_ref = view_ref.split('.', 1)
2209 cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
2210 view_ref_res = cr.fetchone()
2212 view_id = view_ref_res[0]
2215 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2217 WHERE id=%s""", (view_id,))
2219 cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
2221 WHERE model=%s AND type=%s AND inherit_id IS NULL
2222 ORDER BY priority""", (self._name, view_type))
2223 sql_res = cr.dictfetchone()
# walk up inherit_id links until the root view is reached
2228 view_id = sql_res['inherit_id'] or sql_res['id']
2229 parent_view_model = sql_res['model']
2230 if not sql_res['inherit_id']:
2233 # if a view was found
2235 source = etree.fromstring(encode(sql_res['arch']))
2237 arch=apply_view_inheritance(cr, user, source, sql_res['id']),
2238 type=sql_res['type'],
2239 view_id=sql_res['id'],
2240 name=sql_res['name'],
2241 field_parent=sql_res['field_parent'] or False)
2243 # otherwise, build some kind of default view
2245 view = getattr(self, '_get_default_%s_view' % view_type)(
2247 except AttributeError:
2248 # what happens here, graph case?
2249 raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
# when the root view belongs to another model (mixed inheritance), expose
# it so translations can fall back on the parent model's terms
2257 if parent_view_model != self._name:
2258 ctx = context.copy()
2259 ctx['base_model_name'] = parent_view_model
2262 xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
2263 result['arch'] = xarch
2264 result['fields'] = xfields
# strip heavy report payloads from toolbar action dicts
2269 for key in ('report_sxw_content', 'report_rml_content',
2270 'report_sxw', 'report_rml',
2271 'report_sxw_content_data', 'report_rml_content_data'):
2275 ir_values_obj = self.pool.get('ir.values')
2276 resprint = ir_values_obj.get(cr, user, 'action',
2277 'client_print_multi', [(self._name, False)], False,
2279 resaction = ir_values_obj.get(cr, user, 'action',
2280 'client_action_multi', [(self._name, False)], False,
2283 resrelate = ir_values_obj.get(cr, user, 'action',
2284 'client_action_relate', [(self._name, False)], False,
2286 resaction = [clean(action) for action in resaction
2287 if view_type == 'tree' or not action[2].get('multi')]
2288 resprint = [clean(print_) for print_ in resprint
2289 if view_type == 'tree' or not print_[2].get('multi')]
2290 #When multi="True" set it will display only in More of the list view
2291 resrelate = [clean(action) for action in resrelate
2292 if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')]
2294 for x in itertools.chain(resprint, resaction, resrelate):
2295 x['string'] = x['name']
2297 result['toolbar'] = {
2299 'action': resaction,
# public alias for the name-mangled private method
2304 _view_look_dom_arch = __view_look_dom_arch
# Count the records matching a search domain by delegating to search()
# with count=True.
# NOTE(review): decimated listing — the docstring (2307-2308) and the
# return statements following the isinstance check (2311-2313) are missing.
2306 def search_count(self, cr, user, args, context=None):
2309 res = self.search(cr, user, args, context=context, count=True)
# presumably a list result is converted to its length — TODO confirm
2310 if isinstance(res, list):
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
    """
    Search for records based on a search domain.

    :param cr: database cursor
    :param user: current user id
    :param args: list of tuples specifying the search domain
                 [('field_name', 'operator', value), ...]. Pass an empty
                 list to match all records.
    :param offset: optional number of results to skip in the returned values (default: 0)
    :param limit: optional max number of records to return (default: **None**)
    :param order: optional columns to sort by (default: self._order=id)
    :param context: optional context arguments, like lang, time zone
    :type context: dictionary
    :param count: optional (default: **False**), if **True**, returns only the
                  number of records matching the criteria, not their ids
    :return: id or list of ids of records matching the criteria
    :rtype: integer or list of integers
    :raise AccessError: if user tries to bypass access rules for read on the
                        requested object.

    **Expressing a search domain (args)**

    Each tuple in the search domain needs to have 3 elements, in the form:
    **('field_name', 'operator', value)**, where:

        * **field_name** must be a valid name of field of the object model,
          possibly following many-to-one relationships using dot-notation,
          e.g 'street' or 'partner_id.country' are valid values.
        * **operator** must be a string with a valid comparison operator from
          this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in,
          child_of, parent_left, parent_right``.
          The semantics of most of these operators are obvious.
          The ``child_of`` operator will look for records who are children or
          grand-children of a given record, according to the semantics of this
          model (i.e following the relationship field named by
          ``self._parent_name``, by default ``parent_id``).
        * **value** must be a valid value to compare with the values of
          **field_name**, depending on its type.

    Domain criteria can be combined using 3 logical operators that can be
    added between tuples: '**&**' (logical AND, default), '**|**'
    (logical OR), '**!**' (logical NOT). These are **prefix** operators and
    the arity of the '**&**' and '**|**' operator is 2, while the arity of
    the '**!**' is just 1. Be very careful about this when you combine them
    the first time.

    Here is an example of searching for Partners named *ABC* from Belgium
    and Germany whose language is not english::

        [('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de')]

    The '&' is omitted as it is the default, and of course we could have
    used '!=' for the language, but what this domain really represents is::

        (name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
    """
    # All the heavy lifting (domain parsing, access rules, SQL generation)
    # happens in the private _search() implementation; this public entry
    # point only forwards its arguments unchanged.
    return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
# Default display-name computation: read _rec_name and format it through
# the column's as_display_name hook; fall back to "model,id" pairs.
# NOTE(review): decimated listing — the numbering gaps (2363-2365,
# 2367-2369, 2371-2372) hide lines, presumably the empty-ids guard and the
# scalar-to-list normalization of ``ids`` — TODO confirm against upstream.
2358 def name_get(self, cr, user, ids, context=None):
2359 """Returns the preferred display value (text representation) for the records with the
2360 given ``ids``. By default this will be the value of the ``name`` column, unless
2361 the model implements a custom behavior.
2362 Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
2366 :return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
2370 if isinstance(ids, (int, long)):
2373 if self._rec_name in self._all_columns:
2374 rec_name_column = self._all_columns[self._rec_name].column
2375 return [(r['id'], rec_name_column.as_display_name(cr, user, self, r[self._rec_name], context=context))
2376 for r in self.read(cr, user, ids, [self._rec_name],
2377 load='_classic_write', context=context)]
# no usable _rec_name column: synthesize "model,id" representations
2378 return [(id, "%s,%s" % (self._name, id)) for id in ids]
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
    """Search for records that have a display name matching the given
    ``name`` pattern when compared with the given ``operator``, while also
    matching the optional search domain (``args``).

    This is used for example to provide suggestions based on a partial
    value for a relational field. Can sometimes be seen as the inverse
    function of :meth:`~.name_get`, but it is not guaranteed to be.

    This method is equivalent to calling :meth:`~.search` with a search
    domain based on ``name`` and then :meth:`~.name_get` on the result of
    the search.

    :param str name: the name pattern to match against the display name
    :param list args: optional search domain (see :meth:`~.search` for
                      syntax), specifying further restrictions
    :param str operator: domain operator for matching the ``name`` pattern,
                         such as ``'like'`` or ``'='``
    :param context: context arguments, like lang, time zone
    :param int limit: optional max number of records to return
    :return: list of pairs ``(id,text_repr)`` for all matching records.
    """
    # The private _name_search() additionally accepts a dedicated uid for
    # the name_get step (access-rights workaround); this public wrapper
    # simply forwards with the default behavior.
    return self._name_search(cr, user, name, args, operator, context, limit)
def name_create(self, cr, uid, name, context=None):
    """Creates a new record by calling :meth:`~.create` with only one
    value provided: the name of the new record (``_rec_name`` field).
    The new record will also be initialized with any default values
    applicable to this model, or provided through the context. The usual
    behavior of :meth:`~.create` applies.
    Similarly, this method may raise an exception if the model has
    multiple required fields and some do not have default values.

    :param name: name of the record to create
    :param context: context arguments, like lang, time zone
    :return: the :meth:`~.name_get` pair value for the newly-created record.
    """
    rec_id = self.create(cr, uid, {self._rec_name: name}, context)
    # name_get returns a list of (id, text_repr) pairs; there is exactly
    # one record here, so return its single pair.
    return self.name_get(cr, uid, [rec_id], context)[0]
2418 # private implementation of name_search, allows passing a dedicated user for the name_get part to
2419 # solve some access rights issues
# NOTE(review): decimated listing — lines 2421-2425 are missing, presumably
# the None-defaults normalization and the defensive copy of ``args`` (the
# ``args +=`` below would otherwise mutate the caller's list) — TODO confirm.
2420 def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
2426 # optimize out the default criterion of ``ilike ''`` that matches everything
2427 if not (name == '' and operator == 'ilike'):
2428 args += [(self._rec_name, operator, name)]
# the search runs as ``user``; only the name_get step may use the
# dedicated uid (access-rights workaround)
2429 access_rights_uid = name_get_uid or user
2430 ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
2431 res = self.name_get(cr, access_rights_uid, ids, context)
# Read the translated field labels (and, presumably, other translatable
# terms) for the given languages, merging in _inherits parents.
# NOTE(review): decimated listing — the language/field loop headers
# (2441, 2443), the else branches (2446, 2448) and the return (2458-2459)
# are missing from this excerpt.
2434 def read_string(self, cr, uid, id, langs, fields=None, context=None):
2437 self.pool.get('ir.translation').check_access_rights(cr, uid, 'read')
# default to every own and inherited field
2439 fields = self._columns.keys() + self._inherit_fields.keys()
2440 #FIXME: collect all calls to _get_source into one SQL call.
2442 res[lang] = {'code': lang}
2444 if f in self._columns:
2445 res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
2447 res[lang][f] = res_trans
# fall back to the untranslated column label
2449 res[lang][f] = self._columns[f].string
# merge translations coming from _inherits parent models
2450 for table in self._inherits:
2451 cols = intersect(self._inherit_fields.keys(), fields)
2452 res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
2455 res[lang]['code'] = lang
2456 for f in res2[lang]:
2457 res[lang][f] = res2[lang][f]
# Write translated field labels for the given languages, propagating to
# _inherits parent models.
# NOTE(review): decimated listing — the language/field loop headers
# (2463-2464), a guard around the recursive call (2470) and the return
# (2472-2473) are missing from this excerpt.
2460 def write_string(self, cr, uid, id, langs, vals, context=None):
2461 self.pool.get('ir.translation').check_access_rights(cr, uid, 'write')
2462 #FIXME: try to only call the translation in one SQL
2465 if field in self._columns:
# the untranslated column label is stored as the translation source
2466 src = self._columns[field].string
2467 self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
# propagate label changes to _inherits parent models
2468 for table in self._inherits:
2469 cols = intersect(self._inherit_fields.keys(), vals)
2471 self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
# Complete a values dict for create() with default values for every field
# not provided by the caller, converting x2many defaults to command lists.
# NOTE(review): decimated listing — lines 2486, 2490 (the ``for dv in
# defaults`` loop header, presumably) and 2500-2502 (the returns) are
# missing from this excerpt.
2474 def _add_missing_default_values(self, cr, uid, values, context=None):
2475 missing_defaults = []
2476 avoid_tables = [] # avoid overriding inherited values when parent is set
2477 for tables, parent_field in self._inherits.items():
2478 if parent_field in values:
2479 avoid_tables.append(tables)
# own columns without a provided value need a default
2480 for field in self._columns.keys():
2481 if not field in values:
2482 missing_defaults.append(field)
# inherited fields too, unless their parent record is explicitly linked
2483 for field in self._inherit_fields.keys():
2484 if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
2485 missing_defaults.append(field)
2487 if len(missing_defaults):
2488 # override defaults with the provided values, never allow the other way around
2489 defaults = self.default_get(cr, uid, missing_defaults, context)
# many2many defaults given as plain id lists become a (6,0,ids) command
2491 if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
2492 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
2493 and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
2494 defaults[dv] = [(6, 0, defaults[dv])]
# one2many defaults given as dicts become (0,0,vals) create commands
2495 if (dv in self._columns and self._columns[dv]._type == 'one2many' \
2496 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
2497 and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
2498 defaults[dv] = [(0, 0, x) for x in defaults[dv]]
# caller-provided values always win over defaults
2499 defaults.update(values)
# NOTE(review): decimated listing — lines 2505, 2508-2509 (docstring close
# and the ``try:``), 2511 (presumably the actual cache clear) and 2514 are
# missing from this excerpt.
2503 def clear_caches(self):
2504 """ Clear the caches
2506 This clears the caches associated to methods decorated with
2507 ``tools.ormcache`` or ``tools.ormcache_multi``.
# probe for the cache attribute; AttributeError below means "no cache"
2510 getattr(self, '_ormcache')
# flag the registry so other workers know caches were invalidated
2512 self.pool._any_cache_cleared = True
2513 except AttributeError:
# Merge read_group results (left side) with the full list of expected
# groups from _group_by_full (right side), so empty groups still appear.
# NOTE(review): decimated listing — several lines are missing (e.g. 2533-
# 2534, 2543-2544 result/known_values initialization, 2550, 2559, 2579,
# 2581-2583, 2585-2586 the fold loop header and return).
2517 def _read_group_fill_results(self, cr, uid, domain, groupby, groupby_list, aggregated_fields,
2518 read_group_result, read_group_order=None, context=None):
2519 """Helper method for filling in empty groups for all possible values of
2520 the field being grouped by"""
2522 # self._group_by_full should map groupable fields to a method that returns
2523 # a list of all aggregated values that we want to display for this field,
2524 # in the form of a m2o-like pair (key,label).
2525 # This is useful to implement kanban views for instance, where all columns
2526 # should be displayed even if they don't contain any record.
2528 # Grab the list of all groups that should be displayed, including all present groups
2529 present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
2530 all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
2531 read_group_order=read_group_order,
2532 access_rights_uid=openerp.SUPERUSER_ID,
# template for synthesized (empty) groups: no aggregates, zero count
2535 result_template = dict.fromkeys(aggregated_fields, False)
2536 result_template[groupby + '_count'] = 0
2537 if groupby_list and len(groupby_list) > 1:
2538 result_template['__context'] = {'group_by': groupby_list[1:]}
2540 # Merge the left_side (current results as dicts) with the right_side (all
2541 # possible values as m2o pairs). Both lists are supposed to be using the
2542 # same ordering, and can be merged in one pass.
2545 def append_left(left_side):
2546 grouped_value = left_side[groupby] and left_side[groupby][0]
2547 if not grouped_value in known_values:
2548 result.append(left_side)
2549 known_values[grouped_value] = left_side
# already known group: only refresh its count
2551 count_attr = groupby + '_count'
2552 known_values[grouped_value].update({count_attr: left_side[count_attr]})
2553 def append_right(right_side):
2554 grouped_value = right_side[0]
2555 if not grouped_value in known_values:
# synthesize an empty group entry from the template
2556 line = dict(result_template)
2557 line[groupby] = right_side
2558 line['__domain'] = [(groupby,'=',grouped_value)] + domain
2560 known_values[grouped_value] = line
# one-pass merge of the two ordered lists
2561 while read_group_result or all_groups:
2562 left_side = read_group_result[0] if read_group_result else None
2563 right_side = all_groups[0] if all_groups else None
2564 assert left_side is None or left_side[groupby] is False \
2565 or isinstance(left_side[groupby], (tuple,list)), \
2566 'M2O-like pair expected, got %r' % left_side[groupby]
2567 assert right_side is None or isinstance(right_side, (tuple,list)), \
2568 'M2O-like pair expected, got %r' % right_side
2569 if left_side is None:
2570 append_right(all_groups.pop(0))
2571 elif right_side is None:
2572 append_left(read_group_result.pop(0))
2573 elif left_side[groupby] == right_side:
2574 append_left(read_group_result.pop(0))
2575 all_groups.pop(0) # discard right_side
2576 elif not left_side[groupby] or not left_side[groupby][0]:
2577 # left side == "Undefined" entry, not present on right_side
2578 append_left(read_group_result.pop(0))
2580 append_right(all_groups.pop(0))
# mark each group folded/unfolded per _group_by_full's folded mapping
2584 r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
# Grouped read: builds and executes an aggregated SQL query, then formats
# each group with its drill-down __domain/__context.
# NOTE(review): decimated listing — numbering gaps (2604, 2611-2612, 2615,
# 2617, 2620, 2624, 2628-2629, 2633, 2636, 2638, 2647, 2649, 2653, 2655,
# 2661-2662, 2665, 2667, 2673, 2675-2676, 2681-2682, 2685, 2688-2689,
# 2694, 2696-2698, 2707, 2715-2716, 2720-2723) hide lines; treat as a
# partial excerpt, not runnable source.
2587 def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
2589 Get the list of records in list view grouped by the given ``groupby`` fields
2591 :param cr: database cursor
2592 :param uid: current user id
2593 :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
2594 :param list fields: list of fields present in the list view specified on the object
2595 :param list groupby: fields by which the records will be grouped
2596 :param int offset: optional number of records to skip
2597 :param int limit: optional max number of records to return
2598 :param dict context: context arguments, like lang, time zone
2599 :param list orderby: optional ``order by`` specification, for
2600 overriding the natural sort ordering of the
2601 groups, see also :py:meth:`~osv.osv.osv.search`
2602 (supported only for many2one fields currently)
2603 :return: list of dictionaries(one dictionary for each record) containing:
2605 * the values of fields grouped by the fields in ``groupby`` argument
2606 * __domain: list of tuples specifying the search criteria
2607 * __context: dictionary with argument like ``groupby``
2608 :rtype: [{'field_name_1': value, ...]
2609 :raise AccessError: * if user has no read rights on the requested object
2610 * if user tries to bypass access rules for read on the requested object
2613 context = context or {}
2614 self.check_access_rights(cr, uid, 'read')
2616 fields = self._columns.keys()
# build the WHERE clause from the domain, then apply record rules
2618 query = self._where_calc(cr, uid, domain, context=context)
2619 self._apply_ir_rules(cr, uid, query, 'read', context=context)
2621 # Take care of adding join(s) if groupby is an '_inherits'ed field
2622 groupby_list = groupby
2623 qualified_groupby_field = groupby
# only the first groupby level is computed here; the rest is delegated
# to the client through __context['group_by']
2625 if isinstance(groupby, list):
2626 groupby = groupby[0]
2627 qualified_groupby_field = self._inherits_join_calc(groupby, query)
2630 assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
2631 groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
2632 assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
2634 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
2635 fget = self.fields_get(cr, uid, fields)
2637 group_count = group_by = groupby
2639 if fget.get(groupby):
2640 groupby_type = fget[groupby]['type']
# dates/datetimes are grouped by month
2641 if groupby_type in ('date', 'datetime'):
2642 qualified_groupby_field = "to_char(%s,'yyyy-mm')" % qualified_groupby_field
2643 flist = "%s as %s " % (qualified_groupby_field, groupby)
2644 elif groupby_type == 'boolean':
2645 qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
2646 flist = "%s as %s " % (qualified_groupby_field, groupby)
2648 flist = qualified_groupby_field
2650 # Don't allow arbitrary values, as this would be a SQL injection vector!
2651 raise except_orm(_('Invalid group_by'),
2652 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
# numeric persisted fields get aggregated (sum by default)
2654 aggregated_fields = [
2656 if f not in ('id', 'sequence')
2657 if fget[f]['type'] in ('integer', 'float')
2658 if (f in self._columns and getattr(self._columns[f], '_classic_write'))]
2659 for f in aggregated_fields:
2660 group_operator = fget[f].get('group_operator', 'sum')
2663 qualified_field = '"%s"."%s"' % (self._table, f)
2664 flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)
2666 gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''
2668 from_clause, where_clause, where_clause_params = query.get_sql()
2669 where_clause = where_clause and ' WHERE ' + where_clause
2670 limit_str = limit and ' limit %d' % limit or ''
2671 offset_str = offset and ' offset %d' % offset or ''
2672 if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
# flist and qualified_groupby_field were validated above, so this
# string-built query is not an injection vector here
2674 cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
2677 for r in cr.dictfetchall():
2678 for fld, val in r.items():
2679 if val is None: r[fld] = False
2681 alldata[r['id']] = r
2683 order = orderby or groupby
2684 data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=order, context=context)
2686 # the IDs of records that have groupby field value = False or '' should be included too
2687 data_ids += set(alldata.keys()).difference(data_ids)
2690 data = self.read(cr, uid, data_ids, [groupby], context=context)
2691 # restore order of the search as read() uses the default _order (this is only for groups, so the footprint of data should be small):
2692 data_dict = dict((d['id'], d[groupby] ) for d in data)
2693 result = [{'id': i, groupby: data_dict[i]} for i in data_ids]
2695 result = [{'id': i} for i in data_ids]
# attach drill-down domain/context to every group
2699 d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
2700 if not isinstance(groupby_list, (str, unicode)):
2701 if groupby or not context.get('group_by_no_leaf', False):
2702 d['__context'] = {'group_by': groupby_list[1:]}
2703 if groupby and groupby in fget:
# expand 'yyyy-mm' month groups into a localized label and a
# [month-start, month-end] date range domain
2704 if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
2705 dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
2706 days = calendar.monthrange(dt.year, dt.month)[1]
2708 date_value = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d')
2709 d[groupby] = babel.dates.format_date(
2710 date_value, format='MMMM yyyy', locale=context.get('lang', 'en_US'))
2711 d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
2712 (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
2713 del alldata[d['id']][groupby]
2714 d.update(alldata[d['id']])
# optionally complete with empty groups (kanban columns etc.)
2717 if groupby and groupby in self._group_by_full:
2718 result = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
2719 aggregated_fields, result, read_group_order=order,
    def _inherits_join_add(self, current_model, parent_model_name, query):
        """
        Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)

        :param current_model: current model object
        :param parent_model_name: name of the parent model for which the clauses should be added
        :param query: query object on which the JOIN should be added
        """
        # the m2o column on current_model that links to the parent record
        inherits_field = current_model._inherits[parent_model_name]
        parent_model = self.pool.get(parent_model_name)
        # join current table to the parent table on <inherits_field> = parent.id;
        # implicit=True is handled by Query.add_join (presumably deduplicating
        # the join, per the docstring's "no duplicates" — TODO confirm in Query)
        parent_alias, parent_alias_statement = query.add_join((current_model._table, parent_model._table, inherits_field, 'id', inherits_field), implicit=True)
        # NOTE(review): the original returns the parent alias here; the return
        # statement appears to be missing from this copy of the file.
2736 def _inherits_join_calc(self, field, query):
2738 Adds missing table select and join clause(s) to ``query`` for reaching
2739 the field coming from an '_inherits' parent table (no duplicates).
2741 :param field: name of inherited field to reach
2742 :param query: query object on which the JOIN should be added
2743 :return: qualified name of field, to be used in SELECT clause
2745 current_table = self
2746 parent_alias = '"%s"' % current_table._table
2747 while field in current_table._inherit_fields and not field in current_table._columns:
2748 parent_model_name = current_table._inherit_fields[field][0]
2749 parent_table = self.pool.get(parent_model_name)
2750 parent_alias = self._inherits_join_add(current_table, parent_model_name, query)
2751 current_table = parent_table
2752 return '%s."%s"' % (parent_alias, field)
    def _parent_store_compute(self, cr):
        # Rebuild the parent_left/parent_right nested-set interval values for
        # the whole table via a depth-first traversal from the root records.
        # NOTE(review): several original lines (the guard's `return`, `pos2`
        # initialization, the `if not root:` header, `pos = 0`, the outer
        # cr.execute(query) and the final returns) are missing from this copy;
        # gaps are left as-is.
        if not self._parent_store:
        _logger.info('Computing parent left and right for table %s...', self._table)
        def browse_rec(root, pos=0):
            # visit `root`, recurse into its children (optionally ordered by
            # _parent_order), then write back the computed interval
            where = self._parent_name+'='+str(root)
                where = self._parent_name+' IS NULL'
            if self._parent_order:
                where += ' order by '+self._parent_order
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            for id in cr.fetchall():
                pos2 = browse_rec(id[0], pos2)
            cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
        # start the traversal from the records without a parent
        query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
        if self._parent_order:
            query += ' order by ' + self._parent_order
        for (root,) in cr.fetchall():
            pos = browse_rec(root, pos)
    def _update_store(self, cr, f, k):
        # Recompute and persist the stored value of function field `k` (column
        # object `f`) for every record of the table.
        # NOTE(review): the batching loop header (`while ids_lst:` /
        # `iids = ids_lst[:40]`) and the `f._multi` / tuple-unwrap lines are
        # missing from this copy; gaps are left as-is.
        _logger.info("storing computed values of fields.function '%s'", k)
        ss = self._columns[k]._symbol_set
        update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
        cr.execute('select id from '+self._table)
        ids_lst = map(lambda x: x[0], cr.fetchall())
        # drop the batch just processed (batches of 40 ids)
        ids_lst = ids_lst[40:]
        res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
        for key, val in res.items():
            # if val is a many2one, just write the ID
            if type(val) == tuple:
            if val is not False:
                cr.execute(update_query, (ss[1](val), key))
    def _check_selection_field_value(self, cr, uid, field, value, context=None):
        """Raise except_orm if value is not among the valid values for the selection field"""
        # NOTE(review): several original lines (the try/except around long(),
        # the model-existence check, `val = ...` assignments and the early
        # `return` statements) are missing from this copy; gaps left as-is.
        if self._columns[field]._type == 'reference':
            # reference values look like 'model.name,id'
            val_model, val_id_str = value.split(',', 1)
            val_id = long(val_id_str)
            raise except_orm(_('ValidateError'),
                             _('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
        if isinstance(self._columns[field].selection, (tuple, list)):
            # static selection list: simple membership test
            if val in dict(self._columns[field].selection):
        # dynamic selection: call it to obtain the current candidates
        elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
        raise except_orm(_('ValidateError'),
                         _('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
    def _check_removed_columns(self, cr, log=False):
        # iterate on the database columns to drop the NOT NULL constraints
        # of fields which were required but have been removed (or will be added by another module)
        columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
        columns += MAGIC_COLUMNS
        # list live DB columns of this table that no longer back a field
        cr.execute("SELECT a.attname, a.attnotnull"
                   " FROM pg_class c, pg_attribute a"
                   " WHERE c.relname=%s"
                   " AND c.oid=a.attrelid"
                   " AND a.attisdropped=%s"
                   " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
                   " AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
        for column in cr.dictfetchall():
            # NOTE(review): the original guards this debug with `if log:`; that
            # line appears to be missing from this copy.
            _logger.debug("column %s is in the table %s but not in the corresponding object %s",
                          column['attname'], self._table, self._name)
            if column['attnotnull']:
                # stale NOT NULL left over from a removed required field
                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
                _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                              self._table, column['attname'])
    def _save_constraint(self, cr, constraint_name, type):
        """
        Record the creation of a constraint for this model, to make it possible
        to delete it later when the module is uninstalled. Type can be either
        'f' or 'u' depending on the constraint being a foreign key or not.
        """
        assert type in ('f', 'u')
        # NOTE(review): the `cr.execute("""` openers of the two statements
        # below and the `if cr.rowcount: return` guard between them are
        # missing from this copy; gaps left as-is.
        # check whether the constraint is already recorded for this module
                SELECT 1 FROM ir_model_constraint, ir_module_module
                WHERE ir_model_constraint.module=ir_module_module.id
                    AND ir_model_constraint.name=%s
                    AND ir_module_module.name=%s
                """, (constraint_name, self._module))
        # record it for later uninstall cleanup
                INSERT INTO ir_model_constraint
                    (name, date_init, date_update, module, model, type)
                VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
                    (SELECT id FROM ir_module_module WHERE name=%s),
                    (SELECT id FROM ir_model WHERE model=%s), %s)""",
                    (constraint_name, self._module, self._name, type))
    def _save_relation_table(self, cr, relation_table):
        """
        Record the creation of a many2many for this model, to make it possible
        to delete it later when the module is uninstalled.
        """
        # NOTE(review): the `cr.execute("""` opener of the SELECT below and the
        # `if cr.rowcount: return` guard are missing from this copy.
        # check whether the relation table is already recorded for this module
            SELECT 1 FROM ir_model_relation, ir_module_module
            WHERE ir_model_relation.module=ir_module_module.id
                AND ir_model_relation.name=%s
                AND ir_module_module.name=%s
            """, (relation_table, self._module))
        # record it for later uninstall cleanup
        cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
                      VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
                        (SELECT id FROM ir_module_module WHERE name=%s),
                        (SELECT id FROM ir_model WHERE model=%s))""",
                   (relation_table, self._module, self._name))
2884 # checked version: for direct m2o starting from `self`
2885 def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
2886 assert self.is_transient() or not dest_model.is_transient(), \
2887 'Many2One relationships from non-transient Model to TransientModel are forbidden'
2888 if self.is_transient() and not dest_model.is_transient():
2889 # TransientModel relationships to regular Models are annoying
2890 # usually because they could block deletion due to the FKs.
2891 # So unless stated otherwise we default them to ondelete=cascade.
2892 ondelete = ondelete or 'cascade'
2893 fk_def = (self._table, source_field, dest_model._table, ondelete or 'set null')
2894 self._foreign_keys.add(fk_def)
2895 _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
2897 # unchecked version: for custom cases, such as m2m relationships
2898 def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
2899 fk_def = (source_table, source_field, dest_model._table, ondelete or 'set null')
2900 self._foreign_keys.add(fk_def)
2901 _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
2903 def _drop_constraint(self, cr, source_table, constraint_name):
2904 cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
    def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
        # Find FK constraint(s) currently established for the m2o field,
        # and see whether they are stale or not
        # NOTE(review): a few original lines (`cons = constraints[0]`, the
        # early return when the constraint is already correct, and the
        # `elif constraints:` header before the multiple-FK cleanup) are
        # missing from this copy; gaps left as-is.
        cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
                             cl2.relname as foreign_table
                      FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                           pg_attribute as att1, pg_attribute as att2
                      WHERE con.conrelid = cl1.oid
                        AND cl1.relname = %s
                        AND con.confrelid = cl2.oid
                        AND array_lower(con.conkey, 1) = 1
                        AND con.conkey[1] = att1.attnum
                        AND att1.attrelid = cl1.oid
                        AND att1.attname = %s
                        AND array_lower(con.confkey, 1) = 1
                        AND con.confkey[1] = att2.attnum
                        AND att2.attrelid = cl2.oid
                        AND att2.attname = %s
                        AND con.contype = 'f'""", (source_table, source_field, 'id'))
        constraints = cr.dictfetchall()
        if len(constraints) == 1:
            # Is it the right constraint?
            if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
                    or cons['foreign_table'] != dest_model._table:
                # Wrong FK: drop it and recreate
                _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
                              source_table, cons['constraint_name'])
                self._drop_constraint(cr, source_table, cons['constraint_name'])
                # it's all good, nothing to do!
            # Multiple FKs found for the same field, drop them all, and re-create
            for cons in constraints:
                _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
                              source_table, cons['constraint_name'])
                self._drop_constraint(cr, source_table, cons['constraint_name'])
        # (re-)create the FK
        self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
    def _auto_init(self, cr, context=None):
        """

        Call _field_create and, unless _auto is False:

        - create the corresponding table in database for the model,
        - possibly add the parent columns in database,
        - possibly add the columns 'create_uid', 'create_date', 'write_uid',
          'write_date' in database if _log_access is True (the default),
        - report on database columns no more existing in _columns,
        - remove no more existing not null constraints,
        - alter existing database columns to match _columns,
        - create database tables to match _columns,
        - add database indices to match _columns,
        - save in self._foreign_keys a list a foreign keys to create (see
        """
        # NOTE(review): this copy of the file is missing many original lines
        # (`todo_end = []`, `continue` statements, `if create:`/`else:` and
        # try/except headers, `cr.commit()` calls, argument-continuation
        # lines, the final `return todo_end`, ...). The surviving lines are
        # kept byte-identical and indented to reflect the original structure.
        self._foreign_keys = set()
        raise_on_invalid_object_name(self._name)
        store_compute = False
        update_custom_fields = context.get('update_custom_fields', False)
        self._field_create(cr, context=context)
        create = not self._table_exist(cr)
        if getattr(self, '_auto', True):
                self._create_table(cr)

            if self._parent_store:
                if not self._parent_columns_exist(cr):
                    self._create_parent_columns(cr)
                    store_compute = True

            # Create the create_uid, create_date, write_uid, write_date, columns if desired.
            if self._log_access:
                self._add_log_columns(cr)

            self._check_removed_columns(cr, log=False)

            # iterate on the "object columns"
            column_data = self._select_column_data(cr)

            for k, f in self._columns.iteritems():
                if k in MAGIC_COLUMNS:
                # Don't update custom (also called manual) fields
                if f.manual and not update_custom_fields:

                if isinstance(f, fields.one2many):
                    self._o2m_raise_on_missing_reference(cr, f)

                elif isinstance(f, fields.many2many):
                    self._m2m_raise_or_create_relation(cr, f)

                    res = column_data.get(k)

                    # The field is not found as-is in database, try if it
                    # exists with an old name.
                    if not res and hasattr(f, 'oldname'):
                        res = column_data.get(f.oldname)
                            cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
                            column_data[k] = res
                            _schema.debug("Table '%s': renamed column '%s' to '%s'",
                                          self._table, f.oldname, k)

                    # The field already exists in database. Possibly
                    # change its type, rename it, drop it or change its
                        f_pg_type = res['typname']
                        f_pg_size = res['size']
                        f_pg_notnull = res['attnotnull']
                        # non-stored function fields keep no DB column: drop it
                        if isinstance(f, fields.function) and not f.store and\
                                not getattr(f, 'nodrop', False):
                            _logger.info('column %s (%s) in table %s removed: converted to a function !\n',
                                         k, f.string, self._table)
                            cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
                            _schema.debug("Table '%s': dropped column '%s' with cascade",
                            f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
                                # in-place cast candidates: (pg type, orm type,
                                # new pg type, SQL cast suffix)
                                ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
                                ('varchar', 'text', 'TEXT', ''),
                                ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                                ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
                                ('timestamp', 'date', 'date', '::date'),
                                ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                                ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                            # widen an undersized varchar through a temp column
                            if f_pg_type == 'varchar' and f._type == 'char' and ((f.size is None and f_pg_size) or f_pg_size < f.size):
                                cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
                                cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
                                cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                                _schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
                                              self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited')
                            if (f_pg_type==c[0]) and (f._type==c[1]):
                                if f_pg_type != f_obj_type:
                                    cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                                    cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
                                    cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
                                    cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                                    _schema.debug("Table '%s': column '%s' changed type from %s to %s",
                                                  self._table, k, c[0], c[1])

                            # no safe cast available: park the old data in a
                            # fresh "<k>_movedN" column and recreate the column
                            if f_pg_type != f_obj_type:
                                    newname = k + '_moved' + str(i)
                                    cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
                                               "WHERE c.relname=%s " \
                                               "AND a.attname=%s " \
                                               "AND c.oid=a.attrelid ", (self._table, newname))
                                    if not cr.fetchone()[0]:
                                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                                    cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
                                    cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                                    cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                                    _schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
                                                  self._table, k, f_pg_type, f._type, newname)

                            # if the field is required and hasn't got a NOT NULL constraint
                            if f.required and f_pg_notnull == 0:
                                # set the field to the default value if any
                                if k in self._defaults:
                                    if callable(self._defaults[k]):
                                        default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                                        default = self._defaults[k]

                                    if default is not None:
                                        ss = self._columns[k]._symbol_set
                                        query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
                                        cr.execute(query, (ss[1](default),))
                                # add the NOT NULL constraint
                                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                                    _schema.debug("Table '%s': column '%s': added NOT NULL constraint",
                                    msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
                                          "If you want to have it, you should update the records and execute manually:\n"\
                                          "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                                    _schema.warning(msg, self._table, k, self._table, k)
                            elif not f.required and f_pg_notnull == 1:
                                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                                _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                            # Verify index
                            indexname = '%s_%s_index' % (self._table, k)
                            cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
                            res2 = cr.dictfetchall()
                            if not res2 and f.select:
                                cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                                if f._type == 'text':
                                    # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
                                    msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\
                                          "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
                                          " because there is a length limit for indexable btree values!\n"\
                                          "Use a search view instead if you simply want to make the field searchable."
                                    _schema.warning(msg, self._table, f._type, k)
                            if res2 and not f.select:
                                cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
                                msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
                                _schema.debug(msg, self._table, k, f._type)

                            if isinstance(f, fields.many2one):
                                dest_model = self.pool.get(f._obj)
                                if dest_model._table != 'ir_actions':
                                    self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete)

                    # The field doesn't exist in database. Create it if necessary.
                        if not isinstance(f, fields.function) or f.store:
                            # add the missing field
                            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                            cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                            _schema.debug("Table '%s': added column '%s' with definition=%s",
                                          self._table, k, get_pg_type(f)[1])

                            # initialize it with the column default on upgrades
                            if not create and k in self._defaults:
                                if callable(self._defaults[k]):
                                    default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                                    default = self._defaults[k]
                                    ss = self._columns[k]._symbol_set
                                    query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
                                    cr.execute(query, (ss[1](default),))
                                    _logger.debug("Table '%s': setting default value of new column %s", self._table, k)

                            # remember the functions to call for the stored fields
                            if isinstance(f, fields.function):
                                if f.store is not True: # i.e. if f.store is a dict
                                    order = f.store[f.store.keys()[0]][2]
                                todo_end.append((order, self._update_store, (f, k)))

                            # and add constraints if needed
                            if isinstance(f, fields.many2one):
                                if not self.pool.get(f._obj):
                                    raise except_orm('Programming Error', 'There is no reference available for %s' % (f._obj,))
                                dest_model = self.pool.get(f._obj)
                                ref = dest_model._table
                                # ir_actions is inherited so foreign key doesn't work on it
                                if ref != 'ir_actions':
                                    self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
                                cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                                    _schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
                                    msg = "WARNING: unable to set column %s of table %s not null !\n"\
                                          "Try to re-run: openerp-server --update=module\n"\
                                          "If it doesn't work, update records and execute manually:\n"\
                                          "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                                    _logger.warning(msg, k, self._table, self._table, k)

            # _auto is False: only probe whether the table/view exists
            cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
            create = not bool(cr.fetchone())
        cr.commit() # start a new transaction

        self._add_sql_constraints(cr)

            self._execute_sql(cr)

            self._parent_store_compute(cr)
    def _auto_end(self, cr, context=None):
        """ Create the foreign keys recorded by _auto_init. """
        for t, k, r, d in self._foreign_keys:
            cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
            # record the FK so module uninstall can drop it later
            self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
        # NOTE(review): the original commits here (`cr.commit()`); that line
        # appears to be missing from this copy.
        del self._foreign_keys
    def _table_exist(self, cr):
        # probe pg_class for a table or view named self._table
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
        # NOTE(review): the original returns cr.rowcount here; the return
        # statement is missing from this copy.
3237 def _create_table(self, cr):
3238 cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id))' % (self._table,))
3239 cr.execute(("COMMENT ON TABLE \"%s\" IS %%s" % self._table), (self._description,))
3240 _schema.debug("Table '%s': created", self._table)
    def _parent_columns_exist(self, cr):
        # probe for the 'parent_left' column on this model's table
        cr.execute("""SELECT c.relname
            FROM pg_class c, pg_attribute a
            WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, 'parent_left'))
        # NOTE(review): the original returns cr.rowcount here; the return
        # statement is missing from this copy.
    def _create_parent_columns(self, cr):
        # Add the nested-set columns backing _parent_store, and complain when
        # the model does not declare matching indexed integer fields.
        # NOTE(review): the `self._name)` continuation lines of several
        # _logger.error calls below are missing from this copy; gaps as-is.
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
        cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
        if 'parent_left' not in self._columns:
            _logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, 'parent_left', 'INTEGER')
        elif not self._columns['parent_left'].select:
            _logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
        if 'parent_right' not in self._columns:
            _logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, 'parent_right', 'INTEGER')
        elif not self._columns['parent_right'].select:
            _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
        # the parent link must cascade/restrict so nested-set values stay sane
        if self._columns[self._parent_name].ondelete not in ('cascade', 'restrict'):
            _logger.error("The column %s on object %s must be set as ondelete='cascade' or 'restrict'",
                          self._parent_name, self._name)
    def _add_log_columns(self, cr):
        # Ensure each audit column (create_uid/create_date/write_uid/write_date
        # per LOG_ACCESS_COLUMNS) exists on the table.
        # NOTE(review): the `cr.execute("""SELECT ...` opener probing for the
        # column and the `if not cr.rowcount:` guard are missing from this
        # copy; gaps left as-is.
        for field, field_def in LOG_ACCESS_COLUMNS.iteritems():
                FROM pg_class c, pg_attribute a
                WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
                """, (self._table, field))
            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, field, field_def))
            _schema.debug("Table '%s': added column '%s' with definition=%s",
                          self._table, field, field_def)
3291 def _select_column_data(self, cr):
3292 # attlen is the number of bytes necessary to represent the type when
3293 # the type has a fixed size. If the type has a varying size attlen is
3294 # -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
3295 cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \
3296 "FROM pg_class c,pg_attribute a,pg_type t " \
3297 "WHERE c.relname=%s " \
3298 "AND c.oid=a.attrelid " \
3299 "AND a.atttypid=t.oid", (self._table,))
3300 return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
    def _o2m_raise_on_missing_reference(self, cr, f):
        # TODO this check should be a method on fields.one2many.
        # Ensure the comodel of a one2many declares the inverse relational
        # column (f._fields_id), directly or through _inherits.
        other = self.pool.get(f._obj)
        # NOTE(review): the original `if other:` guard line appears to be
        # missing from this copy.
        # TODO the condition could use fields_get_keys().
        if f._fields_id not in other._columns.keys():
            if f._fields_id not in other._inherit_fields.keys():
                raise except_orm('Programming Error', "There is no reference field '%s' found for '%s'" % (f._fields_id, f._obj,))
    def _m2m_raise_or_create_relation(self, cr, f):
        # Create the many2many relation table (with FKs, indexes and a table
        # comment) when it does not exist yet; raise if the comodel is unknown.
        m2m_tbl, col1, col2 = f._sql_names(self)
        # record the relation table so module uninstall can drop it
        self._save_relation_table(cr, m2m_tbl)
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
        if not cr.dictfetchall():
            if not self.pool.get(f._obj):
                raise except_orm('Programming Error', 'Many2Many destination model does not exist: `%s`' % (f._obj,))
            dest_model = self.pool.get(f._obj)
            ref = dest_model._table
            cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s"))' % (m2m_tbl, col1, col2, col1, col2))
            # create foreign key references with ondelete=cascade, unless the targets are SQL views
            cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
            if not cr.fetchall():
                self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
            cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
            if not cr.fetchall():
                self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
            cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
            # NOTE(review): the original commits here (`cr.commit()`); that
            # line appears to be missing from this copy.
            _schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
    def _add_sql_constraints(self, cr):
        """
        Modify this model's database table constraints so they match the one in
        _sql_constraints.
        """
        # NOTE(review): parts of the original are missing from this copy (the
        # `sql_actions = {...}` literal structure with its 'drop'/'add' keys,
        # 'order'/'execute' entries, and the try/except around the executes);
        # gaps left as-is.
        def unify_cons_text(txt):
            # normalize a constraint definition for textual comparison
            return txt.lower().replace(', ',',').replace(' (','(')

        for (key, con, _) in self._sql_constraints:
            conname = '%s_%s' % (self._table, key)

            # record the constraint so module uninstall can drop it
            self._save_constraint(cr, conname, 'u')
            cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
            existing_constraints = cr.dictfetchall()
                    'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
                    'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
                        self._table, conname, con),
                    'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
                    'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
                    'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
                    'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (

            if not existing_constraints:
                # constraint does not exists:
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
            elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
                # constraint exists but its definition has changed:
                sql_actions['drop']['execute'] = True
                sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
                sql_actions['add']['execute'] = True
                sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )

            # we need to add the constraint:
            sql_actions = [item for item in sql_actions.values()]
            sql_actions.sort(key=lambda x: x['order'])
            for sql_action in [action for action in sql_actions if action['execute']]:
                    cr.execute(sql_action['query'])
                    _schema.debug(sql_action['msg_ok'])
                    _schema.warning(sql_action['msg_err'])
    def _execute_sql(self, cr):
        """ Execute the SQL code from the _sql attribute (if any)."""
        if hasattr(self, "_sql"):
            # statements are separated by ';'
            for line in self._sql.split(';'):
                line2 = line.replace('\n', '').strip()
                # NOTE(review): the original executes each non-empty statement
                # here (`if line2: cr.execute(line2)` followed by a commit);
                # those lines are missing from this copy.
3407 # Update objects that uses this one to update their _inherits fields
3410 def _inherits_reload_src(self):
3411 """ Recompute the _inherit_fields mapping on each _inherits'd child model."""
3412 for obj in self.pool.models.values():
3413 if self._name in obj._inherits:
3414 obj._inherits_reload()
    def _inherits_reload(self):
        """ Recompute the _inherit_fields mapping.

        This will also call itself on each inherits'd child model.
        """
        # NOTE(review): the initialization `res = {}` is missing from this
        # copy. The mapping built below is
        # {field_name: (parent model, m2o link column, column object,
        #               original parent model)}.
        for table in self._inherits:
            other = self.pool.get(table)
            for col in other._columns.keys():
                res[col] = (table, self._inherits[table], other._columns[col], table)
            # fields the parent itself inherits keep their original ancestry
            for col in other._inherit_fields.keys():
                res[col] = (table, self._inherits[table], other._inherit_fields[col][2], other._inherit_fields[col][3])
        self._inherit_fields = res
        self._all_columns = self._get_column_infos()
        # propagate the change to models inheriting from us
        self._inherits_reload_src()
    def _get_column_infos(self):
        """Returns a dict mapping all fields names (direct fields and
        inherited field via _inherits) to a ``column_info`` struct
        giving detailed columns """
        # NOTE(review): `result = {}` and the trailing `return result` are
        # missing from this copy. Inherited fields are inserted first so that
        # direct columns override them on name clashes.
        for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
            result[k] = fields.column_info(k, col, parent, m2o, original_parent)
        for k, col in self._columns.iteritems():
            result[k] = fields.column_info(k, col)
3447 def _inherits_check(self):
3448 for table, field_name in self._inherits.items():
3449 if field_name not in self._columns:
3450 _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, self._name)
3451 self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
3452 required=True, ondelete="cascade")
3453 elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() not in ("cascade", "restrict"):
3454 _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, self._name)
3455 self._columns[field_name].required = True
3456 self._columns[field_name].ondelete = "cascade"
3458 #def __getattr__(self, name):
3460 # Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
3461 # (though inherits doesn't use Python inheritance).
3462 # Handles translating between local ids and remote ids.
3463 # Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
3464 # when you have inherits.
3466 # for model, field in self._inherits.iteritems():
3467 # proxy = self.pool.get(model)
3468 # if hasattr(proxy, name):
3469 # attribute = getattr(proxy, name)
3470 # if not hasattr(attribute, '__call__'):
3474 # return super(orm, self).__getattr__(name)
3476 # def _proxy(cr, uid, ids, *args, **kwargs):
3477 # objects = self.browse(cr, uid, ids, kwargs.get('context', None))
3478 # lst = [obj[field].id for obj in objects if obj[field]]
3479 # return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
    def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
        """ Return the definition of each field.

        The returned value is a dictionary (indiced by field name) of
        dictionaries. The _inherits'd fields are included. The string, help,
        and selection (if present) attributes are translated.

        :param cr: database cursor
        :param user: current user id
        :param allfields: list of fields
        :param context: context arguments, like lang, time zone
        :return: dictionary of field dictionaries, each one describing a field of the business object
        :raise AccessError: * if user has no create/write rights on the requested object
        """
        # NOTE(review): several original lines are missing from this copy
        # (context default, `res = {}`, `continue` statements, the
        # `if res_trans:` / `if help_trans:` guards, `sel2 = []`, the dynamic
        # selection branch and the final `return res`); gaps left as-is.
        write_access = self.check_access_rights(cr, user, 'write', raise_exception=False) \
            or self.check_access_rights(cr, user, 'create', raise_exception=False)

        translation_obj = self.pool.get('ir.translation')
        # start from the inherited parents' definitions, then overlay our own
        for parent in self._inherits:
            res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

        for f, field in self._columns.iteritems():
            # skip unrequested fields and fields restricted to other groups
            if (allfields and f not in allfields) or \
                    (field.groups and not self.user_has_groups(cr, user, groups=field.groups, context=context)):

            res[f] = fields.field_to_dict(self, cr, user, field, context=context)

            if not write_access:
                res[f]['readonly'] = True
                res[f]['states'] = {}

            # translate string/help/selection when a language is requested
            if 'lang' in context:
                if 'string' in res[f]:
                    res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context['lang'])
                    res[f]['string'] = res_trans
                if 'help' in res[f]:
                    help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context['lang'])
                    res[f]['help'] = help_trans
                if 'selection' in res[f]:
                    if isinstance(field.selection, (tuple, list)):
                        sel = field.selection
                        for key, val in sel:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context['lang'], val)
                            sel2.append((key, val2 or val))
                        res[f]['selection'] = sel2
3544 def check_field_access_rights(self, cr, user, operation, fields, context=None):
3546 Check the user access rights on the given fields. This raises Access
3547 Denied if the user does not have the rights. Otherwise it returns the
3548 fields (as is if the fields is not falsy, or the readable/writable
3549 fields if fields is falsy).
# NOTE(review): the ``def p(field_name):`` header of this inner predicate
# sits on an elided line of the listing.
3552 """Predicate to test if the user has access to the given field name."""
3553 # Ignore requested field if it doesn't exist. This is ugly but
3554 # it seems to happen at least with 'name_alias' on res.partner.
3555 if field_name not in self._all_columns:
3557 field = self._all_columns[field_name].column
# A field with no ``groups`` attribute is accessible to everybody;
# otherwise membership in one of the listed groups is required.
3559 return self.user_has_groups(cr, user, groups=field.groups, context=context)
# No explicit field list: return every column the user may access.
3563 fields = filter(p, self._all_columns.keys())
# Explicit field list: any requested-but-forbidden field is logged and
# turned into an Access Denied error (message below).
3565 filtered_fields = filter(lambda a: not p(a), fields)
3567 _logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s, fields: %s', operation, user, self._name, ', '.join(filtered_fields))
3570 _('The requested operation cannot be completed due to security restrictions. '
3571 'Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
3572 (self._description, operation))
3575 def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
3576 """ Read records with given ids with the given fields
3578 :param cr: database cursor
3579 :param user: current user id
3580 :param ids: id or list of the ids of the records to read
3581 :param fields: optional list of field names to return (default: all fields would be returned)
3582 :type fields: list (example ['field_name_1', ...])
3583 :param context: optional context dictionary - it may contains keys for specifying certain options
3584 like ``context_lang``, ``context_tz`` to alter the results of the call.
3585 A special ``bin_size`` boolean flag may also be passed in the context to request the
3586 value of all fields.binary columns to be returned as the size of the binary instead of its
3587 contents. This can also be selectively overriden by passing a field-specific flag
3588 in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
3589 Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
3590 :return: list of dictionaries((dictionary per record asked)) with requested field values
3591 :rtype: [{'name_of_the_field': value, ...}, ...]
3592 :raise AccessError: * if user has no read rights on the requested object
3593 * if user tries to bypass access rules for read on the requested object
# Model-level ACL first, then narrow the requested fields to the ones
# the user may actually read (raises on explicit forbidden fields).
3599 self.check_access_rights(cr, user, 'read')
3600 fields = self.check_field_access_rights(cr, user, 'read', fields)
# A scalar id is normalized to a list; the scalar-ness is remembered
# below so a single dict (or False) can be returned instead of a list.
3601 if isinstance(ids, (int, long)):
# Browse-record style entries ({'id': ...} dicts) are reduced to ids.
3605 select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
3606 result = self._read_flat(cr, user, select, fields, context, load)
# Normalize falsy field values (except booleans) — loop body elided here.
3609 for key, v in r.items():
3613 if isinstance(ids, (int, long, dict)):
3614 return result and result[0] or False
3617 def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
# Low-level worker behind read(): performs the actual SELECT, applies
# ir.rule record rules, overlays translations, reads _inherits parents,
# post-processes symbol_get columns and computes function fields.
3622 if fields_to_read is None:
3623 fields_to_read = self._columns.keys()
3625 # Construct a clause for the security rules.
3626 # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
3627 # or will at least contain self._table.
3628 rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)
3630 # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
3631 fields_pre = [f for f in fields_to_read if
3632 f == self.CONCURRENCY_CHECK_FIELD
3633 or (f in self._columns and getattr(self._columns[f], '_classic_write'))
3634 ] + self._inherits.values()
# Render one SQL select-list item per field, with special cases for the
# audit dates, the concurrency pseudo-field and binary sizes.
3638 def convert_field(f):
3639 f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
3640 if f in ('create_date', 'write_date'):
3641 return "date_trunc('second', %s) as %s" % (f_qual, f)
3642 if f == self.CONCURRENCY_CHECK_FIELD:
3643 if self._log_access:
3644 return "COALESCE(%s.write_date, %s.create_date, (now() at time zone 'UTC'))::timestamp AS %s" % (self._table, self._table, f,)
3645 return "(now() at time zone 'UTC')::timestamp AS %s" % (f,)
3646 if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
3647 return 'length(%s) as "%s"' % (f_qual, f)
# Identifiers (table/field names) are interpolated into the SQL text;
# record values always travel as bound parameters.
3650 fields_pre2 = map(convert_field, fields_pre)
3651 order_by = self._parent_order or self._order
3652 select_fields = ','.join(fields_pre2 + ['%s.id' % self._table])
3653 query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
3655 query += " AND " + (' OR '.join(rule_clause))
3656 query += " ORDER BY " + order_by
# Chunk the id list to stay under the SQL IN-clause size limits; any
# row filtered out by record rules triggers an explicit error below.
3657 for sub_ids in cr.split_for_in_conditions(ids):
3658 cr.execute(query, [tuple(sub_ids)] + rule_params)
3659 results = cr.dictfetchall()
3660 result_ids = [x['id'] for x in results]
3661 self._check_record_rules_result_count(cr, user, sub_ids, result_ids, 'read', context=context)
# No stored field requested: synthesize minimal {'id': ...} records.
3664 res = map(lambda x: {'id': x}, ids)
# Overlay translations of translatable stored fields for context lang.
3666 if context.get('lang'):
3667 for f in fields_pre:
3668 if f == self.CONCURRENCY_CHECK_FIELD:
3670 if self._columns[f].translate:
3671 ids = [x['id'] for x in res]
3672 #TODO: optimize out of this loop
3673 res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context['lang'], ids)
3675 r[f] = res_trans.get(r['id'], False) or r[f]
# Fetch the _inherits parents' fields through their own read() (which
# re-applies that model's access rules) and merge them per record.
3677 for table in self._inherits:
3678 col = self._inherits[table]
3679 cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
3682 res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)
3690 if not record[col]: # if the record is deleted from _inherits table?
3692 record.update(res3[record[col]])
3693 if col not in fields_to_read:
3696 # all fields which need to be post-processed by a simple function (symbol_get)
3697 fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
3700 for f in fields_post:
3701 r[f] = self._columns[f]._symbol_get(r[f])
3702 ids = [x['id'] for x in res]
3704 # all non inherited fields for which the attribute whose name is in load is False
3705 fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)
3707 # Compute POST fields
# Function fields sharing a ``multi`` group are computed in one call;
# fields without ``multi`` land under a falsy key and go one by one.
3709 for f in fields_post:
3710 todo.setdefault(self._columns[f]._multi, [])
3711 todo[self._columns[f]._multi].append(f)
3712 for key, val in todo.items():
3714 res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
3715 assert res2 is not None, \
3716 'The function field "%s" on the "%s" model returned None\n' \
3717 '(a dictionary was expected).' % (val[0], self._name)
3720 if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
3721 multi_fields = res2.get(record['id'],{})
3723 record[pos] = multi_fields.get(pos,[])
3726 res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
3729 record[f] = res2[record['id']]
3733 # Warn about deprecated fields now that fields_pre and fields_post are computed
3734 # Explicitly use list() because we may receive tuples
3735 for f in list(fields_pre) + list(fields_post):
3736 field_column = self._all_columns.get(f) and self._all_columns.get(f).column
3737 if field_column and field_column.deprecated:
3738 _logger.warning('Field %s.%s is deprecated: %s', self._name, f, field_column.deprecated)
# Field-level ``read`` group masking: for each record, values of fields
# the user's groups may not read are replaced by placeholder values.
# NOTE(review): the enclosing per-record loop header is on an elided
# line of this listing.
3742 for field in vals.copy():
3744 if field in self._columns:
3745 fobj = self._columns[field]
# One membership query per declared group; values travel as bound
# parameters (contrast with the interpolated variant in create()).
3752 for group in groups:
3753 module = group.split(".")[0]
3754 grp = group.split(".")[1]
3755 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
3756 (grp, module, 'res.groups', user))
3757 readonly = cr.fetchall()
3758 if readonly[0][0] >= 1:
3761 elif readonly[0][0] == 0:
# Replace the forbidden value with a type-appropriate placeholder.
3767 if type(vals[field]) == type([]):
3769 elif type(vals[field]) == type(0.0):
3771 elif type(vals[field]) == type(''):
3772 vals[field] = '=No Permission='
3777 # TODO check READ access
3778 def perm_read(self, cr, user, ids, context=None, details=True):
3780 Returns some metadata about the given records.
3782 :param details: if True, \*_uid fields are replaced with the name of the user
3783 :return: list of ownership dictionaries for each requested record
3784 :rtype: list of dictionaries with the following keys:
3787 * create_uid: user who created the record
3788 * create_date: date when the record was created
3789 * write_uid: last user who changed the record
3790 * write_date: date of the last change to the record
3791 * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
# Remember whether a single id was passed so a single dict can be
# returned instead of a list (return statement is on an elided line).
3798 uniq = isinstance(ids, (int, long))
# The audit columns only exist when _log_access is enabled.
3802 if self._log_access:
3803 fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
3804 quoted_table = '"%s"' % self._table
3805 fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
# LEFT JOIN on ir_model_data resolves the record's XML ID, if any.
3806 query = '''SELECT %s, __imd.module, __imd.name
3807 FROM %s LEFT JOIN ir_model_data __imd
3808 ON (__imd.model = %%s and __imd.res_id = %s.id)
3809 WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
3810 cr.execute(query, (self._name, tuple(ids)))
3811 res = cr.dictfetchall()
# Normalize NULLs to False and optionally resolve uids to name_get
# pairs; a failing name_get leaves the raw numeric uid in place.
3814 r[key] = r[key] or False
3815 if details and key in ('write_uid', 'create_uid') and r[key]:
3817 r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
3819 pass # Leave the numeric uid there
3820 r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
3821 del r['name'], r['module']
3826 def _check_concurrency(self, cr, ids, context):
# Optimistic-locking check: compares the client-provided timestamps
# (stored in context under CONCURRENCY_CHECK_FIELD, keyed by
# "model,id") with the records' current write/create dates, and raises
# if any record changed since the client last read it.
3829 if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
3831 check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)"
3832 for sub_ids in cr.split_for_in_conditions(ids):
3835 id_ref = "%s,%s" % (self._name, id)
# pop() consumes the timestamp so each check runs at most once.
3836 update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
3838 ids_to_check.extend([id, update_date])
3839 if not ids_to_check:
# ids_to_check holds (id, timestamp) pairs, hence len/2 clause copies.
3841 cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
3844 # mention the first one only to keep the error message readable
3845 raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
3847 def _check_record_rules_result_count(self, cr, uid, ids, result_ids, operation, context=None):
3848 """Verify the returned rows after applying record rules matches
3849 the length of `ids`, and raise an appropriate exception if it does not.
3851 ids, result_ids = set(ids), set(result_ids)
3852 missing_ids = ids - result_ids
3854 # Attempt to distinguish record rule restriction vs deleted records,
3855 # to provide a more specific error message - check if the missing
3856 # ids actually still exist in the table.
3856 cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(missing_ids),))
3858 # the missing ids are (at least partially) hidden by access rules
# The superuser bypasses record rules, so reaching this branch as
# SUPERUSER_ID means the records simply are not visible/present.
3859 if uid == SUPERUSER_ID:
3861 _logger.warning('Access Denied by record rules for operation: %s, uid: %s, model: %s', operation, uid, self._name)
3862 raise except_orm(_('Access Denied'),
3863 _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
3864 (self._description, operation))
3866 # If we get here, the missing_ids are not in the database
3867 if operation in ('read','unlink'):
3868 # No need to warn about deleting an already deleted record.
3869 # And no error when reading a record that was deleted, to prevent spurious
3870 # errors for non-transactional search/read sequences coming from clients
3872 _logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name)
3873 raise except_orm(_('Missing document(s)'),
3874 _('One of the documents you are trying to access has been deleted, please try again after refreshing.'))
def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
    """Delegate the model-level ACL check for ``operation`` to ir.model.access.

    Returns the boolean verdict when ``raise_exception`` is False, otherwise
    lets ir.model.access raise on denial.
    """
    access_model = self.pool.get('ir.model.access')
    return access_model.check(cr, uid, self._name, operation, raise_exception)
3882 def check_access_rule(self, cr, uid, ids, operation, context=None):
3883 """Verifies that the operation given by ``operation`` is allowed for the user
3884 according to ir.rules.
3886 :param operation: one of ``write``, ``unlink``
3887 :raise except_orm: * if current ir.rules do not permit this operation.
3888 :return: None if the operation is allowed
# The superuser bypasses record rules entirely (early return elided).
3890 if uid == SUPERUSER_ID:
3893 if self.is_transient():
3894 # Only one single implicit access rule for transient models: owner only!
3895 # This is ok to hardcode because we assert that TransientModels always
3896 # have log_access enabled so that the create_uid column is always there.
3897 # And even with _inherits, these fields are always present in the local
3898 # table too, so no need for JOINs.
3899 cr.execute("""SELECT distinct create_uid
3901 WHERE id IN %%s""" % self._table, (tuple(ids),))
3902 uids = [x[0] for x in cr.fetchall()]
3903 if len(uids) != 1 or uids[0] != uid:
3904 raise except_orm(_('Access Denied'),
3905 _('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,))
# Regular models: fetch ir.rule domain for this operation and verify
# every requested id survives the filtered SELECT.
3907 where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
3909 where_clause = ' and ' + ' and '.join(where_clause)
3910 for sub_ids in cr.split_for_in_conditions(ids):
3911 cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
3912 ' WHERE ' + self._table + '.id IN %s' + where_clause,
3913 [sub_ids] + where_params)
3914 returned_ids = [x['id'] for x in cr.dictfetchall()]
3915 self._check_record_rules_result_count(cr, uid, sub_ids, returned_ids, operation, context=context)
3917 def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
3918 """Call given workflow trigger as a result of a CRUD operation"""
3919 wf_service = netsvc.LocalService("workflow")
# Invoked once per id (the per-id loop header is on an elided line);
# ``trigger`` names a method on the workflow service, e.g. trg_create.
3921 getattr(wf_service, trigger)(uid, self._name, res_id, cr)
3923 def _workflow_signal(self, cr, uid, ids, signal, context=None):
3924 """Send given workflow signal and return a dict mapping ids to workflow results"""
3925 wf_service = netsvc.LocalService("workflow")
# One trg_validate per id; accumulator init/return are on elided lines.
3928 result[res_id] = wf_service.trg_validate(uid, self._name, res_id, signal, cr)
3931 def unlink(self, cr, uid, ids, context=None):
3933 Delete records with given ids
3935 :param cr: database cursor
3936 :param uid: current user id
3937 :param ids: id or list of ids
3938 :param context: (optional) context arguments, like lang, time zone
3940 :raise AccessError: * if user has no unlink rights on the requested object
3941 * if user tries to bypass access rules for unlink on the requested object
3942 :raise UserError: if the record is default property for other records
3947 if isinstance(ids, (int, long)):
# Snapshot which stored function fields (possibly on other models)
# depend on these records BEFORE deleting them.
3950 result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context)
3952 self._check_concurrency(cr, ids, context)
3954 self.check_access_rights(cr, uid, 'unlink')
3956 ir_property = self.pool.get('ir.property')
3958 # Check if the records are used as default properties.
3959 domain = [('res_id', '=', False),
3960 ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
3962 if ir_property.search(cr, uid, domain, context=context):
3963 raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))
3965 # Delete the records' properties.
3966 property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context)
3967 ir_property.unlink(cr, uid, property_ids, context=context)
# Workflow deletion trigger fires before the rows disappear.
3969 self._workflow_trigger(cr, uid, ids, 'trg_delete', context=context)
3971 self.check_access_rule(cr, uid, ids, 'unlink', context=context)
3972 pool_model_data = self.pool.get('ir.model.data')
3973 ir_values_obj = self.pool.get('ir.values')
3974 for sub_ids in cr.split_for_in_conditions(ids):
3975 cr.execute('delete from ' + self._table + ' ' \
3976 'where id IN %s', (sub_ids,))
3978 # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
3979 # as these are not connected with real database foreign keys, and would be dangling references.
3980 # Note: following steps performed as admin to avoid access rights restrictions, and with no context
3981 # to avoid possible side-effects during admin calls.
3982 # Step 1. Calling unlink of ir_model_data only for the affected IDS
3983 reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
3984 # Step 2. Marching towards the real deletion of referenced records
3986 pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)
3988 # For the same reason, removing the record relevant to ir_values
3989 ir_value_ids = ir_values_obj.search(cr, uid,
3990 ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
3993 ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)
# Recompute stored function fields on OTHER models that referenced the
# deleted records; only dependents still present in DB are refreshed.
3995 for order, object, store_ids, fields in result_store:
3996 if object != self._name:
3997 obj = self.pool.get(object)
3998 cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
3999 rids = map(lambda x: x[0], cr.fetchall())
4001 obj._store_set_values(cr, uid, rids, fields, context)
4008 def write(self, cr, user, ids, vals, context=None):
4010 Update records with given ids with the given field values
4012 :param cr: database cursor
4013 :param user: current user id
4015 :param ids: object id or list of object ids to update according to **vals**
4016 :param vals: field values to update, e.g {'field_name': new_field_value, ...}
4017 :type vals: dictionary
4018 :param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
4019 :type context: dictionary
4021 :raise AccessError: * if user has no write rights on the requested object
4022 * if user tries to bypass access rules for write on the requested object
4023 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
4024 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
4026 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific:
4028 + For a many2many field, a list of tuples is expected.
4029 Here is the list of tuple that are accepted, with the corresponding semantics ::
4031 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
4032 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
4033 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
4034 (3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
4035 (4, ID) link to existing record with id = ID (adds a relationship)
4036 (5) unlink all (like using (3,ID) for all linked records)
4037 (6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)
4040 [(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]
4042 + For a one2many field, a lits of tuples is expected.
4043 Here is the list of tuple that are accepted, with the corresponding semantics ::
4045 (0, 0, { values }) link to a new record that needs to be created with the given values dictionary
4046 (1, ID, { values }) update the linked record with id = ID (write *values* on it)
4047 (2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
4050 [(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]
4052 + For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
4053 + For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
# Field-level ACL check on exactly the fields being written.
4057 self.check_field_access_rights(cr, user, 'write', vals.keys())
# Strip values the user's groups may not write (readonly-by-group).
4058 for field in vals.copy():
4060 if field in self._columns:
4061 fobj = self._columns[field]
4062 elif field in self._inherit_fields:
4063 fobj = self._inherit_fields[field][2]
4070 for group in groups:
4071 module = group.split(".")[0]
4072 grp = group.split(".")[1]
# Membership query uses bound parameters (safe interpolation).
4073 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
4074 (grp, module, 'res.groups', user))
4075 readonly = cr.fetchall()
4076 if readonly[0][0] >= 1:
4087 if isinstance(ids, (int, long)):
4090 self._check_concurrency(cr, ids, context)
4091 self.check_access_rights(cr, user, 'write')
# Pre-compute which stored function fields must be refreshed later.
4093 result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
4095 # No direct update of parent_left/right
4096 vals.pop('parent_left', None)
4097 vals.pop('parent_right', None)
4099 parents_changed = []
4100 parent_order = self._parent_order or self._order
4101 if self._parent_store and (self._parent_name in vals):
4102 # The parent_left/right computation may take up to
4103 # 5 seconds. No need to recompute the values if the
4104 # parent is the same.
4105 # Note: to respect parent_order, nodes must be processed in
4106 # order, so ``parents_changed`` must be ordered properly.
4107 parent_val = vals[self._parent_name]
4109 query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
4110 (self._table, self._parent_name, self._parent_name, parent_order)
4111 cr.execute(query, (tuple(ids), parent_val))
4113 query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
4114 (self._table, self._parent_name, parent_order)
4115 cr.execute(query, (tuple(ids),))
4116 parents_changed = map(operator.itemgetter(0), cr.fetchall())
# Split vals into: ``direct`` (plain SQL columns updated below),
# ``upd_todo`` (fields with a custom set(), e.g. x2many/function) and
# ``updend`` (fields belonging to _inherits parents).
4123 totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
4125 field_column = self._all_columns.get(field) and self._all_columns.get(field).column
4126 if field_column and field_column.deprecated:
4127 _logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
4128 if field in self._columns:
4129 if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
4130 if (not totranslate) or not self._columns[field].translate:
4131 upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
4132 upd1.append(self._columns[field]._symbol_set[1](vals[field]))
4133 direct.append(field)
4135 upd_todo.append(field)
4137 updend.append(field)
4138 if field in self._columns \
4139 and hasattr(self._columns[field], 'selection') \
4141 self._check_selection_field_value(cr, user, field, vals[field], context=context)
4143 if self._log_access:
4144 upd0.append('write_uid=%s')
4145 upd0.append("write_date=(now() at time zone 'UTC')")
4149 self.check_access_rule(cr, user, ids, 'write', context=context)
4150 for sub_ids in cr.split_for_in_conditions(ids):
4151 cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
4152 'where id IN %s', upd1 + [sub_ids])
# Row-count mismatch means some target rows vanished mid-operation.
4153 if cr.rowcount != len(sub_ids):
4154 raise except_orm(_('AccessError'),
4155 _('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)
# Translatable fields written under a non-en_US lang: keep the source
# value in the table and store the translation in ir.translation.
4160 if self._columns[f].translate:
4161 src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
4164 # Inserting value to DB
4165 self.write(cr, user, ids, {f: vals[f]})
4166 self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)
4169 # call the 'set' method of fields which are not classic_write
4170 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
4172 # default element in context must be removed when call a one2many or many2many
4173 rel_context = context.copy()
4174 for c in context.items():
4175 if c[0].startswith('default_'):
4176 del rel_context[c[0]]
4178 for field in upd_todo:
4180 result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
# Forward values of _inherits fields to each parent model's write(),
# tracking which requested fields matched nothing at all.
4182 unknown_fields = updend[:]
4183 for table in self._inherits:
4184 col = self._inherits[table]
4186 for sub_ids in cr.split_for_in_conditions(ids):
4187 cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
4188 'where id IN %s', (sub_ids,))
4189 nids.extend([x[0] for x in cr.fetchall()])
4193 if self._inherit_fields[val][0] == table:
4195 unknown_fields.remove(val)
4197 self.pool.get(table).write(cr, user, nids, v, context)
4201 'No such field(s) in model %s: %s.',
4202 self._name, ', '.join(unknown_fields))
4203 self._validate(cr, user, ids, context)
4205 # TODO: use _order to set dest at the right position and not first node of parent
4206 # We can't defer parent_store computation because the stored function
4207 # fields that are computer may refer (directly or indirectly) to
4208 # parent_left/right (via a child_of domain)
4211 self.pool._init_parent[self._name] = True
# Incremental nested-set (parent_left/parent_right) maintenance for
# every node whose parent actually changed.
4213 order = self._parent_order or self._order
4214 parent_val = vals[self._parent_name]
4216 clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
4218 clause, params = '%s IS NULL' % (self._parent_name,), ()
4220 for id in parents_changed:
4221 cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
4222 pleft, pright = cr.fetchone()
4223 distance = pright - pleft + 1
4225 # Positions of current siblings, to locate proper insertion point;
4226 # this can _not_ be fetched outside the loop, as it needs to be refreshed
4227 # after each update, in case several nodes are sequentially inserted one
4228 # next to the other (i.e computed incrementally)
4229 cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
4230 parents = cr.fetchall()
4232 # Find Position of the element
4234 for (parent_pright, parent_id) in parents:
4237 position = parent_pright + 1
4239 # It's the first node of the parent
4244 cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
4245 position = cr.fetchone()[0] + 1
# Moving a node under one of its own descendants would corrupt
# the tree — reject it.
4247 if pleft < position <= pright:
4248 raise except_orm(_('UserError'), _('Recursivity Detected.'))
# Shift intervals to open a gap at ``position``, then slide the
# moved subtree into it (direction depends on the move).
4250 if pleft < position:
4251 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
4252 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
4253 cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
4255 cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
4256 cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
4257 cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))
4259 result += self._store_get_values(cr, user, ids, vals.keys(), context)
# Recompute the affected stored function fields, deduplicating
# (model, fields, id) work items across the accumulated result list.
4263 for order, object, ids_to_update, fields_to_recompute in result:
4264 key = (object, tuple(fields_to_recompute))
4265 done.setdefault(key, {})
4266 # avoid to do several times the same computation
4268 for id in ids_to_update:
4269 if id not in done[key]:
4270 done[key][id] = True
4272 self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)
# Workflow write trigger fires after everything is persisted.
4274 self._workflow_trigger(cr, user, ids, 'trg_write', context=context)
# NOTE(review): the original numbering below is non-contiguous — several
# statements were elided in this capture. Code is reproduced verbatim.
4278 # TODO: Should set perm to user.xxx
4280 def create(self, cr, user, vals, context=None):
4282 Create a new record for the model.
4284 The values for the new record are initialized using the ``vals``
4285 argument, and if necessary the result of ``default_get()``.
4287 :param cr: database cursor
4288 :param user: current user id
4290 :param vals: field values for new record, e.g {'field_name': field_value, ...}
4291 :type vals: dictionary
4292 :param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
4293 :type context: dictionary
4294 :return: id of new record created
4295 :raise AccessError: * if user has no create rights on the requested object
4296 * if user tries to bypass access rules for create on the requested object
4297 :raise ValidateError: if user tries to enter invalid value for a field that is not in selection
4298 :raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
4300 **Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
4301 Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
# transient models are vacuumed opportunistically on each create
4308 if self.is_transient():
4309 self._transient_vacuum(cr, user)
# model-level create permission is checked before any work is done
4311 self.check_access_rights(cr, user, 'create')
# callers may not force the audit columns (create_uid, write_date, ...)
4313 if self._log_access:
4314 for f in LOG_ACCESS_COLUMNS:
4315 if vals.pop(f, None) is not None:
4317 'Field `%s` is not allowed when creating the model `%s`.',
4319 vals = self._add_missing_default_values(cr, user, vals, context)
# dispatch values of _inherits parents: either link an existing parent
# record (id given in vals) or create one below
4322 for v in self._inherits:
4323 if self._inherits[v] not in vals:
4326 tocreate[v] = {'id': vals[self._inherits[v]]}
4327 (upd0, upd1, upd2) = ('', '', [])
4330 for v in vals.keys():
4331 if v in self._inherit_fields and v not in self._columns:
4332 (table, col, col_detail, original_parent) = self._inherit_fields[v]
4333 tocreate[table][v] = vals[v]
4336 if (v not in self._inherit_fields) and (v not in self._columns):
4338 unknown_fields.append(v)
4341 'No such field(s) in model %s: %s.',
4342 self._name, ', '.join(unknown_fields))
4344 # Try-except added to filter the creation of those records whose fields are readonly.
4345 # Example : any dashboard which has all the fields readonly.(due to Views(database views))
4347 cr.execute("SELECT nextval('"+self._sequence+"')")
4349 raise except_orm(_('UserError'),
4350 _('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
4352 id_new = cr.fetchone()[0]
4353 for table in tocreate:
4354 if self._inherits[table] in vals:
4355 del vals[self._inherits[table]]
4357 record_id = tocreate[table].pop('id', None)
4359 # When linking/creating parent records, force context without 'no_store_function' key that
4360 # defers stored functions computing, as these won't be computed in batch at the end of create().
4361 parent_context = dict(context)
4362 parent_context.pop('no_store_function', None)
4364 if record_id is None or not record_id:
4365 record_id = self.pool.get(table).create(cr, user, tocreate[table], context=parent_context)
4367 self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=parent_context)
4369 upd0 += ',' + self._inherits[table]
4371 upd2.append(record_id)
4373 #Start : Set bool fields to be False if they are not touched(to make search more powerful)
4374 bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
4376 for bool_field in bool_fields:
4377 if bool_field not in vals:
4378 vals[bool_field] = False
4380 for field in vals.copy():
4382 if field in self._columns:
4383 fobj = self._columns[field]
4385 fobj = self._inherit_fields[field][2]
4391 for group in groups:
4392 module = group.split(".")[0]
4393 grp = group.split(".")[1]
# NOTE(review): query below is built with % string interpolation —
# SQL-injection-prone if group names are attacker-influenced; should use
# parameterized arguments (cr.execute(sql, params)). TODO confirm and fix.
4394 cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
4395 (grp, module, 'res.groups', user))
4396 readonly = cr.fetchall()
4397 if readonly[0][0] >= 1:
4400 elif readonly[0][0] == 0:
4408 if self._columns[field]._classic_write:
4409 upd0 = upd0 + ',"' + field + '"'
4410 upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
4411 upd2.append(self._columns[field]._symbol_set[1](vals[field]))
4412 #for the function fields that receive a value, we set them directly in the database
4413 #(they may be required), but we also need to trigger the _fct_inv()
4414 if (hasattr(self._columns[field], '_fnct_inv')) and not isinstance(self._columns[field], fields.related):
4415 #TODO: this way to special case the related fields is really creepy but it shouldn't be changed at
4416 #one week of the release candidate. It seems the only good way to handle correctly this is to add an
4417 #attribute to make a field `really readonly´ and thus totally ignored by the create()... otherwise
4418 #if, for example, the related has a default value (for usability) then the fct_inv is called and it
4419 #may raise some access rights error. Changing this is a too big change for now, and is thus postponed
4420 #after the release but, definitively, the behavior shouldn't be different for related and function
4422 upd_todo.append(field)
4424 #TODO: this `if´ statement should be removed because there is no good reason to special case the fields
4425 #related. See the above TODO comment for further explanations.
4426 if not isinstance(self._columns[field], fields.related):
4427 upd_todo.append(field)
4428 if field in self._columns \
4429 and hasattr(self._columns[field], 'selection') \
4431 self._check_selection_field_value(cr, user, field, vals[field], context=context)
4432 if self._log_access:
4433 upd0 += ',create_uid,create_date,write_uid,write_date'
4434 upd1 += ",%s,(now() at time zone 'UTC'),%s,(now() at time zone 'UTC')"
4435 upd2.extend((user, user))
# single INSERT of all classic (directly stored) columns accumulated above
4436 cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
4437 self.check_access_rule(cr, user, [id_new], 'create', context=context)
4438 upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
# maintain the nested-set (parent_left/parent_right) columns when enabled
4439 # (skipped when the caller defers parent-store computation)
4440 if self._parent_store and not context.get('defer_parent_store_computation'):
4442 self.pool._init_parent[self._name] = True
4444 parent = vals.get(self._parent_name, False)
4446 cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
4448 result_p = cr.fetchall()
4449 for (pleft,) in result_p:
4454 cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
4455 pleft_old = cr.fetchone()[0]
4458 cr.execute('select max(parent_right) from '+self._table)
4459 pleft = cr.fetchone()[0] or 0
4460 cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
4461 cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
4462 cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
4464 # default element in context must be remove when call a one2many or many2many
4465 rel_context = context.copy()
4466 for c in context.items():
4467 if c[0].startswith('default_'):
4468 del rel_context[c[0]]
# non-classic fields (function/one2many/many2many) are written after the INSERT,
# in priority order; their .set() may return store-function triggers
4471 for field in upd_todo:
4472 result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
4473 self._validate(cr, user, [id_new], context)
# recompute stored function fields triggered by this creation (batched)
4475 if not context.get('no_store_function', False):
4476 result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
4479 for order, object, ids, fields2 in result:
4480 if not (object, ids, fields2) in done:
4481 self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
4482 done.append((object, ids, fields2))
4484 if self._log_create and not (context and context.get('no_store_function', False)):
4485 message = self._description + \
4487 self.name_get(cr, user, [id_new], context=context)[0][1] + \
4488 "' " + _("created.")
4489 self.log(cr, user, id_new, message, True, context=context)
4490 self._workflow_trigger(cr, user, [id_new], 'trg_create', context=context)
# NOTE(review): lines elided in this capture — ``cache`` is used below but its
# initialization (presumably ``cache = {}``) is not visible; code kept verbatim.
4493 def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
4494 """Fetch records as objects allowing to use dot notation to browse fields and relations
4496 :param cr: database cursor
4497 :param uid: current user id
4498 :param select: id or list of ids.
4499 :param context: context arguments, like lang, time zone
4500 :rtype: object or list of objects requested
4503 self._list_class = list_class or browse_record_list
4505 # need to accepts ints and longs because ids coming from a method
4506 # launched by button in the interface have a type long...
4507 if isinstance(select, (int, long)):
4508 return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
4509 elif isinstance(select, list):
4510 return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
# any other ``select`` value yields an empty browse_null placeholder
4512 return browse_null()
# NOTE(review): elided lines — initializations of ``mapping``/``call_map`` and
# the final ``return`` are not visible in this capture; code kept verbatim.
4514 def _store_get_values(self, cr, uid, ids, fields, context):
4515 """Returns an ordered list of fields.functions to call due to
4516 an update operation on ``fields`` of records with ``ids``,
4517 obtained by calling the 'store' functions of these fields,
4518 as setup by their 'store' attribute.
4520 :return: [(priority, model_name, [record_ids,], [function_fields,])]
4522 if fields is None: fields = []
4523 stored_functions = self.pool._store_function.get(self._name, [])
4525 # use indexed names for the details of the stored_functions:
4526 model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)
4528 # only keep functions that should be triggered for the ``fields``
4530 to_compute = [f for f in stored_functions \
4531 if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]
4534 for function in to_compute:
4535 # use admin user for accessing objects having rules defined on store fields
4536 target_ids = [id for id in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id]
4538 # the compound key must consider the priority and model name
4539 key = (function[priority_], function[model_name_])
4540 for target_id in target_ids:
4541 mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))
4543 # Here mapping looks like:
4544 # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
4545 # (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
4546 # (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... }
4549 # Now we need to generate the batch function calls list
4551 # { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
4553 for ((priority,model), id_map) in mapping.iteritems():
4554 functions_ids_maps = {}
4555 # function_ids_maps =
4556 # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
4557 for id, functions in id_map.iteritems():
4558 functions_ids_maps.setdefault(tuple(functions), []).append(id)
4559 for functions, ids in functions_ids_maps.iteritems():
4560 call_map.setdefault((priority,model),[]).append((priority, model, ids,
4561 [f[func_field_to_compute_] for f in functions]))
4562 ordered_keys = call_map.keys()
# flatten the per-key call lists into a single ordered list
4566 result = reduce(operator.add, (call_map[k] for k in ordered_keys))
# NOTE(review): heavily elided in this capture (missing loop headers, guards
# and branch keywords between the visible lines); code kept verbatim.
4569 def _store_set_values(self, cr, uid, ids, fields, context):
4570 """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
4571 respecting ``multi`` attributes), and stores the resulting values in the database directly."""
# when audit columns exist, recently-written records may be skipped based
# on write_date vs. the trigger's time window (i[5] hours)
4576 if self._log_access:
4577 cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
4581 field_dict.setdefault(r[0], [])
4582 res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
4583 write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
4584 for i in self.pool._store_function.get(self._name, []):
4586 up_write_date = write_date + datetime.timedelta(hours=i[5])
4587 if datetime.datetime.now() < up_write_date:
4589 field_dict[r[0]].append(i[1])
# group fields by their ``_multi`` attribute so multi-fields are computed together
4595 if self._columns[f]._multi not in keys:
4596 keys.append(self._columns[f]._multi)
4597 todo.setdefault(self._columns[f]._multi, [])
4598 todo[self._columns[f]._multi].append(f)
4602 # use admin user for accessing objects having rules defined on store fields
4603 result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
4604 for id, value in result.items():
4606 for f in value.keys():
4607 if f in field_dict[id]:
4614 if self._columns[v]._type == 'many2one':
4616 value[v] = value[v][0]
4619 upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
4620 upd1.append(self._columns[v]._symbol_set[1](value[v]))
# one UPDATE per record, writing all multi-fields at once
4623 cr.execute('update "' + self._table + '" set ' + \
4624 ','.join(upd0) + ' where id = %s', upd1)
4628 # use admin user for accessing objects having rules defined on store fields
4629 result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
4630 for r in result.keys():
4632 if r in field_dict.keys():
4633 if f in field_dict[r]:
4635 for id, value in result.items():
4636 if self._columns[f]._type == 'many2one':
# single-field path: one UPDATE per (field, record) pair
4641 cr.execute('update "' + self._table + '" set ' + \
4642 '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
4648 def perm_write(self, cr, user, ids, fields, context=None):
4649 raise NotImplementedError(_('This method does not exist anymore'))
# NOTE(review): elided lines in this capture (e.g. the context-default guard
# and the ``else:`` branches around 4672/4675); code kept verbatim.
4651 # TODO: improve handling of NULL values
4652 def _where_calc(self, cr, user, domain, active_test=True, context=None):
4653 """Computes the WHERE clause needed to implement an OpenERP domain.
4654 :param domain: the domain to compute
4656 :param active_test: whether the default filtering of records with ``active``
4657 field set to ``False`` should be applied.
4658 :return: the query expressing the given domain as provided in domain
4659 :rtype: osv.query.Query
4664 # if the object has a field named 'active', filter out all inactive
4665 # records unless they were explicitly asked for
4666 if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
4668 # the item[0] trick below works for domain items and '&'/'|'/'!'
4670 if not any(item[0] == 'active' for item in domain):
4671 domain.insert(0, ('active', '=', 1))
4673 domain = [('active', '=', 1)]
# delegate the actual domain-to-SQL translation to the expression parser
4676 e = expression.expression(cr, user, domain, self, context)
4677 tables = e.get_tables()
4678 where_clause, where_params = e.to_sql()
4679 where_clause = where_clause and [where_clause] or []
# empty domain: no WHERE clause, only the model's own table
4681 where_clause, where_params, tables = [], [], ['"%s"' % self._table]
4683 return Query(tables, where_clause, where_params)
# Validate an ORDER BY specification against the allowed grammar.
# NOTE(review): the trailing ``return True`` appears elided in this capture.
4685 def _check_qorder(self, word):
4686 if not regex_order.match(word):
4687 raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
# NOTE(review): elided lines in this capture (e.g. ``new_tables = []`` before
# 4710 and the ``else:`` before 4716); code kept verbatim.
4690 def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
4691 """Add what's missing in ``query`` to implement all appropriate ir.rules
4692 (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)
4694 :param query: the current query object
4696 def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
4697 """ :param string parent_model: string of the parent model
4698 :param model child_object: model object, base of the rule application
4701 if parent_model and child_object:
4702 # as inherited rules are being applied, we need to add the missing JOIN
4703 # to reach the parent table (if it was not JOINed yet in the query)
4704 parent_alias = child_object._inherits_join_add(child_object, parent_model, query)
4705 # inherited rules are applied on the external table -> need to get the alias and replace
4706 parent_table = self.pool.get(parent_model)._table
4707 added_clause = [clause.replace('"%s"' % parent_table, '"%s"' % parent_alias) for clause in added_clause]
4708 # change references to parent_table to parent_alias, because we now use the alias to refer to the table
4710 for table in added_tables:
4711 # table is just a table name -> switch to the full alias
4712 if table == '"%s"' % parent_table:
4713 new_tables.append('"%s" as "%s"' % (parent_table, parent_alias))
4714 # table is already a full statement -> replace reference to the table to its alias, is correct with the way aliases are generated
4716 new_tables.append(table.replace('"%s"' % parent_table, '"%s"' % parent_alias))
4717 added_tables = new_tables
# mutate the query in place: extend WHERE clause, params and FROM tables
4718 query.where_clause += added_clause
4719 query.where_clause_params += added_params
4720 for table in added_tables:
4721 if table not in query.tables:
4722 query.tables.append(table)
4726 # apply main rules on the object
4727 rule_obj = self.pool.get('ir.rule')
4728 rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, self._name, mode, context=context)
4729 apply_rule(rule_where_clause, rule_where_clause_params, rule_tables)
4731 # apply ir.rules from the parents (through _inherits)
4732 for inherited_model in self._inherits:
4733 rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, inherited_model, mode, context=context)
4734 apply_rule(rule_where_clause, rule_where_clause_params, rule_tables,
4735 parent_model=inherited_model, child_object=self)
# NOTE(review): elided lines in this capture (``else:`` branches, an early
# ``return`` after the debug log, ``m2o_order_list = []``); code kept verbatim.
4737 def _generate_m2o_order_by(self, order_field, query):
4739 Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
4740 either native m2o fields or function/related fields that are stored, including
4741 intermediate JOINs for inheritance if required.
4743 :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
4745 if order_field not in self._columns and order_field in self._inherit_fields:
4746 # also add missing joins for reaching the table containing the m2o field
4747 qualified_field = self._inherits_join_calc(order_field, query)
4748 order_field_column = self._inherit_fields[order_field][2]
4750 qualified_field = '"%s"."%s"' % (self._table, order_field)
4751 order_field_column = self._columns[order_field]
4753 assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
4754 if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
4755 _logger.debug("Many2one function/related fields must be stored " \
4756 "to be used as ordering fields! Ignoring sorting for %s.%s",
4757 self._name, order_field)
4760 # figure out the applicable order_by for the m2o
4761 dest_model = self.pool.get(order_field_column._obj)
4762 m2o_order = dest_model._order
4763 if not regex_order.match(m2o_order):
4764 # _order is complex, can't use it here, so we default to _rec_name
4765 m2o_order = dest_model._rec_name
4767 # extract the field names, to be able to qualify them and add desc/asc
4769 for order_part in m2o_order.split(","):
4770 m2o_order_list.append(order_part.strip().split(" ", 1)[0].strip())
4771 m2o_order = m2o_order_list
4773 # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
4774 # as we don't want to exclude results that have NULL values for the m2o
4775 src_table, src_field = qualified_field.replace('"', '').split('.', 1)
4776 dst_alias, dst_alias_statement = query.add_join((src_table, dest_model._table, src_field, 'id', src_field), implicit=False, outer=True)
4777 qualify = lambda field: '"%s"."%s"' % (dst_alias, field)
4778 return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
# NOTE(review): elided lines in this capture (``else:`` branches, an ``else``
# raising for unknown fields around 4816); code kept verbatim.
4780 def _generate_order_by(self, order_spec, query):
4782 Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
4783 a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.
4785 :raise: except_orm in case order_spec is malformed
4787 order_by_clause = ''
4788 order_spec = order_spec or self._order
4790 order_by_elements = []
4791 self._check_qorder(order_spec)
4792 for order_part in order_spec.split(','):
4793 order_split = order_part.strip().split(' ')
4794 order_field = order_split[0].strip()
4795 order_direction = order_split[1].strip() if len(order_split) == 2 else ''
4797 if order_field == 'id':
4798 order_by_elements.append('"%s"."id" %s' % (self._table, order_direction))
4799 elif order_field in self._columns:
4800 order_column = self._columns[order_field]
4801 if order_column._classic_read:
4802 inner_clause = '"%s"."%s"' % (self._table, order_field)
4803 elif order_column._type == 'many2one':
4804 inner_clause = self._generate_m2o_order_by(order_field, query)
4806 continue # ignore non-readable or "non-joinable" fields
4807 elif order_field in self._inherit_fields:
4808 parent_obj = self.pool.get(self._inherit_fields[order_field][3])
4809 order_column = parent_obj._columns[order_field]
4810 if order_column._classic_read:
4811 inner_clause = self._inherits_join_calc(order_field, query)
4812 elif order_column._type == 'many2one':
4813 inner_clause = self._generate_m2o_order_by(order_field, query)
4815 continue # ignore non-readable or "non-joinable" fields
# m2o ordering may yield several qualified columns; append each one
4817 if isinstance(inner_clause, list):
4818 for clause in inner_clause:
4819 order_by_elements.append("%s %s" % (clause, order_direction))
4821 order_by_elements.append("%s %s" % (inner_clause, order_direction))
4822 if order_by_elements:
4823 order_by_clause = ",".join(order_by_elements)
4825 return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
# NOTE(review): elided lines in this capture (the ``if count:`` guard and the
# ``res = cr.fetchall()`` before each return); code kept verbatim.
4827 def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
4829 Private implementation of search() method, allowing specifying the uid to use for the access right check.
4830 This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
4831 by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
4832 This is ok at the security level because this method is private and not callable through XML-RPC.
4834 :param access_rights_uid: optional user ID to use when checking access rights
4835 (not for ir.rules, this is only for ir.model.access)
4839 self.check_access_rights(cr, access_rights_uid or user, 'read')
4841 # For transient models, restrict access to the current user, except for the super-user
4842 if self.is_transient() and self._log_access and user != SUPERUSER_ID:
4843 args = expression.AND(([('create_uid', '=', user)], args or []))
# build the SQL query: domain -> WHERE, ir.rules, then ORDER BY
4845 query = self._where_calc(cr, user, args, context=context)
4846 self._apply_ir_rules(cr, user, query, 'read', context=context)
4847 order_by = self._generate_order_by(order, query)
4848 from_clause, where_clause, where_clause_params = query.get_sql()
4850 limit_str = limit and ' limit %d' % limit or ''
4851 offset_str = offset and ' offset %d' % offset or ''
4852 where_str = where_clause and (" WHERE %s" % where_clause) or ''
4855 cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
4858 cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
4860 return [x[0] for x in res]
4862 # returns the different values ever entered for one field
4863 # this is used, for example, in the client when the user hits enter on
# NOTE(review): the sentence above is cut short — its continuation (and the
# ``args`` default guard inside the method) are elided in this capture.
4865 def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
# inherited fields delegate to the parent model that actually owns the column
4868 if field in self._inherit_fields:
4869 return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
4871 return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
# NOTE(review): elided lines in this capture (context/default init, the early
# return for already-seen ids, ``res = {}``, several branch keywords and the
# final return); code kept verbatim.
4873 def copy_data(self, cr, uid, id, default=None, context=None):
4875 Copy given record's data with all its fields values
4877 :param cr: database cursor
4878 :param uid: current user id
4879 :param id: id of the record to copy
4880 :param default: field values to override in the original values of the copied record
4881 :type default: dictionary
4882 :param context: context arguments, like lang, time zone
4883 :type context: dictionary
4884 :return: dictionary containing all the field values
4890 # avoid recursion through already copied records in case of circular relationship
4891 seen_map = context.setdefault('__copy_data_seen',{})
4892 if id in seen_map.setdefault(self._name,[]):
4894 seen_map[self._name].append(id)
# 'state' defaults back to the model default unless explicitly overridden
4898 if 'state' not in default:
4899 if 'state' in self._defaults:
4900 if callable(self._defaults['state']):
4901 default['state'] = self._defaults['state'](self, cr, uid, context)
4903 default['state'] = self._defaults['state']
# read in the source language so translations can be re-applied afterwards
4905 context_wo_lang = context.copy()
4906 if 'lang' in context:
4907 del context_wo_lang['lang']
4908 data = self.read(cr, uid, [id,], context=context_wo_lang)
4912 raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))
4914 # build a black list of fields that should not be copied
4915 blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
4916 def blacklist_given_fields(obj):
4917 # blacklist the fields that are given by inheritance
4918 for other, field_to_other in obj._inherits.items():
4919 blacklist.add(field_to_other)
4920 if field_to_other in default:
4921 # all the fields of 'other' are given by the record: default[field_to_other],
4922 # except the ones redefined in self
4923 blacklist.update(set(self.pool.get(other)._all_columns) - set(self._columns))
4925 blacklist_given_fields(self.pool.get(other))
4926 blacklist_given_fields(self)
4929 for f, colinfo in self._all_columns.items():
4930 field = colinfo.column
4933 elif f in blacklist:
4935 elif isinstance(field, fields.function):
4937 elif field._type == 'many2one':
4938 res[f] = data[f] and data[f][0]
4939 elif field._type == 'one2many':
4940 other = self.pool.get(field._obj)
4941 # duplicate following the order of the ids because we'll rely on
4942 # it later for copying translations in copy_translation()!
4943 lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
4944 # the lines are duplicated using the wrong (old) parent, but then
4945 # are reassigned to the correct one thanks to the (0, 0, ...)
4946 res[f] = [(0, 0, line) for line in lines if line]
4947 elif field._type == 'many2many':
4948 res[f] = [(6, 0, data[f])]
# NOTE(review): elided lines in this capture (context guard, early return for
# seen ids, ``else: continue`` branches); code kept verbatim.
4954 def copy_translations(self, cr, uid, old_id, new_id, context=None):
4958 # avoid recursion through already copied records in case of circular relationship
4959 seen_map = context.setdefault('__copy_translations_seen',{})
4960 if old_id in seen_map.setdefault(self._name,[]):
4962 seen_map[self._name].append(old_id)
4964 trans_obj = self.pool.get('ir.translation')
4965 # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
4966 fields = self.fields_get(cr, uid, context=context)
4968 translation_records = []
4969 for field_name, field_def in fields.items():
4970 # we must recursively copy the translations for o2o and o2m
4971 if field_def['type'] == 'one2many':
4972 target_obj = self.pool.get(field_def['relation'])
4973 old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
4974 # here we rely on the order of the ids to match the translations
4975 # as foreseen in copy_data()
4976 old_children = sorted(old_record[field_name])
4977 new_children = sorted(new_record[field_name])
4978 for (old_child, new_child) in zip(old_children, new_children):
4979 target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
4980 # and for translatable fields we keep them for copy
4981 elif field_def.get('translate'):
4983 if field_name in self._columns:
4984 trans_name = self._name + "," + field_name
4985 elif field_name in self._inherit_fields:
4986 trans_name = self._inherit_fields[field_name][0] + "," + field_name
4988 trans_ids = trans_obj.search(cr, uid, [
4989 ('name', '=', trans_name),
4990 ('res_id', '=', old_id)
4992 translation_records.extend(trans_obj.read(cr, uid, trans_ids, context=context))
# re-create each collected translation, re-pointed at the new record
4994 for record in translation_records:
4996 record['res_id'] = new_id
4997 trans_obj.create(cr, uid, record, context=context)
# NOTE(review): the trailing ``return new_id`` appears elided in this capture;
# code kept verbatim.
5000 def copy(self, cr, uid, id, default=None, context=None):
5002 Duplicate record with given id updating it with default values
5004 :param cr: database cursor
5005 :param uid: current user id
5006 :param id: id of the record to copy
5007 :param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overridden_value, ...}``
5008 :type default: dictionary
5009 :param context: context arguments, like lang, time zone
5010 :type context: dictionary
5011 :return: id of the newly created record
# copy the context so copy_data/copy_translations bookkeeping keys
# ('__copy_data_seen', ...) don't leak back to the caller
5016 context = context.copy()
5017 data = self.copy_data(cr, uid, id, default, context)
5018 new_id = self.create(cr, uid, data, context)
5019 self.copy_translations(cr, uid, id, new_id, context)
5022 def exists(self, cr, uid, ids, context=None):
5023 """Checks whether the given id or ids exist in this model,
5024 and return the list of ids that do. This is simple to use for
5025 a truth test on a browse_record::
5030 :param ids: id or list of ids to check for existence
5031 :type ids: int or [int]
5032 :return: the list of ids that currently exist, out of
5035 if type(ids) in (int, long):
5037 query = 'SELECT id FROM "%s"' % self._table
5038 cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
5039 return [x[0] for x in cr.fetchall()]
# Deprecated public wrapper around _check_recursion(); logs a warning first.
# NOTE(review): the continuation of the warning call (after the trailing
# backslash, presumably ``self._name)``) is elided in this capture.
5041 def check_recursion(self, cr, uid, ids, context=None, parent=None):
5042 _logger.warning("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
5044 assert parent is None or parent in self._columns or parent in self._inherit_fields,\
5045 "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
5046 return self._check_recursion(cr, uid, ids, context, parent)
# NOTE(review): elided lines in this capture (the ``while`` loop header, the
# ``ids_parent``/``ids_parent2`` initializations and the final returns); code
# kept verbatim.
5048 def _check_recursion(self, cr, uid, ids, context=None, parent=None):
5050 Verifies that there is no loop in a hierarchical structure of records,
5051 by following the parent relationship using the **parent** field until a loop
5052 is detected or until a top-level record is found.
5054 :param cr: database cursor
5055 :param uid: current user id
5056 :param ids: list of ids of records to check
5057 :param parent: optional parent field name (default: ``self._parent_name = parent_id``)
5058 :return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
5062 parent = self._parent_name
5064 query = 'SELECT distinct "%s" FROM "%s" WHERE id IN %%s' % (parent, self._table)
# walk up one parent level per pass, chunking ids to respect cr.IN_MAX
5067 for i in range(0, len(ids), cr.IN_MAX):
5068 sub_ids_parent = ids_parent[i:i+cr.IN_MAX]
5069 cr.execute(query, (tuple(sub_ids_parent),))
5070 ids_parent2.extend(filter(None, map(lambda x: x[0], cr.fetchall())))
5071 ids_parent = ids_parent2
5072 for i in ids_parent:
# NOTE(review): elided lines in this capture (the per-id result initialization
# loop and the final ``return result``); code kept verbatim.
5077 def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
5078 """Retrieve the External ID(s) of any database record.
5080 **Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``
5082 :return: map of ids to the list of their fully qualified External IDs
5083 in the form ``module.key``, or an empty list when there's no External
5084 ID for a record, e.g.::
5086 { 'id': ['module.ext_id', 'module.ext_id_bis'],
5089 ir_model_data = self.pool.get('ir.model.data')
5090 data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
5091 data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
5094 # can't use dict.fromkeys() as the list would be shared!
5096 for record in data_results:
5097 result[record['res_id']].append('%(module)s.%(name)s' % record)
# NOTE(review): elided lines in this capture (the loop body flattening each
# list to a single External ID and the final return); code kept verbatim.
5100 def get_external_id(self, cr, uid, ids, *args, **kwargs):
5101 """Retrieve the External ID of any database record, if there
5102 is one. This method works as a possible implementation
5103 for a function field, to be able to add it to any
5104 model object easily, referencing it as ``Model.get_external_id``.
5106 When multiple External IDs exist for a record, only one
5107 of them is returned (randomly).
5109 :return: map of ids to their fully qualified XML ID,
5110 defaulting to an empty string when there's none
5111 (to be usable as a function field),
5114 { 'id': 'module.ext_id',
5117 results = self._get_xml_ids(cr, uid, ids)
5118 for k, v in results.iteritems():
5125 # backwards compatibility: legacy names kept as aliases of the External-ID accessors
5126 get_xml_id = get_external_id
5127 _get_xml_ids = _get_external_ids
5130 def is_transient(self):
5131 """ Return whether the model is transient.
5133 See :class:`TransientModel`.
5136 return self._transient
5138 def _transient_clean_rows_older_than(self, cr, seconds):
5139 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5140 cr.execute("SELECT id FROM " + self._table + " WHERE"
5141 " COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp <"
5142 " ((now() at time zone 'UTC') - interval %s)", ("%s seconds" % seconds,))
5143 ids = [x[0] for x in cr.fetchall()]
5144 self.unlink(cr, SUPERUSER_ID, ids)
# Unlink the oldest rows of this transient model, keeping at most ``count``.
# NOTE(review): the opening ``cr.execute(`` call (and apparently the logic
# selecting rows beyond ``count``) is elided in this capture; code kept verbatim.
5146 def _transient_clean_old_rows(self, cr, count):
5147 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
5149 "SELECT id, COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
5150 " AS t FROM " + self._table +
5151 " ORDER BY t LIMIT %s", (count,))
5152 ids = [x[0] for x in cr.fetchall()]
5153 self.unlink(cr, SUPERUSER_ID, ids)
# NOTE(review): the early ``return`` inside the throttling branch and the
# final return appear elided in this capture; code kept verbatim.
5155 def _transient_vacuum(self, cr, uid, force=False):
5156 """Clean the transient records.
5158 This unlinks old records from the transient model tables whenever the
5159 "_transient_max_count" or "_max_age" conditions (if any) are reached.
5160 Actual cleaning will happen only once every "_transient_check_time" calls.
5161 This means this method can be called frequently (e.g. whenever
5162 a new record is created).
5164 assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
# throttle: unless forced, only do real work once per _transient_check_time calls
5165 self._transient_check_count += 1
5166 if (not force) and (self._transient_check_count % self._transient_check_time):
5167 self._transient_check_count = 0
5170 # Age-based expiration
5171 if self._transient_max_hours:
5172 self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)
5174 # Count-based expiration
5175 if self._transient_max_count:
5176 self._transient_clean_old_rows(cr, self._transient_max_count)
5180 def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None):
5181 """ Serializes one2many and many2many commands into record dictionaries
5182 (as if all the records came from the database via a read()). This
5183 method is aimed at onchange methods on one2many and many2many fields.
5185 Because commands might be creation commands, not all record dicts
5186 will contain an ``id`` field. Commands matching an existing record
5187 will have an ``id``.
5189 :param field_name: name of the one2many or many2many field matching the commands
5190 :type field_name: str
5191 :param commands: one2many or many2many commands to execute on ``field_name``
5192 :type commands: list((int|False, int|False, dict|False))
5193 :param fields: list of fields to read from the database, when applicable
5194 :type fields: list(str)
5195 :returns: records in a shape similar to that returned by ``read()``
5196 (except records may be missing the ``id`` field if they don't exist in db)
5199 result = [] # result (list of dict)
5200 record_ids = [] # ids of records to read
5201 updates = {} # {id: dict} of updates on particular records
5203 for command in commands:
5204 if not isinstance(command, (list, tuple)):
5205 record_ids.append(command)
5206 elif command[0] == 0:
5207 result.append(command[2])
5208 elif command[0] == 1:
5209 record_ids.append(command[1])
5210 updates.setdefault(command[1], {}).update(command[2])
5211 elif command[0] in (2, 3):
5212 record_ids = [id for id in record_ids if id != command[1]]
5213 elif command[0] == 4:
5214 record_ids.append(command[1])
5215 elif command[0] == 5:
5216 result, record_ids = [], []
5217 elif command[0] == 6:
5218 result, record_ids = [], list(command[2])
5220 # read the records and apply the updates
5221 other_model = self.pool.get(self._all_columns[field_name].column._obj)
5222 for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
5223 record.update(updates.get(record['id'], {}))
5224 result.append(record)
    # for backward compatibility: old name of resolve_2many_commands, kept
    # so existing callers using the former name keep working.
    resolve_o2m_commands_to_record_dicts = resolve_2many_commands
# keep this import here; moving it to the top of the file would create a
# circular-import error
class Model(BaseModel):
    """Main super-class for regular database-persisted OpenERP models.

    OpenERP models are created by inheriting from this class::

        class user(Model):
            ...

    The system will later instantiate the class once per database (on
    which the class' module is installed).
    """
    _register = False   # hidden from the ORM registry: only meant to be inherited from in Python
    _transient = False  # regular models persist; TransientModel flips this to True
class TransientModel(BaseModel):
    """Model super-class for transient records, meant to be temporarily
    persisted, and regularly vacuum-cleaned.

    A TransientModel has a simplified access rights management,
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
    to all TransientModel records.
    """
    _register = False # not visible in ORM registry, meant to be python-inherited only
    # NOTE(review): this flag was dropped by the file mangling and is
    # reconstructed: the _transient_* vacuum methods assert on it, and a
    # TransientModel is by definition the transient case.
    _transient = True
class AbstractModel(BaseModel):
    """Abstract Model super-class for creating an abstract class meant to be
    inherited by regular models (Models or TransientModels) but not meant to
    be usable on its own, or persisted.

    Technical note: we don't want to make AbstractModel the super-class of
    Model or BaseModel because it would not make sense to put the main
    definition of persistence methods such as create() in it, and still we
    should be able to override them within an AbstractModel.
    """
    _auto = False       # abstract models get no database backend at all
    _register = False   # hidden from the ORM registry: only meant to be inherited from in Python
def itemgetter_tuple(items):
    """ Fixes itemgetter inconsistency (useful in some cases) of not returning
    a tuple if len(items) == 1: always returns an n-tuple where n = len(items)

    :param items: sequence of indexes/keys to extract
    :return: callable mapping a subscriptable object to the n-tuple of
             its ``items``
    """
    # NOTE(review): the two length guards were dropped by the file mangling
    # and are reconstructed; the lone single-item lambda below and the
    # docstring ("always returns an n-tuple") imply them.
    if len(items) == 0:
        return lambda a: ()
    if len(items) == 1:
        # operator.itemgetter with one key returns a bare value, not a
        # 1-tuple, hence the special case.
        return lambda gettable: (gettable[items[0]],)
    return operator.itemgetter(*items)
class ImportWarning(Warning):
    """ Used to send warnings upwards the stack during the import process """
    # No behavior of its own: the subclass only exists as a distinct
    # warning category for import-time messages.
    pass
def convert_pgerror_23502(model, fields, info, e):
    """Convert a PostgreSQL not_null_violation (SQLSTATE 23502) error into
    a user-readable message naming the offending field.

    :param model: model on which the error occurred (unused here)
    :param fields: mapping of field names to their descriptions; the
        ``'string'`` key of a description is used as the field label
    :param info: extra context about the operation (unused here)
    :param e: the originating database exception
    :return: dict with a ``'message'`` key, plus a ``'field'`` key when the
        offending column could be identified
    """
    m = re.match(r'^null value in column "(?P<field>\w+)" violates '
                 r'not-null constraint\n',
                 str(e))
    # Bug fix: the original dereferenced m.group('field') *before* testing
    # "if not m", raising AttributeError whenever the regex did not match.
    # Test the match first, then extract the field name.
    if not m or m.group('field') not in fields:
        return {'message': unicode(e)}
    field_name = m.group('field')
    message = _(u"Missing required value for the field '%s'.") % field_name
    field = fields.get(field_name)
    if field:
        # The same field name may belong to an o2m sub-record, so the label
        # is offered as a hint rather than stated as fact.
        message = _(u"%s This might be '%s' in the current model, or a field "
                    u"of the same name in an o2m.") % (message, field['string'])
    return {
        'message': message,
        'field': field_name,
    }
# Map of SQLSTATE codes to converter callables turning a postgres error
# into a {'message': ..., ...} dict; unknown codes fall back to a converter
# that just stringifies the error.
PGERROR_TO_OE = collections.defaultdict(
    # shape of mapped converters
    lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), {
    # not_null_violation
    '23502': convert_pgerror_23502,
})
5314 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: